Consolidate env reading to single config object. #600

Draft · wants to merge 7 commits into main
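
The changed files below all follow the same pattern: ad-hoc `os.environ.get(...)` parsing is replaced by attribute reads on a single, lazily cached `config` object defined in the new `datadog_lambda/config.py`. A rough before/after sketch, assuming the package from this branch is importable (the variable names here are only illustrative):

```python
import os

from datadog_lambda.config import config  # new module added by this PR

# Before: each call site parsed the raw environment variable itself.
flush_to_log_before = os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true"

# After: call sites read a typed, lazily cached attribute instead.
flush_to_log_after = config.flush_to_log

print(flush_to_log_before, flush_to_log_after)
```

One small behavioral note: the shared `as_bool` helper accepts both `"true"` and `"1"`, which is slightly more permissive than some of the inline `== "true"` checks it replaces.
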
15 changes: 7 additions & 8 deletions datadog_lambda/api.py
@@ -1,7 +1,7 @@
import logging
import os

from datadog_lambda.fips import fips_mode_enabled
from datadog_lambda.config import config

logger = logging.getLogger(__name__)
KMS_ENCRYPTION_CONTEXT_KEY = "LambdaFunctionName"
@@ -29,7 +29,6 @@ def decrypt_kms_api_key(kms_client, ciphertext):
is added. We need to try decrypting the API key both with and without the encryption context.
"""
# Try without encryption context, in case API key was encrypted using the AWS CLI
function_name = os.environ.get("AWS_LAMBDA_FUNCTION_NAME")
try:
plaintext = kms_client.decrypt(CiphertextBlob=decoded_bytes)[
"Plaintext"
@@ -43,7 +42,7 @@ def decrypt_kms_api_key(kms_client, ciphertext):
plaintext = kms_client.decrypt(
CiphertextBlob=decoded_bytes,
EncryptionContext={
KMS_ENCRYPTION_CONTEXT_KEY: function_name,
KMS_ENCRYPTION_CONTEXT_KEY: config.function_name,
},
)["Plaintext"].decode("utf-8")

@@ -66,7 +65,7 @@ def get_api_key() -> str:
DD_API_KEY = os.environ.get("DD_API_KEY", os.environ.get("DATADOG_API_KEY", ""))

LAMBDA_REGION = os.environ.get("AWS_REGION", "")
if fips_mode_enabled:
if config.fips_mode_enabled:
logger.debug(
"FIPS mode is enabled, using FIPS endpoints for secrets management."
)
@@ -82,7 +81,7 @@ def get_api_key() -> str:
return ""
endpoint_url = (
f"https://secretsmanager-fips.{secrets_region}.amazonaws.com"
if fips_mode_enabled
if config.fips_mode_enabled
else None
)
secrets_manager_client = _boto3_client(
@@ -95,7 +94,7 @@ def get_api_key() -> str:
# SSM endpoints: https://docs.aws.amazon.com/general/latest/gr/ssm.html
fips_endpoint = (
f"https://ssm-fips.{LAMBDA_REGION}.amazonaws.com"
if fips_mode_enabled
if config.fips_mode_enabled
else None
)
ssm_client = _boto3_client("ssm", endpoint_url=fips_endpoint)
@@ -106,7 +105,7 @@ def get_api_key() -> str:
# KMS endpoints: https://docs.aws.amazon.com/general/latest/gr/kms.html
fips_endpoint = (
f"https://kms-fips.{LAMBDA_REGION}.amazonaws.com"
if fips_mode_enabled
if config.fips_mode_enabled
else None
)
kms_client = _boto3_client("kms", endpoint_url=fips_endpoint)
@@ -118,7 +117,7 @@ def get_api_key() -> str:


def init_api():
if not os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true":
if not config.flush_to_log:
# Make sure that this package would always be lazy-loaded/outside from the critical path
# since underlying packages are quite heavy to load
# and useless with the extension unless sending metrics with timestamps
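
The repeated endpoint expressions above all follow one pattern: pin an explicit FIPS endpoint only when `config.fips_mode_enabled` is true, otherwise pass `None` so boto3 picks its default. A minimal standalone sketch of that pattern (the helper name is made up here, not part of the diff):

```python
from typing import Optional


def _fips_endpoint(service: str, region: str, fips_mode_enabled: bool) -> Optional[str]:
    # Mirrors the inline expressions in get_api_key(): an explicit *-fips
    # endpoint when FIPS mode is on, otherwise None so boto3 uses its default.
    return (
        f"https://{service}-fips.{region}.amazonaws.com" if fips_mode_enabled else None
    )


print(_fips_endpoint("ssm", "us-gov-west-1", True))  # https://ssm-fips.us-gov-west-1.amazonaws.com
print(_fips_endpoint("kms", "us-east-1", False))     # None
```
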
12 changes: 3 additions & 9 deletions datadog_lambda/cold_start.py
@@ -1,8 +1,9 @@
import time
import os
from typing import List, Hashable
import logging

from datadog_lambda.config import config

logger = logging.getLogger(__name__)

_cold_start = True
@@ -86,14 +87,12 @@ def reset_node_stacks():

def push_node(module_name, file_path):
node = ImportNode(module_name, file_path, time.time_ns())
global import_stack
if import_stack:
import_stack[-1].children.append(node)
import_stack.append(node)


def pop_node(module_name):
global import_stack
if not import_stack:
return
node = import_stack.pop()
@@ -102,7 +101,6 @@ def pop_node(module_name):
end_time_ns = time.time_ns()
node.end_time_ns = end_time_ns
if not import_stack: # import_stack empty, a root node has been found
global root_nodes
root_nodes.append(node)


@@ -147,11 +145,7 @@ def wrapped_find_spec(*args, **kwargs):


def initialize_cold_start_tracing():
if (
is_new_sandbox()
and os.environ.get("DD_TRACE_ENABLED", "true").lower() == "true"
and os.environ.get("DD_COLD_START_TRACING", "true").lower() == "true"
):
if is_new_sandbox() and config.trace_enabled and config.cold_start_tracing:
from sys import meta_path

for importer in meta_path:
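
A side note on the dropped `global` statements above: `global` is only required to rebind a module-level name, not to mutate the object it refers to, so appends to `import_stack` and `root_nodes` work without it. A small illustration:

```python
import_stack = []


def push(value):
    # Mutating the existing list needs no `global` declaration.
    import_stack.append(value)


def reset_stack():
    # Rebinding the name to a brand-new list does.
    global import_stack
    import_stack = []


push("node")
print(import_stack)  # ['node']
reset_stack()
print(import_stack)  # []
```
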
70 changes: 70 additions & 0 deletions datadog_lambda/config.py
@@ -0,0 +1,70 @@
# Unless explicitly stated otherwise all files in this repository are licensed
# under the Apache License Version 2.0.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019 Datadog, Inc.

import logging
import os


def _get_env(key, default=None, cast=None):
@property
def _getter(self):
if not hasattr(self, prop_key):
val = os.environ.get(key, default)
if cast is not None:
val = cast(val)
setattr(self, prop_key, val)
return getattr(self, prop_key)

prop_key = f"_config_{key}"
return _getter


def as_bool(val):
return val.lower() == "true" or val == "1"


class Config:

add_span_pointers = _get_env("DD_BOTOCORE_ADD_SPAN_POINTERS", "true", as_bool)
cold_start_tracing = _get_env("DD_COLD_START_TRACING", "true", as_bool)
enhanced_metrics_enabled = _get_env("DD_ENHANCED_METRICS", "true", as_bool)
flush_in_thread = _get_env("DD_FLUSH_IN_THREAD", "false", as_bool)
flush_to_log = _get_env("DD_FLUSH_TO_LOG", "false", as_bool)
function_name = _get_env("AWS_LAMBDA_FUNCTION_NAME")
is_gov_region = _get_env("AWS_REGION", "", lambda x: x.startswith("us-gov-"))
is_in_tests = _get_env("DD_INTEGRATION_TEST", "false", as_bool)
is_lambda_context = _get_env("AWS_LAMBDA_FUNCTION_NAME", None, bool)
otel_enabled = _get_env("DD_TRACE_OTEL_ENABLED", "false", as_bool)
telemetry_enabled = _get_env(
"DD_INSTRUMENTATION_TELEMETRY_ENABLED", "false", as_bool
)
trace_enabled = _get_env("DD_TRACE_ENABLED", "true", as_bool)

@property
def fips_mode_enabled(self):
if not hasattr(self, "_config_fips_mode_enabled"):
self._config_fips_mode_enabled = (
os.environ.get(
"DD_LAMBDA_FIPS_MODE",
"true" if self.is_gov_region else "false",
).lower()
== "true"
)
return self._config_fips_mode_enabled

def reset(self):
for attr in dir(self):
if attr.startswith("_config_"):
delattr(self, attr)


config = Config()

if config.is_gov_region or config.fips_mode_enabled:
logger = logging.getLogger(__name__)
logger.debug(
"Python Lambda Layer FIPS mode is %s.",
"enabled" if config.fips_mode_enabled else "not enabled",
)
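
The `_get_env` helper builds a property that reads its environment variable on first access, caches the parsed value on the instance under a `_config_`-prefixed attribute, and lets `reset()` clear every cached value again (useful in tests). A standalone sketch of the same pattern; `DEMO_FLAG`, `_env_property`, and `DemoConfig` are invented here purely for illustration:

```python
import os


def _env_property(key, default=None, cast=None):
    cache_key = f"_config_{key}"

    @property
    def getter(self):
        if not hasattr(self, cache_key):
            val = os.environ.get(key, default)
            if cast is not None:
                val = cast(val)
            setattr(self, cache_key, val)
        return getattr(self, cache_key)

    return getter


def as_bool(val):
    return val.lower() == "true" or val == "1"


class DemoConfig:
    demo_flag = _env_property("DEMO_FLAG", "false", as_bool)

    def reset(self):
        for attr in dir(self):
            if attr.startswith("_config_"):
                delattr(self, attr)


os.environ["DEMO_FLAG"] = "true"
cfg = DemoConfig()
print(cfg.demo_flag)  # True: read from the environment and cached

os.environ["DEMO_FLAG"] = "false"
print(cfg.demo_flag)  # still True: the cached value wins until reset()

cfg.reset()
print(cfg.demo_flag)  # False: re-read after the cache was cleared
```

One consequence of the caching is that changing an environment variable after first access has no effect until `reset()` is called, which is presumably why `reset()` is provided for the test suite.
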
19 changes: 0 additions & 19 deletions datadog_lambda/fips.py

This file was deleted.
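
The standalone module is gone; the equivalent flag is now computed by `Config.fips_mode_enabled` above: FIPS mode defaults to on in GovCloud regions and off elsewhere, and `DD_LAMBDA_FIPS_MODE` overrides that default in either direction. A pure-function sketch of that logic (the helper below is illustrative, not part of the diff):

```python
from typing import Optional


def fips_mode_enabled(region: str, dd_lambda_fips_mode: Optional[str]) -> bool:
    is_gov_region = region.startswith("us-gov-")
    default = "true" if is_gov_region else "false"
    value = dd_lambda_fips_mode if dd_lambda_fips_mode is not None else default
    return value.lower() == "true"


print(fips_mode_enabled("us-gov-west-1", None))     # True: GovCloud default
print(fips_mode_enabled("us-east-1", None))         # False
print(fips_mode_enabled("us-gov-east-1", "false"))  # False: explicit override
```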

17 changes: 5 additions & 12 deletions datadog_lambda/metric.py
@@ -5,14 +5,13 @@

import enum
import logging
import os
import time
from datetime import datetime, timedelta

import ujson as json

from datadog_lambda.config import config
from datadog_lambda.extension import should_use_extension
from datadog_lambda.fips import fips_mode_enabled
from datadog_lambda.tags import dd_lambda_layer_tag, get_enhanced_metrics_tags

logger = logging.getLogger(__name__)
@@ -28,10 +27,10 @@ class MetricsHandler(enum.Enum):
def _select_metrics_handler():
if should_use_extension:
return MetricsHandler.EXTENSION
if os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true":
if config.flush_to_log:
return MetricsHandler.FORWARDER

if fips_mode_enabled:
if config.fips_mode_enabled:
logger.debug(
"With FIPS mode enabled, the Datadog API metrics handler is unavailable."
)
@@ -58,14 +57,8 @@ def _select_metrics_handler():
from datadog_lambda.api import init_api
from datadog_lambda.thread_stats_writer import ThreadStatsWriter

flush_in_thread = os.environ.get("DD_FLUSH_IN_THREAD", "").lower() == "true"
init_api()
lambda_stats = ThreadStatsWriter(flush_in_thread)


enhanced_metrics_enabled = (
os.environ.get("DD_ENHANCED_METRICS", "true").lower() == "true"
)
lambda_stats = ThreadStatsWriter(config.flush_in_thread)


def lambda_metric(metric_name, value, timestamp=None, tags=None, force_async=False):
@@ -191,7 +184,7 @@ def submit_enhanced_metric(metric_name, lambda_context):
metric_name (str): metric name w/o enhanced prefix i.e. "invocations" or "errors"
lambda_context (object): Lambda context dict passed to the function by AWS
"""
if not enhanced_metrics_enabled:
if not config.enhanced_metrics_enabled:
logger.debug(
"Not submitting enhanced metric %s because enhanced metrics are disabled",
metric_name,
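
Putting the handler-selection branches above together: the extension wins, then the log forwarder, and the direct Datadog API path is skipped when FIPS mode is on. A condensed standalone sketch; the `DATADOG_API` and `NO_METRICS` members and the final fall-through are not visible in these hunks and are assumptions:

```python
import enum


class MetricsHandler(enum.Enum):
    EXTENSION = "extension"
    FORWARDER = "forwarder"
    DATADOG_API = "datadog_api"  # assumed member, not shown in the diff
    NO_METRICS = "no_metrics"    # assumed member, not shown in the diff


def select_metrics_handler(use_extension: bool, flush_to_log: bool, fips_mode: bool):
    if use_extension:
        return MetricsHandler.EXTENSION
    if flush_to_log:
        return MetricsHandler.FORWARDER
    if fips_mode:
        # The diff only shows the debug log here; falling back to "no metrics"
        # is an assumption about the hidden remainder of the function.
        return MetricsHandler.NO_METRICS
    return MetricsHandler.DATADOG_API


print(select_metrics_handler(False, False, True))   # MetricsHandler.NO_METRICS
print(select_metrics_handler(False, False, False))  # MetricsHandler.DATADOG_API
```
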
12 changes: 4 additions & 8 deletions datadog_lambda/patch.py
@@ -3,7 +3,6 @@
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019 Datadog, Inc.

import os
import sys
import logging
import zlib
@@ -13,10 +12,8 @@
from wrapt.importer import when_imported
from ddtrace import patch_all as patch_all_dd

from datadog_lambda.tracing import (
get_dd_trace_context,
dd_tracing_enabled,
)
from datadog_lambda.config import config
from datadog_lambda.tracing import get_dd_trace_context
from collections.abc import MutableMapping

logger = logging.getLogger(__name__)
@@ -32,7 +29,7 @@ def patch_all():
"""
_patch_for_integration_tests()

if dd_tracing_enabled:
if config.trace_enabled:
patch_all_dd()
else:
_patch_http()
@@ -44,8 +41,7 @@ def _patch_for_integration_tests():
Patch `requests` to log the outgoing requests for integration tests.
"""
global _integration_tests_patched
is_in_tests = os.environ.get("DD_INTEGRATION_TEST", "false").lower() == "true"
if not _integration_tests_patched and is_in_tests:
if not _integration_tests_patched and config.is_in_tests:
wrap("requests", "Session.send", _log_request)
_integration_tests_patched = True

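
For reference, `_patch_for_integration_tests` keeps its patch-once guard; only the flag source changes to `config.is_in_tests`. A small standalone sketch of that guard (the real code wraps `requests.Session.send`, as in the hunk above):

```python
_integration_tests_patched = False


def patch_for_integration_tests(is_in_tests: bool) -> bool:
    """Return True only on the call that actually applies the patch."""
    global _integration_tests_patched
    if not _integration_tests_patched and is_in_tests:
        # The real implementation wraps requests.Session.send here.
        _integration_tests_patched = True
        return True
    return False


print(patch_for_integration_tests(False))  # False (flag off, nothing patched)
print(patch_for_integration_tests(True))   # True  (patched on first enabled call)
print(patch_for_integration_tests(True))   # False (already patched)
```
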
9 changes: 2 additions & 7 deletions datadog_lambda/span_pointers.py
@@ -1,28 +1,23 @@
from itertools import chain
import logging
import os
from typing import List
from typing import Optional

from ddtrace._trace._span_pointer import _SpanPointerDirection
from ddtrace._trace._span_pointer import _SpanPointerDescription

from datadog_lambda.config import config
from datadog_lambda.metric import submit_dynamodb_stream_type_metric
from datadog_lambda.trigger import EventTypes


logger = logging.getLogger(__name__)


dd_botocore_add_span_pointers = os.environ.get(
"DD_BOTOCORE_ADD_SPAN_POINTERS", "true"
).lower() in ("true", "1")


def calculate_span_pointers(
event_source,
event,
botocore_add_span_pointers=dd_botocore_add_span_pointers,
botocore_add_span_pointers=config.add_span_pointers,
) -> List[_SpanPointerDescription]:
try:
if botocore_add_span_pointers:
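
One detail worth noting about the new default above: Python evaluates default-argument expressions once, at function definition time, so `config.add_span_pointers` is read when the module is imported, which matches the timing of the module-level constant it replaces. A tiny illustration with a made-up flag:

```python
flag = True


def do_work(enabled=flag):
    # The default expression was evaluated once, when `def` ran.
    return enabled


flag = False
print(do_work())      # True: the captured default is unchanged
print(do_work(flag))  # False: an explicit argument overrides the default
```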