
Commit 7203dc9

Consolidate env reading to single config object.
1 parent 1d6d28f commit 7203dc9
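
In practice, call sites stop parsing environment variables themselves and read attributes from one shared config object instead. A minimal sketch of the pattern, using names taken from the diffs below rather than a verbatim excerpt of the new module:

import os


class config:
    # Each setting is parsed from the environment once, at import time,
    # instead of being re-read via os.environ.get() at every call site.
    flush_to_log = os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true"
    trace_enabled = os.environ.get("DD_TRACE_ENABLED", "true").lower() == "true"


# A call site then tests a typed attribute rather than parsing a string:
if not config.flush_to_log:
    print("would initialize the Datadog API client here")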

File tree

9 files changed: +106, -83 lines changed


datadog_lambda/api.py

Lines changed: 7 additions & 8 deletions
@@ -1,7 +1,7 @@
 import logging
 import os

-from datadog_lambda.fips import fips_mode_enabled
+from datadog_lambda.config import config

 logger = logging.getLogger(__name__)
 KMS_ENCRYPTION_CONTEXT_KEY = "LambdaFunctionName"
@@ -29,7 +29,6 @@ def decrypt_kms_api_key(kms_client, ciphertext):
     is added. We need to try decrypting the API key both with and without the encryption context.
     """
     # Try without encryption context, in case API key was encrypted using the AWS CLI
-    function_name = os.environ.get("AWS_LAMBDA_FUNCTION_NAME")
     try:
         plaintext = kms_client.decrypt(CiphertextBlob=decoded_bytes)[
             "Plaintext"
@@ -43,7 +42,7 @@ def decrypt_kms_api_key(kms_client, ciphertext):
         plaintext = kms_client.decrypt(
             CiphertextBlob=decoded_bytes,
             EncryptionContext={
-                KMS_ENCRYPTION_CONTEXT_KEY: function_name,
+                KMS_ENCRYPTION_CONTEXT_KEY: config.function_name,
             },
         )["Plaintext"].decode("utf-8")

@@ -66,7 +65,7 @@ def get_api_key() -> str:
     DD_API_KEY = os.environ.get("DD_API_KEY", os.environ.get("DATADOG_API_KEY", ""))

     LAMBDA_REGION = os.environ.get("AWS_REGION", "")
-    if fips_mode_enabled:
+    if config.fips_mode_enabled:
         logger.debug(
             "FIPS mode is enabled, using FIPS endpoints for secrets management."
         )
@@ -82,7 +81,7 @@ def get_api_key() -> str:
             return ""
         endpoint_url = (
             f"https://secretsmanager-fips.{secrets_region}.amazonaws.com"
-            if fips_mode_enabled
+            if config.fips_mode_enabled
             else None
         )
         secrets_manager_client = _boto3_client(
@@ -95,7 +94,7 @@ def get_api_key() -> str:
         # SSM endpoints: https://docs.aws.amazon.com/general/latest/gr/ssm.html
         fips_endpoint = (
             f"https://ssm-fips.{LAMBDA_REGION}.amazonaws.com"
-            if fips_mode_enabled
+            if config.fips_mode_enabled
             else None
         )
         ssm_client = _boto3_client("ssm", endpoint_url=fips_endpoint)
@@ -106,7 +105,7 @@ def get_api_key() -> str:
         # KMS endpoints: https://docs.aws.amazon.com/general/latest/gr/kms.html
         fips_endpoint = (
             f"https://kms-fips.{LAMBDA_REGION}.amazonaws.com"
-            if fips_mode_enabled
+            if config.fips_mode_enabled
             else None
         )
         kms_client = _boto3_client("kms", endpoint_url=fips_endpoint)
@@ -118,7 +117,7 @@ def get_api_key() -> str:


 def init_api():
-    if not os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true":
+    if not config.flush_to_log:
         # Make sure that this package would always be lazy-loaded/outside from the critical path
         # since underlying packages are quite heavy to load
         # and useless with the extension unless sending metrics with timestamps

datadog_lambda/cold_start.py

Lines changed: 3 additions & 9 deletions
@@ -1,8 +1,9 @@
 import time
-import os
 from typing import List, Hashable
 import logging

+from datadog_lambda.config import config
+
 logger = logging.getLogger(__name__)

 _cold_start = True
@@ -86,14 +87,12 @@ def reset_node_stacks():

 def push_node(module_name, file_path):
     node = ImportNode(module_name, file_path, time.time_ns())
-    global import_stack
     if import_stack:
         import_stack[-1].children.append(node)
     import_stack.append(node)


 def pop_node(module_name):
-    global import_stack
     if not import_stack:
         return
     node = import_stack.pop()
@@ -102,7 +101,6 @@ def pop_node(module_name):
     end_time_ns = time.time_ns()
     node.end_time_ns = end_time_ns
     if not import_stack:  # import_stack empty, a root node has been found
-        global root_nodes
         root_nodes.append(node)


@@ -147,11 +145,7 @@ def wrapped_find_spec(*args, **kwargs):


 def initialize_cold_start_tracing():
-    if (
-        is_new_sandbox()
-        and os.environ.get("DD_TRACE_ENABLED", "true").lower() == "true"
-        and os.environ.get("DD_COLD_START_TRACING", "true").lower() == "true"
-    ):
+    if is_new_sandbox() and config.trace_enabled and config.cold_start_tracing:
         from sys import meta_path

         for importer in meta_path:

datadog_lambda/config.py

Lines changed: 72 additions & 0 deletions
@@ -0,0 +1,72 @@
+# Unless explicitly stated otherwise all files in this repository are licensed
+# under the Apache License Version 2.0.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2019 Datadog, Inc.
+
+import logging
+import os
+
+
+def _get_env(key, default=None, cast=None):
+    """Get an environment variable with a default value."""
+    val = os.environ.get(key, default)
+    if cast is not None:
+        try:
+            val = cast(val)
+        except ValueError:
+            # Invalid value for the variable; fall back to the cast default.
+            return cast(default)
+    return val
+
+
+def as_bool(val):
+    """Convert a string to a boolean."""
+    if isinstance(val, bool):
+        return val
+    if isinstance(val, str):
+        val = val.lower()
+        if val in ("true", "1", "yes"):
+            return True
+        elif val in ("false", "0", "no"):
+            return False
+    raise ValueError(f"Invalid boolean value: {val}")
+
+
+class config:
+
+    function_name = os.environ.get("AWS_LAMBDA_FUNCTION_NAME")
+    flush_to_log = os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true"
+    trace_enabled = os.environ.get("DD_TRACE_ENABLED", "true").lower() == "true"
+    cold_start_tracing = (
+        os.environ.get("DD_COLD_START_TRACING", "true").lower() == "true"
+    )
+    is_gov_region = os.environ.get("AWS_REGION", "").startswith("us-gov-")
+    fips_mode_enabled = (
+        os.environ.get(
+            "DD_LAMBDA_FIPS_MODE",
+            "true" if is_gov_region else "false",
+        ).lower()
+        == "true"
+    )
+    log_level = (os.environ.get("DD_LOG_LEVEL") or "INFO").upper()
+    flush_in_thread = os.environ.get("DD_FLUSH_IN_THREAD", "").lower() == "true"
+    enhanced_metrics_enabled = (
+        os.environ.get("DD_ENHANCED_METRICS", "true").lower() == "true"
+    )
+    is_in_tests = os.environ.get("DD_INTEGRATION_TEST", "false").lower() == "true"
+    add_span_pointers = os.environ.get(
+        "DD_BOTOCORE_ADD_SPAN_POINTERS", "true"
+    ).lower() in ("true", "1")
+    otel_enabled = os.environ.get("DD_TRACE_OTEL_ENABLED", "false").lower() == "true"
+    is_lambda_context = bool(function_name)
+    telemetry_enabled = os.environ.get(
+        "DD_INSTRUMENTATION_TELEMETRY_ENABLED", "false"
+    ).lower() == "true"
+
+
+if config.is_gov_region or config.fips_mode_enabled:
+    logger = logging.getLogger(__name__)
+    logger.debug(
+        "Python Lambda Layer FIPS mode is %s.",
+        "enabled" if config.fips_mode_enabled else "not enabled",
+    )
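
For orientation, a small usage sketch of the new module, assuming only the attributes visible in this diff. `config` is a class used as a read-only namespace, so every attribute is evaluated once when `datadog_lambda.config` is first imported:

from datadog_lambda.config import config

# Attributes are plain class attributes, resolved at import time.
if config.is_lambda_context:
    print("running inside Lambda function:", config.function_name)
if config.fips_mode_enabled:
    print("FIPS endpoints will be used for secrets management")
if config.trace_enabled and config.cold_start_tracing:
    print("cold start tracing would be initialized here")

Because the values are captured at import, changing an environment variable afterwards does not affect behavior, which matches the module-level constants this commit replaces.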

datadog_lambda/fips.py

Lines changed: 0 additions & 19 deletions
This file was deleted.

datadog_lambda/logger.py

Lines changed: 4 additions & 4 deletions
@@ -1,5 +1,6 @@
 import logging
-import os
+
+from datadog_lambda.config import config

 try:
     _level_mappping = logging.getLevelNamesMapping()
@@ -18,10 +19,9 @@

 def initialize_logging(name):
     logger = logging.getLogger(name)
-    str_level = (os.environ.get("DD_LOG_LEVEL") or "INFO").upper()
-    level = _level_mappping.get(str_level)
+    level = _level_mappping.get(config.log_level)
     if level is None:
         logger.setLevel(logging.INFO)
-        logger.warning("Invalid log level: %s Defaulting to INFO", str_level)
+        logger.warning("Invalid log level: %s Defaulting to INFO", config.log_level)
     else:
         logger.setLevel(level)

datadog_lambda/metric.py

Lines changed: 5 additions & 12 deletions
@@ -5,14 +5,13 @@

 import enum
 import logging
-import os
 import time
 from datetime import datetime, timedelta

 import ujson as json

+from datadog_lambda.config import config
 from datadog_lambda.extension import should_use_extension
-from datadog_lambda.fips import fips_mode_enabled
 from datadog_lambda.tags import dd_lambda_layer_tag, get_enhanced_metrics_tags

 logger = logging.getLogger(__name__)
@@ -28,10 +27,10 @@ class MetricsHandler(enum.Enum):
 def _select_metrics_handler():
     if should_use_extension:
         return MetricsHandler.EXTENSION
-    if os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true":
+    if config.flush_to_log:
         return MetricsHandler.FORWARDER

-    if fips_mode_enabled:
+    if config.fips_mode_enabled:
         logger.debug(
             "With FIPS mode enabled, the Datadog API metrics handler is unavailable."
         )
@@ -58,14 +57,8 @@ def _select_metrics_handler():
     from datadog_lambda.api import init_api
     from datadog_lambda.thread_stats_writer import ThreadStatsWriter

-    flush_in_thread = os.environ.get("DD_FLUSH_IN_THREAD", "").lower() == "true"
     init_api()
-    lambda_stats = ThreadStatsWriter(flush_in_thread)
-
-
-enhanced_metrics_enabled = (
-    os.environ.get("DD_ENHANCED_METRICS", "true").lower() == "true"
-)
+    lambda_stats = ThreadStatsWriter(config.flush_in_thread)


 def lambda_metric(metric_name, value, timestamp=None, tags=None, force_async=False):
@@ -191,7 +184,7 @@ def submit_enhanced_metric(metric_name, lambda_context):
         metric_name (str): metric name w/o enhanced prefix i.e. "invocations" or "errors"
         lambda_context (object): Lambda context dict passed to the function by AWS
     """
-    if not enhanced_metrics_enabled:
+    if not config.enhanced_metrics_enabled:
         logger.debug(
             "Not submitting enhanced metric %s because enhanced metrics are disabled",
             metric_name,

datadog_lambda/patch.py

Lines changed: 2 additions & 3 deletions
@@ -3,7 +3,6 @@
 # This product includes software developed at Datadog (https://www.datadoghq.com/).
 # Copyright 2019 Datadog, Inc.

-import os
 import sys
 import logging
 import zlib
@@ -13,6 +12,7 @@
 from wrapt.importer import when_imported
 from ddtrace import patch_all as patch_all_dd

+from datadog_lambda import config
 from datadog_lambda.tracing import (
     get_dd_trace_context,
     dd_tracing_enabled,
@@ -44,8 +44,7 @@ def _patch_for_integration_tests():
     Patch `requests` to log the outgoing requests for integration tests.
     """
     global _integration_tests_patched
-    is_in_tests = os.environ.get("DD_INTEGRATION_TEST", "false").lower() == "true"
-    if not _integration_tests_patched and is_in_tests:
+    if not _integration_tests_patched and config.is_in_tests:
         wrap("requests", "Session.send", _log_request)
         _integration_tests_patched = True

datadog_lambda/span_pointers.py

Lines changed: 2 additions & 7 deletions
@@ -1,28 +1,23 @@
 from itertools import chain
 import logging
-import os
 from typing import List
 from typing import Optional

 from ddtrace._trace._span_pointer import _SpanPointerDirection
 from ddtrace._trace._span_pointer import _SpanPointerDescription

+from datadog_lambda import config
 from datadog_lambda.metric import submit_dynamodb_stream_type_metric
 from datadog_lambda.trigger import EventTypes


 logger = logging.getLogger(__name__)


-dd_botocore_add_span_pointers = os.environ.get(
-    "DD_BOTOCORE_ADD_SPAN_POINTERS", "true"
-).lower() in ("true", "1")
-
-
 def calculate_span_pointers(
     event_source,
     event,
-    botocore_add_span_pointers=dd_botocore_add_span_pointers,
+    botocore_add_span_pointers=config.add_span_pointers,
 ) -> List[_SpanPointerDescription]:
     try:
         if botocore_add_span_pointers:
0 commit comments

Comments
 (0)