Skip to content

Commit ef89890

Browse files
authored
refactor(s3): Move all cache files to the cache/ folder (#833)
* ci: Refine variable for enabling get resources tags
  Signed-off-by: Vincent Boutour <vincent.boutour@datadoghq.com>
* chore: Using a common dir for cache stuff
  Signed-off-by: Vincent Boutour <vincent.boutour@datadoghq.com>
* chore: Fixing the failed_events folder prefix
  Signed-off-by: Vincent Boutour <vincent.boutour@datadoghq.com>
---------
Signed-off-by: Vincent Boutour <vincent.boutour@datadoghq.com>
1 parent 94fda6f commit ef89890

File tree

8 files changed

+68
-39
lines changed

8 files changed

+68
-39
lines changed

.github/workflows/integration_test.yml

Lines changed: 19 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,18 @@
1+
---
12
name: Integration tests
23

4+
permissions:
5+
actions: none
6+
checks: none
7+
contents: read
8+
deployments: none
9+
issues: none
10+
packages: none
11+
pages: none
12+
pull-requests: none
13+
repository-projects: none
14+
security-events: none
15+
316
on: [pull_request]
417

518
jobs:
@@ -9,10 +22,13 @@ jobs:
922
strategy:
1023
max-parallel: 4
1124
matrix:
12-
python-version: ['3.10', '3.11']
25+
python-version: ["3.10", "3.11"]
1326
steps:
14-
- name: Checkout source
15-
uses: actions/checkout@v3
27+
- name: Checkout
28+
uses: actions/checkout@v4
29+
with:
30+
ref: ${{ github.event.pull_request.head.sha }}
31+
fetch-depth: 0
1632

1733
- name: Run integration tests
1834
run: |

aws/logs_monitoring/caching/base_tags_cache.py

Lines changed: 11 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,19 @@
1-
import os
2-
import logging
31
import json
4-
from time import time
2+
import logging
3+
import os
54
from random import randint
5+
from time import time
6+
67
import boto3
78
from botocore.exceptions import ClientError
9+
10+
from caching.common import get_last_modified_time
811
from settings import (
912
DD_S3_BUCKET_NAME,
10-
DD_TAGS_CACHE_TTL_SECONDS,
13+
DD_S3_CACHE_DIRNAME,
1114
DD_S3_CACHE_LOCK_TTL_SECONDS,
15+
DD_TAGS_CACHE_TTL_SECONDS,
1216
)
13-
from caching.common import get_last_modified_time
1417
from telemetry import send_forwarder_internal_metrics
1518

1619
JITTER_MIN = 1
@@ -26,6 +29,7 @@ def __init__(
2629
cache_lock_filename,
2730
tags_ttl_seconds=DD_TAGS_CACHE_TTL_SECONDS,
2831
):
32+
self.cache_dirname = DD_S3_CACHE_DIRNAME
2933
self.tags_ttl_seconds = tags_ttl_seconds
3034
self.tags_by_id = {}
3135
self.last_tags_fetch_time = 0
@@ -43,10 +47,10 @@ def get_resources_paginator(self):
4347
return self.resource_tagging_client.get_paginator("get_resources")
4448

4549
def get_cache_name_with_prefix(self):
46-
return f"{self.cache_prefix}_{self.cache_filename}"
50+
return f"{self.cache_dirname}/{self.cache_prefix}_{self.cache_filename}"
4751

4852
def get_cache_lock_with_prefix(self):
49-
return f"{self.cache_prefix}_{self.cache_lock_filename}"
53+
return f"{self.cache_dirname}/{self.cache_prefix}_{self.cache_lock_filename}"
5054

5155
def write_cache_to_s3(self, data):
5256
"""Writes tags cache to s3"""

aws/logs_monitoring/caching/cloudwatch_log_group_cache.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,9 +6,11 @@
66

77
import boto3
88
from botocore.config import Config
9+
910
from caching.common import sanitize_aws_tag_string
1011
from settings import (
1112
DD_S3_BUCKET_NAME,
13+
DD_S3_CACHE_DIRNAME,
1214
DD_S3_LOG_GROUP_CACHE_DIRNAME,
1315
DD_TAGS_CACHE_TTL_SECONDS,
1416
)
@@ -20,7 +22,7 @@ def __init__(
2022
self,
2123
prefix,
2224
):
23-
self.cache_dirname = DD_S3_LOG_GROUP_CACHE_DIRNAME
25+
self.cache_dirname = f"{DD_S3_CACHE_DIRNAME}/{DD_S3_LOG_GROUP_CACHE_DIRNAME}"
2426
self.cache_ttl_seconds = DD_TAGS_CACHE_TTL_SECONDS
2527
self.bucket_name = DD_S3_BUCKET_NAME
2628
self.cache_prefix = prefix

aws/logs_monitoring/caching/lambda_cache.py

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,22 @@
11
import os
2+
23
from botocore.exceptions import ClientError
4+
35
from caching.base_tags_cache import BaseTagsCache
46
from caching.common import parse_get_resources_response_for_tags_by_arn
5-
from telemetry import send_forwarder_internal_metrics
67
from settings import (
7-
DD_S3_CACHE_FILENAME,
8-
DD_S3_CACHE_LOCK_FILENAME,
8+
DD_S3_LAMBDA_CACHE_FILENAME,
9+
DD_S3_LAMBDA_CACHE_LOCK_FILENAME,
910
GET_RESOURCES_LAMBDA_FILTER,
1011
)
12+
from telemetry import send_forwarder_internal_metrics
1113

1214

1315
class LambdaTagsCache(BaseTagsCache):
1416
def __init__(self, prefix):
15-
super().__init__(prefix, DD_S3_CACHE_FILENAME, DD_S3_CACHE_LOCK_FILENAME)
17+
super().__init__(
18+
prefix, DD_S3_LAMBDA_CACHE_FILENAME, DD_S3_LAMBDA_CACHE_LOCK_FILENAME
19+
)
1620

1721
def should_fetch_tags(self):
1822
return os.environ.get("DD_FETCH_LAMBDA_TAGS", "false").lower() == "true"

aws/logs_monitoring/retry/storage.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,12 @@
1-
import os
1+
import json
22
import logging
3+
import os
34
from time import time
4-
import json
5+
56
import boto3
67
from botocore.exceptions import ClientError
7-
from settings import DD_RETRY_PATH, DD_S3_BUCKET_NAME
8+
9+
from settings import DD_S3_BUCKET_NAME, DD_S3_RETRY_DIRNAME
810

911
logger = logging.getLogger(__name__)
1012
logger.setLevel(logging.getLevelName(os.environ.get("DD_LOG_LEVEL", "INFO").upper()))
@@ -76,7 +78,7 @@ def _fetch_data_for_key(self, key):
7678
return None
7779

7880
def _get_key_prefix(self, retry_prefix):
79-
return f"{DD_RETRY_PATH}/{self.function_prefix}/{str(retry_prefix)}/"
81+
return f"{DD_S3_RETRY_DIRNAME}/{self.function_prefix}/{str(retry_prefix)}/"
8082

8183
def _serialize(self, data):
8284
return bytes(json.dumps(data).encode("UTF-8"))

aws/logs_monitoring/settings.py

Lines changed: 13 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -4,11 +4,11 @@
44
# Copyright 2021 Datadog, Inc.
55

66
import base64
7+
import logging
78
import os
89

910
import boto3
1011
import botocore.config
11-
import logging
1212

1313
logger = logging.getLogger()
1414
logger.setLevel(logging.getLevelName(os.environ.get("DD_LOG_LEVEL", "INFO").upper()))
@@ -255,15 +255,20 @@ def __init__(self, name, pattern, placeholder):
255255
DD_ADDITIONAL_TARGET_LAMBDAS = get_env_var("DD_ADDITIONAL_TARGET_LAMBDAS", default=None)
256256

257257
DD_S3_BUCKET_NAME = get_env_var("DD_S3_BUCKET_NAME", default=None)
258+
258259
# These default cache names remain unchanged so we can get existing cache data for these
259-
DD_S3_CACHE_FILENAME = "cache.json"
260-
DD_S3_CACHE_LOCK_FILENAME = "cache.lock"
260+
DD_S3_CACHE_DIRNAME = "cache"
261+
262+
DD_S3_LAMBDA_CACHE_FILENAME = "lambda.json"
263+
DD_S3_LAMBDA_CACHE_LOCK_FILENAME = "lambda.lock"
264+
261265
DD_S3_STEP_FUNCTIONS_CACHE_FILENAME = "step-functions-cache.json"
262266
DD_S3_STEP_FUNCTIONS_CACHE_LOCK_FILENAME = "step-functions-cache.lock"
263-
DD_S3_TAGS_CACHE_FILENAME = "s3-cache.json"
264-
DD_S3_TAGS_CACHE_LOCK_FILENAME = "s3-cache.lock"
265267

266-
DD_S3_LOG_GROUP_CACHE_DIRNAME = "log-group-cache"
268+
DD_S3_TAGS_CACHE_FILENAME = "s3.json"
269+
DD_S3_TAGS_CACHE_LOCK_FILENAME = "s3.lock"
270+
271+
DD_S3_LOG_GROUP_CACHE_DIRNAME = "log-group"
267272

268273
DD_TAGS_CACHE_TTL_SECONDS = int(get_env_var("DD_TAGS_CACHE_TTL_SECONDS", default=300))
269274
DD_S3_CACHE_LOCK_TTL_SECONDS = 60
@@ -272,7 +277,7 @@ def __init__(self, name, pattern, placeholder):
272277
GET_RESOURCES_S3_FILTER = "s3:bucket"
273278

274279

275-
# Retyer
276-
DD_RETRY_PATH = "failed_events"
280+
# Retryer
281+
DD_S3_RETRY_DIRNAME = "failed_events"
277282
DD_RETRY_KEYWORD = "retry"
278283
DD_STORE_FAILED_EVENTS = get_env_var("DD_STORE_FAILED_EVENTS", "false", boolean=True)

aws/logs_monitoring/template.yaml

Lines changed: 7 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -327,6 +327,9 @@ Conditions:
327327
- !Equals [!Ref ReservedConcurrency, ""]
328328
ShouldUseAccessLogBucket: !Not
329329
- !Equals [!Ref DdForwarderBucketsAccessLogsTarget, ""]
330+
ShouldDdFetchTags: !Or
331+
- !Equals [!Ref DdFetchLambdaTags, true]
332+
- !Equals [!Ref DdFetchStepFunctionsTags, true]
330333
SetForwarderBucket: !Or
331334
- !Condition CreateS3Bucket
332335
- !Not
@@ -583,8 +586,8 @@ Resources:
583586
Condition:
584587
StringLike:
585588
s3:prefix:
586-
- "retry/*"
587-
- "log-group-cache/*"
589+
- "failed_events/*"
590+
- "cache/*"
588591
Effect: Allow
589592
- !Ref AWS::NoValue
590593
- Action:
@@ -606,8 +609,9 @@ Resources:
606609
- !Sub "${DdApiKeySecretArn}*"
607610
Effect: Allow
608611
# Fetch Lambda resource tags for data enrichment
612+
# Fetch Step Functions resource tags for data enrichment
609613
- !If
610-
- SetDdFetchLambdaTags
614+
- ShouldDdFetchTags
611615
- Action:
612616
- tag:GetResources
613617
Resource: "*"
@@ -621,14 +625,6 @@ Resources:
621625
Resource: "*"
622626
Effect: Allow
623627
- !Ref AWS::NoValue
624-
# Fetch Step Functions resource tags for data enrichment
625-
- !If
626-
- SetDdFetchStepFunctionsTags
627-
- Action:
628-
- tag:GetResources
629-
Resource: "*"
630-
Effect: Allow
631-
- !Ref AWS::NoValue
632628
# Required for Lambda deployed in VPC
633629
- !If
634630
- UseVPC

aws/logs_monitoring/tools/build_bundle.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
3434
cd $DIR
3535

3636
# Read the desired version
37-
if [ -z "$1" ]; then
37+
if [[ -z ${1:-} ]]; then
3838
log_error "Must specify a desired version number"
3939
elif [[ ! $1 =~ [0-9]+\.[0-9]+\.[0-9]+ ]]; then
4040
log_error "Must use a semantic version, e.g., 3.1.4"

0 commit comments

Comments (0)