diff --git a/.github/actions/api-deploy-ecs/action.yml b/.github/actions/api-deploy-ecs/action.yml index deaf762efe4e..10b94d1d104f 100644 --- a/.github/actions/api-deploy-ecs/action.yml +++ b/.github/actions/api-deploy-ecs/action.yml @@ -62,6 +62,10 @@ inputs: description: The flagsmith-rbac git revision to use when building deployment package. required: false default: main + sse_pgp_private_key: + description: Private PGP key used for encrypting/decrypting access logs + required: true + outputs: api_ecr_image_url: description: The image URL from ECR @@ -147,9 +151,11 @@ runs: ECR_REPOSITORY: ${{ inputs.aws_ecr_repository_arn }} DOCKER_BUILDKIT: '1' run: | + echo "Load pgp private key" + cat <<< "${{ inputs.sse_pgp_private_key }}" > sse_pgp_pkey echo "Building docker image with URL: " echo $ECR_REPOSITORY:$IMAGE_TAG - docker build -t $ECR_REPOSITORY:$IMAGE_TAG -f api/Dockerfile --build-arg SAML_INSTALLED=1 --build-arg POETRY_OPTS="--with saml,auth-controller" . + docker build --secret id=sse_pgp_pkey,src=./sse_pgp_pkey -t $ECR_REPOSITORY:$IMAGE_TAG -f api/Dockerfile --build-arg SAML_INSTALLED=1 --build-arg POETRY_OPTS="--with saml,auth-controller" . 
docker push $ECR_REPOSITORY:$IMAGE_TAG echo "image=$ECR_REPOSITORY:$IMAGE_TAG" >> $GITHUB_OUTPUT shell: bash diff --git a/.github/workflows/api-deploy-production-ecs.yml b/.github/workflows/api-deploy-production-ecs.yml index e033d36069ac..2a86d51d349f 100644 --- a/.github/workflows/api-deploy-production-ecs.yml +++ b/.github/workflows/api-deploy-production-ecs.yml @@ -50,6 +50,7 @@ jobs: aws_ecs_service_name: flagsmith-task-processor-svc-eu-west-2-bf77140 aws_task_definitions_directory_path: infrastructure/aws/production api_ecr_image_url: ${{ steps.deploy-api.outputs.api_ecr_image_url }} + sse_pgp_private_key: ${{ secrets.SSE_PGP_PRIVATE_KEY }} run-tests: runs-on: ubuntu-latest diff --git a/.github/workflows/api-deploy-staging-ecs.yml b/.github/workflows/api-deploy-staging-ecs.yml index 1418b5fc8c18..193b01d721c0 100644 --- a/.github/workflows/api-deploy-staging-ecs.yml +++ b/.github/workflows/api-deploy-staging-ecs.yml @@ -51,6 +51,7 @@ jobs: aws_ecs_service_name: flagsmith-task-processor-svc-eu-west-2-792c644 aws_task_definitions_directory_path: infrastructure/aws/staging api_ecr_image_url: ${{ steps.deploy-api.outputs.api_ecr_image_url }} + sse_pgp_private_key: ${{ secrets.SSE_PGP_PRIVATE_KEY }} run-tests: runs-on: ubuntu-latest diff --git a/api/.env-ci b/api/.env-ci index be4eee0b55a2..5238897b83db 100644 --- a/api/.env-ci +++ b/api/.env-ci @@ -1,3 +1,10 @@ DATABASE_URL=postgresql://postgres:postgres@localhost:5432/postgres ANALYTICS_DATABASE_URL=postgres://postgres:postgres@localhost:5432/analytics PYTEST_ADDOPTS=--cov . 
--cov-report xml -n auto --dist worksteal + +# used by moto # ref https://github.com/getmoto/moto/issues/5941 +AWS_ACCESS_KEY_ID=testing +AWS_SECRET_ACCESS_KEY=testing +AWS_SECURITY_TOKEN=testing +AWS_SESSION_TOKEN=testing +AWS_DEFAULT_REGION=eu-west-2 diff --git a/api/Dockerfile b/api/Dockerfile index abd1af5c3c8b..161244e3fb5a 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -29,6 +29,11 @@ RUN if [ "${SAML_INSTALLED}" = "1" ]; then apt-get update && apt-get install -y ARG TARGETARCH RUN if [ "${TARGETARCH}" != "amd64" ]; then apt-get update && apt-get install -y libpq-dev && rm -rf /var/lib/apt/lists/*; fi; +# Install GnuPG(and import private key) if secret file exists +RUN --mount=type=secret,id=sse_pgp_pkey if [ -f /run/secrets/sse_pgp_pkey ]; then \ + apt-get update && apt-get install -y gnupg && gpg --import /run/secrets/sse_pgp_pkey; fi; + + # Copy the python venv from step 2 COPY --from=build-python /usr/local/lib/python3.11/site-packages /usr/local/lib/python3.11/site-packages # Copy the bin folder as well to copy the executables created in package installation diff --git a/api/app/settings/common.py b/api/app/settings/common.py index dfbb84f0cbc6..ee6f0399d5b6 100644 --- a/api/app/settings/common.py +++ b/api/app/settings/common.py @@ -907,6 +907,9 @@ # Real time(server sent events) settings SSE_SERVER_BASE_URL = env.str("SSE_SERVER_BASE_URL", None) SSE_AUTHENTICATION_TOKEN = env.str("SSE_AUTHENTICATION_TOKEN", None) +AWS_SSE_LOGS_BUCKET_NAME = env.str("AWS_SSE_LOGS_BUCKET_NAME", None) +SSE_INFLUXDB_BUCKET = env.str("SSE_INFLUXDB_BUCKET", None) + DISABLE_INVITE_LINKS = env.bool("DISABLE_INVITE_LINKS", False) diff --git a/api/app/settings/test.py b/api/app/settings/test.py index 50bd5effc573..799d4bf6469d 100644 --- a/api/app/settings/test.py +++ b/api/app/settings/test.py @@ -11,3 +11,5 @@ "signup": "100/min", "user": "100000/day", } + +AWS_SSE_LOGS_BUCKET_NAME = "test_bucket" diff --git a/api/poetry.lock b/api/poetry.lock index 
90cccc4edf42..40135a469ae3 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "aiohttp" @@ -3316,6 +3316,17 @@ files = [ [package.extras] cli = ["click (>=5.0)"] +[[package]] +name = "python-gnupg" +version = "0.5.1" +description = "A wrapper for the Gnu Privacy Guard (GPG or GnuPG)" +optional = false +python-versions = "*" +files = [ + {file = "python-gnupg-0.5.1.tar.gz", hash = "sha256:5674bad4e93876c0b0d3197e314d7f942d39018bf31e2b833f6788a6813c3fb8"}, + {file = "python_gnupg-0.5.1-py2.py3-none-any.whl", hash = "sha256:bf9b2d9032ef38139b7d64184176cd0b293eaeae6e4f93f50e304c7051174482"}, +] + [[package]] name = "python-http-client" version = "3.3.7" @@ -3391,6 +3402,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -3398,8 +3410,15 @@ files = [ {file = 
"PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -3416,6 +3435,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -3423,6 +3443,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = 
"PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -4371,5 +4392,5 @@ requests = ">=2.7,<3.0" [metadata] lock-version = "2.0" -python-versions = "^3.10" -content-hash = "dc66f4669e76325b8135718c55c95e68bb11805e181c80961cea58af5fc9b2bb" +python-versions = ">=3.10,<3.12" +content-hash = "a11fec03de5c44c28448991c66bfc740cf0b97fa5d0a71629cd9a7a95b49f81d" diff --git a/api/pyproject.toml b/api/pyproject.toml index 929d1dcac786..096081d8cf05 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -102,6 +102,7 @@ django-axes = "~5.32.0" pydantic = "~1.10.9" pyngo = "~1.6.0" flagsmith = "^3.4.0" +python-gnupg = "^0.5.1" [tool.poetry.group.auth-controller] optional = true diff --git a/api/sse/dataclasses.py b/api/sse/dataclasses.py new file mode 100644 index 000000000000..fa070bff8eb2 --- /dev/null +++ b/api/sse/dataclasses.py @@ -0,0 +1,7 @@ +from dataclasses import dataclass + + +@dataclass(eq=True) +class SSEAccessLogs: + generated_at: str # ISO 8601 + api_key: str diff --git a/api/sse/sse_service.py b/api/sse/sse_service.py index b862cbac6551..3ed5ea6cf49a 100644 --- a/api/sse/sse_service.py +++ b/api/sse/sse_service.py @@ -1,8 +1,16 @@ +import csv from functools import wraps +from io import StringIO +from typing import Generator +import boto3 +import gnupg from django.conf import settings -from . 
import tasks +from sse import tasks +from sse.dataclasses import SSEAccessLogs + +s3 = boto3.resource("s3") def _sse_enabled(get_project_from_first_arg=lambda obj: obj.project): @@ -43,3 +51,18 @@ def send_environment_update_message_for_environment(environment): tasks.send_environment_update_message.delay( args=(environment.api_key, environment.updated_at.isoformat()) ) + + +def stream_access_logs() -> Generator[SSEAccessLogs, None, None]: + gpg = gnupg.GPG() + bucket = s3.Bucket(settings.AWS_SSE_LOGS_BUCKET_NAME) + for log_file in bucket.objects.all(): + encrypted_body = log_file.get()["Body"].read() + decrypted_body = gpg.decrypt(encrypted_body) + + reader = csv.reader(StringIO(decrypted_body.data.decode())) + + for row in reader: + yield SSEAccessLogs(*row) + + log_file.delete() diff --git a/api/sse/tasks.py b/api/sse/tasks.py index 3710f53395a8..162286a59f40 100644 --- a/api/sse/tasks.py +++ b/api/sse/tasks.py @@ -1,11 +1,23 @@ +import logging +from datetime import timedelta + import requests +from app_analytics.influxdb_wrapper import influxdb_client from django.conf import settings +from influxdb_client import Point, WriteOptions +from environments.models import Environment from projects.models import Project -from task_processor.decorators import register_task_handler +from sse import sse_service +from task_processor.decorators import ( + register_recurring_task, + register_task_handler, +) from .exceptions import SSEAuthTokenNotSet +logger = logging.getLogger(__name__) + @register_task_handler() def send_environment_update_message_for_project( @@ -27,6 +39,50 @@ def send_environment_update_message(environment_key: str, updated_at): response.raise_for_status() +if settings.AWS_SSE_LOGS_BUCKET_NAME: + + @register_recurring_task( + run_every=timedelta(minutes=5), + ) + def update_sse_usage(): + agg_request_count: dict[str, int] = {} + agg_last_event_generated_at: dict[str, str] = {} + + for log in sse_service.stream_access_logs(): + 
agg_request_count[log.api_key] = agg_request_count.get(log.api_key, 0) + 1 + agg_last_event_generated_at[log.api_key] = log.generated_at + + with influxdb_client.write_api( + write_options=WriteOptions(batch_size=100, flush_interval=1000) + ) as write_api: + environments = Environment.objects.filter( + api_key__in=agg_request_count.keys() + ).values("api_key", "id", "project_id", "project__organisation_id") + + for environment in environments: + record = _get_influx_point( + environment["id"], + environment["project_id"], + environment["project__organisation_id"], + agg_request_count[environment["api_key"]], + agg_last_event_generated_at[environment["api_key"]], + ) + write_api.write(bucket=settings.SSE_INFLUXDB_BUCKET, record=record) + + +def _get_influx_point( + environment_id: int, project_id: int, organisation_id: int, count: int, time: str +) -> Point: + return ( + Point("sse_call") + .field("request_count", count) + .tag("organisation_id", organisation_id) + .tag("project_id", project_id) + .tag("environment_id", environment_id) + .time(time) + ) + + def get_auth_header(): if not settings.SSE_AUTHENTICATION_TOKEN: raise SSEAuthTokenNotSet() diff --git a/api/tests/unit/sse/test_sse_service.py b/api/tests/unit/sse/test_sse_service.py index 9bd0a4a7a0d5..2bfc79988d5e 100644 --- a/api/tests/unit/sse/test_sse_service.py +++ b/api/tests/unit/sse/test_sse_service.py @@ -1,9 +1,17 @@ +import boto3 import pytest +from django.conf import settings +from moto import mock_s3 +from moto.core import patch_resource from pytest_lazyfixture import lazy_fixture +from pytest_mock import MockerFixture +from sse.dataclasses import SSEAccessLogs from sse.sse_service import ( + s3, send_environment_update_message_for_environment, send_environment_update_message_for_project, + stream_access_logs, ) @@ -96,3 +104,64 @@ def test_send_environment_update_message_for_environment_schedules_task_correctl realtime_enabled_project_environment_one.updated_at.isoformat(), ) ) + + +@mock_s3 +def 
test_stream_access_logs(mocker: MockerFixture): + # Given - Some test data + first_log = SSEAccessLogs("2023-11-27T06:42:47+0000", "key_one") + second_log = SSEAccessLogs("2023-11-27T06:42:47+0000", "key_two") + third_log = SSEAccessLogs("2023-11-27T06:42:47+0000", "key_three") + + first_encrypted_object_data = b"first_bucket_encrypted_data" + first_decrypted_object_data = ( + f"{first_log.generated_at},{first_log.api_key}\n" + f"{second_log.generated_at},{second_log.api_key}".encode() + ) + second_encrypted_object_data = b"second_bucket_encrypted_data" + second_decrypted_object_data = ( + f"{third_log.generated_at},{third_log.api_key}".encode() + ) + + # patch the s3 resource because it was created before the mock_s3 decorator was applied + # ref: https://docs.getmoto.org/en/latest/docs/getting_started.html#patching-the-client-or-resource + patch_resource(s3) + + # Next, let's create a bucket + bucket_name = settings.AWS_SSE_LOGS_BUCKET_NAME + s3_client = boto3.client("s3", region_name="eu-west-2") + s3_client.create_bucket( + Bucket=bucket_name, + CreateBucketConfiguration={"LocationConstraint": "eu-west-2"}, + ) + # put some objects + s3_client.put_object( + Body=first_encrypted_object_data, Bucket=bucket_name, Key="first_object" + ) + s3_client.put_object( + Body=second_encrypted_object_data, Bucket=bucket_name, Key="second_object" + ) + + mocked_gpg = mocker.patch("sse.sse_service.gnupg.GPG", autospec=True) + + mocked_gpg.return_value.decrypt.side_effect = [ + mocker.MagicMock(data=first_decrypted_object_data), + mocker.MagicMock(data=second_decrypted_object_data), + ] + + # When + access_logs = list(stream_access_logs()) + + # Then + assert access_logs == [first_log, second_log, third_log] + + # gpg decrypt was called correctly + mocked_gpg.return_value.decrypt.assert_has_calls( + [ + mocker.call(first_encrypted_object_data), + mocker.call(second_encrypted_object_data), + ] + ) + + # And, bucket is now empty + assert "Contents" not in 
s3_client.list_objects(Bucket=bucket_name) diff --git a/api/tests/unit/sse/test_tasks.py b/api/tests/unit/sse/test_tasks.py index 494ddf345493..b0e1d76a541b 100644 --- a/api/tests/unit/sse/test_tasks.py +++ b/api/tests/unit/sse/test_tasks.py @@ -1,12 +1,19 @@ from datetime import datetime +from typing import Callable +from unittest.mock import call import pytest +from pytest_django.fixtures import SettingsWrapper +from pytest_mock import MockerFixture +from environments.models import Environment +from sse.dataclasses import SSEAccessLogs from sse.exceptions import SSEAuthTokenNotSet from sse.tasks import ( get_auth_header, send_environment_update_message, send_environment_update_message_for_project, + update_sse_usage, ) @@ -79,3 +86,55 @@ def test_auth_header_raises_exception_if_token_not_set(settings): # When with pytest.raises(SSEAuthTokenNotSet): get_auth_header() + + +def test_track_sse_usage( + mocker: MockerFixture, + environment: Environment, + django_assert_num_queries: Callable, + settings: SettingsWrapper, +): + # Given - two valid logs + first_access_log = SSEAccessLogs(datetime.now().isoformat(), environment.api_key) + second_access_log = SSEAccessLogs(datetime.now().isoformat(), environment.api_key) + + # and, another log with invalid api key + third_access_log = SSEAccessLogs(datetime.now().isoformat(), "third_key") + + mocker.patch( + "sse.sse_service.stream_access_logs", + return_value=[first_access_log, second_access_log, third_access_log], + ) + influxdb_bucket = "test_bucket" + settings.SSE_INFLUXDB_BUCKET = influxdb_bucket + + mocked_influx_db_client = mocker.patch("sse.tasks.influxdb_client") + mocked_influx_point = mocker.patch("sse.tasks.Point") + + # When + with django_assert_num_queries(1): + update_sse_usage() + + # Then + # Point was generated correctly + mocked_influx_point.assert_has_calls( + [ + call("sse_call"), + call().field("request_count", 2), + call().field().tag("organisation_id", environment.project.organisation_id), + 
call().field().tag().tag("project_id", environment.project_id), + call().field().tag().tag().tag("environment_id", environment.id), + call().field().tag().tag().tag().time(second_access_log.generated_at), + ] + ) + + # Only valid logs were written to InfluxDB + write_method = ( + mocked_influx_db_client.write_api.return_value.__enter__.return_value.write + ) + + assert write_method.call_count == 1 + write_method.assert_called_once_with( + bucket=influxdb_bucket, + record=mocked_influx_point().field().tag().tag().tag().time(), + ) diff --git a/infrastructure/aws/production/ecs-task-definition-task-processor.json b/infrastructure/aws/production/ecs-task-definition-task-processor.json index 9ed1c7efdb01..7581bae21d08 100644 --- a/infrastructure/aws/production/ecs-task-definition-task-processor.json +++ b/infrastructure/aws/production/ecs-task-definition-task-processor.json @@ -71,6 +71,14 @@ "name": "INFLUXDB_URL", "value": "https://eu-central-1-1.aws.cloud2.influxdata.com" }, + { + "name": "SSE_INFLUXDB_BUCKET", + "value": "sse_prod" + }, + { + "name": "AWS_SSE_LOGS_BUCKET_NAME", + "value": "flagsmith-fastly-logs-production" + }, { "name": "OAUTH_CLIENT_ID", "value": "232959427810-br6ltnrgouktp0ngsbs04o14ueb9rch0.apps.googleusercontent.com" diff --git a/infrastructure/aws/staging/ecs-task-definition-task-processor.json b/infrastructure/aws/staging/ecs-task-definition-task-processor.json index fd478584a024..3917b51369a8 100644 --- a/infrastructure/aws/staging/ecs-task-definition-task-processor.json +++ b/infrastructure/aws/staging/ecs-task-definition-task-processor.json @@ -114,6 +114,22 @@ { "name": "TASK_DELETE_BATCH_SIZE", "value": "100000" + }, + { + "name": "INFLUXDB_ORG", + "value": "ben.rometsch@bullet-train.io" + }, + { + "name": "INFLUXDB_URL", + "value": "https://eu-central-1-1.aws.cloud2.influxdata.com" + }, + { + "name": "SSE_INFLUXDB_BUCKET", + "value": "sse_staging" + }, + { + "name": "AWS_SSE_LOGS_BUCKET_NAME", + "value": "flagsmith-fastly-logs-staging" 
} ], "secrets": [ @@ -160,6 +176,10 @@ { "name": "PIPEDRIVE_API_TOKEN", "valueFrom": "arn:aws:secretsmanager:eu-west-2:302456015006:secret:ECS-API-heAdoB:PIPEDRIVE_API_TOKEN::" + }, + { + "name": "INFLUXDB_TOKEN", + "valueFrom": "arn:aws:secretsmanager:eu-west-2:302456015006:secret:ECS-API-heAdoB:INFLUXDB_TOKEN::" } ], "logConfiguration": {