diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml
new file mode 100644
index 0000000..b5b4406
--- /dev/null
+++ b/.github/workflows/test.yaml
@@ -0,0 +1,9 @@
+name: Tests
+
+on:
+  pull_request:
+
+jobs:
+  unit-tests:
+    uses: canonical/operator-workflows/.github/workflows/test.yaml@cea2ac306b4f4c1475d73b1a4c766d62e5b1c8a9
+    secrets: inherit
diff --git a/.licenserc.yaml b/.licenserc.yaml
new file mode 100644
index 0000000..5aa32de
--- /dev/null
+++ b/.licenserc.yaml
@@ -0,0 +1,37 @@
+header:
+  license:
+    spdx-id: Apache-2.0
+    copyright-owner: Canonical Ltd.
+    copyright-year: 2024
+    content: |
+      Copyright [year] [owner]
+      See LICENSE file for licensing details.
+  paths:
+    - '**'
+  paths-ignore:
+    - '.github/**'
+    - '**/.gitkeep'
+    - '**/*.cfg'
+    - '**/*.conf'
+    - '**/*.j2'
+    - '**/*.json'
+    - '**/*.md'
+    - '**/*.rule'
+    - '**/*.tmpl'
+    - '**/*.txt'
+    - '**/*.jinja'
+    - '.codespellignore'
+    - '.dockerignore'
+    - '.flake8'
+    - '.jujuignore'
+    - '.gitignore'
+    - '.licenserc.yaml'
+    - '.trivyignore'
+    - '.woke.yaml'
+    - '.woke.yml'
+    - 'CODEOWNERS'
+    - 'icon.svg'
+    - 'LICENSE'
+    - 'trivy.yaml'
+    - 'lib/**'
+  comment: on-failure
diff --git a/.woke.yaml b/.woke.yaml
new file mode 100644
index 0000000..18a5868
--- /dev/null
+++ b/.woke.yaml
@@ -0,0 +1,9 @@
+ignore_files:
+  # Ignore the data platform charm libraries as they use non-compliant
+  # terminology: whitelist.
+  - lib/charms/data_platform_libs/v0/data_models.py
+  - lib/charms/data_platform_libs/v0/database_requires.py
+  - lib/charms/data_platform_libs/v0/s3.py
+rules:
+  # Ignore "master" - the database relation event received from the library.
+  - name: master
diff --git a/dev-requirements.txt b/dev-requirements.txt
new file mode 100644
index 0000000..5c6c3e2
--- /dev/null
+++ b/dev-requirements.txt
@@ -0,0 +1 @@
+tox==4.15.1
diff --git a/pyproject.toml b/pyproject.toml
index e10531c..724f93d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,3 +1,11 @@
+# Copyright 2024 Canonical Ltd.
+# See LICENSE file for licensing details.
+
+[tool.bandit]
+exclude_dirs = ["/venv/"]
+[tool.bandit.assert_used]
+skips = ["*/*test.py", "*/test_*.py", "*tests/*.py"]
+
 # Testing tools configuration
 [tool.coverage.run]
 branch = true
@@ -11,36 +19,26 @@ log_cli_level = "INFO"
 
 # Formatting tools configuration
 [tool.black]
-line-length = 99
+line-length = 120
 target-version = ["py38"]
 
-# Linting tools configuration
-[tool.ruff]
-line-length = 99
-select = ["E", "W", "F", "C", "N", "D", "I001"]
-extend-ignore = [
-    "D203",
-    "D204",
-    "D213",
-    "D215",
-    "D400",
-    "D404",
-    "D406",
-    "D407",
-    "D408",
-    "D409",
-    "D413",
-]
-ignore = ["E501", "D107"]
-extend-exclude = ["__pycache__", "*.egg_info"]
-per-file-ignores = {"tests/*" = ["D100","D101","D102","D103","D104"]}
+[tool.isort]
+profile = "black"
 
-[tool.ruff.mccabe]
+# Linting tools configuration
+[tool.flake8]
+max-line-length = 120
+max-doc-length = 99
 max-complexity = 10
-
-[tool.codespell]
-skip = "build,lib,venv,icon.svg,.tox,.git,.mypy_cache,.ruff_cache,.coverage"
-
-[tool.pyright]
-include = ["src/**.py"]
-
+exclude = [".git", "__pycache__", ".tox", "build", "dist", "*.egg_info", "venv"]
+select = ["E", "W", "F", "C", "N", "R", "D", "H"]
+# Ignore W503, E501 because using black creates errors with this
+# Ignore D107 Missing docstring in __init__
+ignore = ["W503", "E501", "D107"]
+# D100, D101, D102, D103: Ignore missing docstrings in tests
+per-file-ignores = ["tests/*:D100,D101,D102,D103,D104"]
+docstring-convention = "google"
+# Check for properly formatted copyright header in each file
+copyright-check = "True"
+copyright-author = "Canonical Ltd."
+copyright-regexp = "Copyright\\s\\d{4}([-,]\\d{4})*\\s+%(author)s"
diff --git a/src/relations/airbyte_ui.py b/src/relations/airbyte_ui.py
index 434aa95..18fdf69 100644
--- a/src/relations/airbyte_ui.py
+++ b/src/relations/airbyte_ui.py
@@ -5,10 +5,11 @@
 
 import logging
 
-from log import log_event_handler
 from ops import framework
 from ops.model import ActiveStatus
 
+from log import log_event_handler
+
 logger = logging.getLogger(__name__)
@@ -23,12 +24,8 @@ def __init__(self, charm):
         """
         super().__init__(charm, "airbyte-server")
         self.charm = charm
-        charm.framework.observe(
-            charm.on.airbyte_server_relation_joined, self._on_airbyte_server_relation_joined
-        )
-        charm.framework.observe(
-            charm.on.airbyte_server_relation_changed, self._on_airbyte_server_relation_joined
-        )
+        charm.framework.observe(charm.on.airbyte_server_relation_joined, self._on_airbyte_server_relation_joined)
+        charm.framework.observe(charm.on.airbyte_server_relation_changed, self._on_airbyte_server_relation_joined)
 
     @log_event_handler(logger)
     def _on_airbyte_server_relation_joined(self, event):
diff --git a/src/relations/minio.py b/src/relations/minio.py
index d2c12cd..7196619 100644
--- a/src/relations/minio.py
+++ b/src/relations/minio.py
@@ -5,12 +5,17 @@
 
 import logging
 
-from charm_helpers import construct_svc_endpoint
 from charmed_kubeflow_chisme.exceptions import ErrorWithStatus
-from log import log_event_handler
 from ops import framework
 from ops.model import BlockedStatus, WaitingStatus
-from serialized_data_interface import NoCompatibleVersions, NoVersionsListed, get_interfaces
+from serialized_data_interface import (
+    NoCompatibleVersions,
+    NoVersionsListed,
+    get_interfaces,
+)
+
+from charm_helpers import construct_svc_endpoint
+from log import log_event_handler
 
 logger = logging.getLogger(__name__)
@@ -28,15 +33,9 @@ def __init__(self, charm):
         self.charm = charm
 
         # Handle minio relation.
-        charm.framework.observe(
-            charm.on.object_storage_relation_joined, self._on_object_storage_relation_changed
-        )
-        charm.framework.observe(
-            charm.on.object_storage_relation_changed, self._on_object_storage_relation_changed
-        )
-        charm.framework.observe(
-            charm.on.object_storage_relation_broken, self._on_object_storage_relation_broken
-        )
+        charm.framework.observe(charm.on.object_storage_relation_joined, self._on_object_storage_relation_changed)
+        charm.framework.observe(charm.on.object_storage_relation_changed, self._on_object_storage_relation_changed)
+        charm.framework.observe(charm.on.object_storage_relation_broken, self._on_object_storage_relation_broken)
 
     @log_event_handler(logger)
     def _on_object_storage_relation_changed(self, event):
@@ -90,22 +89,37 @@ def _on_object_storage_relation_broken(self, event) -> None:
         self.charm._update(event)
 
     def _get_interfaces(self):
-        """Retrieve interface object."""
+        """Retrieve interface object.
+
+        Returns:
+            list of charm interfaces.
+
+        Raises:
+            ErrorWithStatus: if an anticipated error occurs.
+        """
         try:
             charm = self.charm
-            # Hack: get_interfaces checks for peer relation which does not exist under requires/provides list in charmcraft.yaml
+            # Hack: get_interfaces checks for peer relation which does not exist under
+            # requires/provides list in charmcraft.yaml
             del charm.meta.relations["peer"]
             interfaces = get_interfaces(charm)
         except NoVersionsListed as err:
-            raise ErrorWithStatus(err, WaitingStatus)
+            raise ErrorWithStatus(err, WaitingStatus) from err
         except NoCompatibleVersions as err:
-            raise ErrorWithStatus(err, BlockedStatus)
+            raise ErrorWithStatus(err, BlockedStatus) from err
         return interfaces
 
     def _get_object_storage_data(self, interfaces):
         """Unpacks and returns the object-storage relation data.
 
-        Raises CheckFailedError if an anticipated error occurs.
+        Args:
+            interfaces: list of charm interfaces.
+
+        Returns:
+            object storage connection data.
+
+        Raises:
+            ErrorWithStatus: if an anticipated error occurs.
         """
         if not ((obj_storage := interfaces["object-storage"]) and obj_storage.get_data()):
             raise ErrorWithStatus("Waiting for object-storage relation data", WaitingStatus)
@@ -118,6 +132,6 @@ def _get_object_storage_data(self, interfaces):
                 f"Unexpected error unpacking object storage data - data format not "
                 f"as expected. Caught exception: '{str(e)}'",
                 BlockedStatus,
-            )
+            ) from e
 
         return obj_storage
diff --git a/src/relations/postgresql.py b/src/relations/postgresql.py
index 720f57b..03fa04c 100644
--- a/src/relations/postgresql.py
+++ b/src/relations/postgresql.py
@@ -6,11 +6,12 @@
 import logging
 
 from charms.data_platform_libs.v0.database_requires import DatabaseEvent
-from literals import DB_NAME
-from log import log_event_handler
 from ops import framework
 from ops.model import WaitingStatus
 
+from literals import DB_NAME
+from log import log_event_handler
+
 logger = logging.getLogger(__name__)
diff --git a/src/relations/s3.py b/src/relations/s3.py
index 54dbda9..c6e3c1c 100644
--- a/src/relations/s3.py
+++ b/src/relations/s3.py
@@ -10,9 +10,10 @@
     CredentialsChangedEvent,
     CredentialsGoneEvent,
 )
-from log import log_event_handler
 from ops import framework
 
+from log import log_event_handler
+
 logger = logging.getLogger(__name__)
@@ -27,9 +28,7 @@ def __init__(self, charm):
         """
         super().__init__(charm, "s3")
         self.charm = charm
-        charm.framework.observe(
-            charm.s3_client.on.credentials_changed, self._on_s3_credentials_changed
-        )
+        charm.framework.observe(charm.s3_client.on.credentials_changed, self._on_s3_credentials_changed)
         charm.framework.observe(charm.s3_client.on.credentials_gone, self._on_s3_credentials_gone)
 
     @log_event_handler(logger)
@@ -81,9 +80,7 @@ def _retrieve_s3_parameters(self):
             "access-key",
             "secret-key",
         ]
-        missing_required_parameters = [
-            param for param in required_parameters if param not in s3_parameters
-        ]
+        missing_required_parameters = [param for param in required_parameters if param not in s3_parameters]
         if missing_required_parameters:
             logger.warning(
                 f"Missing required S3 parameters in relation with S3 integrator: {missing_required_parameters}"
             )
@@ -104,9 +101,9 @@ def _retrieve_s3_parameters(self):
         # Clean up extra slash symbols to avoid issues on 3rd-party storages
         # like Ceph Object Gateway (radosgw).
         s3_parameters["endpoint"] = s3_parameters["endpoint"].rstrip("/")
-        s3_parameters["path"] = (
-            f'/{s3_parameters["path"].strip("/")}'  # The slash in the beginning is required by pgBackRest.
-        )
+        s3_parameters[
+            "path"
+        ] = f'/{s3_parameters["path"].strip("/")}'  # The slash in the beginning is required by pgBackRest.
         s3_parameters["bucket"] = s3_parameters["bucket"].strip("/")
 
         return s3_parameters, []
diff --git a/src/s3_helpers.py b/src/s3_helpers.py
index e1c332c..612f560 100644
--- a/src/s3_helpers.py
+++ b/src/s3_helpers.py
@@ -15,33 +15,33 @@ class S3Client:
     """Client for S3 operations."""
 
     def __init__(self, s3_parameters):
-        """Construct.
+        """Initialize an S3 connection using the provided parameters.
 
         Args:
-            charm: The charm to attach the hooks to.
+            s3_parameters: S3 connection parameters.
+
+        Raises:
+            ValueError: If a session fails to be created.
""" + self.s3_parameters = s3_parameters endpoint = s3_parameters.get("endpoint") session = boto3.session.Session( aws_access_key_id=s3_parameters.get("access-key"), aws_secret_access_key=s3_parameters.get("secret-key"), region_name=s3_parameters.get("region"), # Region can be optional for MinIO ) - self.s3_parameters = s3_parameters try: self.s3_resource = session.resource("s3", endpoint_url=endpoint) self.s3_client = session.client("s3", endpoint_url=endpoint) - except ValueError as e: - logger.exception( - "Failed to create a session in region=%s.", s3_parameters.get("region") - ) - raise e + except Exception as e: + logger.exception("Failed to create a session in region=%s.", s3_parameters.get("region")) + raise ValueError("Failed to create a session") from e def create_bucket_if_not_exists(self, bucket_name): """Create the S3 bucket if it does not exist. Args: - s3_parameters: S3 parameters fetched from the S3 integrator relation. - endpoint: S3 service endpoint. + bucket_name (str): name of bucket to create Raises: e (ValueError): if a session could not be created. @@ -56,9 +56,7 @@ def create_bucket_if_not_exists(self, bucket_name): except ClientError as e: error_code = int(e.response["Error"]["Code"]) if error_code == 404: - logger.warning( - "Bucket %s doesn't exist or you don't have access to it.", bucket_name - ) + logger.warning("Bucket %s doesn't exist or you don't have access to it.", bucket_name) exists = False else: logger.exception("Unexpected error: %s", e) @@ -70,12 +68,16 @@ def create_bucket_if_not_exists(self, bucket_name): s3_bucket.wait_until_exists() logger.info("Created bucket '%s' in region=%s", bucket_name, region) except ClientError as error: - logger.exception( - "Couldn't create bucket named '%s' in region=%s.", bucket_name, region - ) + logger.exception("Couldn't create bucket named '%s' in region=%s.", bucket_name, region) raise error - def set_bucket_lifecycle_policy(self, bucket, ttl): + def set_bucket_lifecycle_policy(self, bucket_name, ttl): + """Set lifecycle policy of bucket to purge files after a certain time. + + Args: + bucket_name: Name of bucket. + ttl: Time to live of logs (in days). + """ lifecycle_policy = { "Rules": [ { @@ -86,6 +88,4 @@ def set_bucket_lifecycle_policy(self, bucket, ttl): } ] } - self.s3_client.put_bucket_lifecycle_configuration( - Bucket=bucket, LifecycleConfiguration=lifecycle_policy - ) + self.s3_client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle_policy) diff --git a/src/scripts/pod-sweeper.sh b/src/scripts/pod-sweeper.sh index 7fb54b5..d5e0d0b 100755 --- a/src/scripts/pod-sweeper.sh +++ b/src/scripts/pod-sweeper.sh @@ -1,4 +1,6 @@ #!/bin/bash +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. 
 
 # https://github.com/airbytehq/airbyte-platform/blob/main/charts/airbyte-pod-sweeper/templates/configmap.yaml
 # TODO(kelkawi-a): Move this to Airbyte ROCK
@@ -51,7 +53,7 @@ do
         if [ "$POD_DATE" -lt "$RUNNING_DATE" ]; then
             delete_pod "$POD_NAME" "$POD_STATUS" "$POD_DATE_STR"
         fi
-    elif [ -n "${SUCCEEDED_TTL_MINUTES}" ] && ([[ "$POD_STATUS" = "Succeeded" ]] || [[ "$POD_STATUS" = "Completed" ]]); then
+    elif [ -n "${SUCCEEDED_TTL_MINUTES}" ] && { [[ "$POD_STATUS" = "Succeeded" ]] || [[ "$POD_STATUS" = "Completed" ]]; }; then
         if [ "$POD_DATE" -lt "$SUCCESS_DATE" ]; then
             delete_pod "$POD_NAME" "$POD_STATUS" "$POD_DATE_STR"
         fi
diff --git a/tests/integration/test_charm.py b/tests/integration/test_charm.py
index e8b264c..4b73607 100644
--- a/tests/integration/test_charm.py
+++ b/tests/integration/test_charm.py
@@ -1,5 +1,4 @@
-#!/usr/bin/env python3
-# Copyright 2024 Ali
+# Copyright 2024 Canonical Ltd.
 # See LICENSE file for licensing details.
 
 import asyncio
@@ -29,7 +28,5 @@ async def test_build_and_deploy(ops_test: OpsTest):
     # Deploy the charm and wait for active/idle status
     await asyncio.gather(
         ops_test.model.deploy(charm, resources=resources, application_name=APP_NAME),
-        ops_test.model.wait_for_idle(
-            apps=[APP_NAME], status="active", raise_on_blocked=True, timeout=1000
-        ),
+        ops_test.model.wait_for_idle(apps=[APP_NAME], status="active", raise_on_blocked=True, timeout=1000),
     )
diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py
new file mode 100644
index 0000000..9902dfe
--- /dev/null
+++ b/tests/unit/__init__.py
@@ -0,0 +1,9 @@
+# Copyright 2024 Canonical Ltd.
+# See LICENSE file for licensing details.
+
+
+"""Unit tests config."""
+
+import ops.testing
+
+ops.testing.SIMULATE_CAN_CONNECT = True
diff --git a/tests/unit/test_charm.py b/tests/unit/test_charm.py
index 6b5a15e..7368e18 100644
--- a/tests/unit/test_charm.py
+++ b/tests/unit/test_charm.py
@@ -1,68 +1,423 @@
-# Copyright 2024 Ali
+# Copyright 2024 Canonical Ltd.
 # See LICENSE file for licensing details.
 #
 # Learn more about testing at: https://juju.is/docs/sdk/testing
 
-import unittest
 
-import ops
-import ops.testing
+"""Charm unit tests."""
+
+# pylint:disable=protected-access,too-many-public-methods
+
+import logging
+from unittest import TestCase, mock
+
+from ops.model import ActiveStatus, BlockedStatus, MaintenanceStatus
+from ops.pebble import CheckStatus
+from ops.testing import Harness
+
 from charm import AirbyteK8SOperatorCharm
+from src.literals import BASE_ENV, CONTAINERS
+from src.structured_config import StorageType
+
+logging.basicConfig(level=logging.DEBUG)
+
+mock_incomplete_pebble_plan = {"services": {"airbyte": {"override": "replace"}}}
+
+MODEL_NAME = "airbyte-model"
+APP_NAME = "airbyte-k8s"
+
+minio_object_storage_data = {
+    "access-key": "access",
+    "secret-key": "secret",
+    "service": "service",
+    "port": "9000",
+    "namespace": "namespace",
+    "secure": False,
+    "endpoint": "endpoint",
+}
+
+
+class TestCharm(TestCase):
+    """Unit tests.
+
+    Attrs:
+        maxDiff: Specifies max difference shown by failed tests.
+ """ + + maxDiff = None -class TestCharm(unittest.TestCase): def setUp(self): - self.harness = ops.testing.Harness(AirbyteK8SOperatorCharm) + """Set up for the unit tests.""" + self.harness = Harness(AirbyteK8SOperatorCharm) self.addCleanup(self.harness.cleanup) + for container_name in list(CONTAINERS.keys()): + self.harness.set_can_connect(container_name, True) + self.harness.set_leader(True) + self.harness.set_model_name("airbyte-model") + self.harness.add_network("10.0.0.10", endpoint="peer") self.harness.begin() - def test_httpbin_pebble_ready(self): - # Expected plan after Pebble ready with default config - expected_plan = { - "services": { - "httpbin": { - "override": "replace", - "summary": "httpbin", - "command": "gunicorn -b 0.0.0.0:80 httpbin:app -k gevent", - "startup": "enabled", - "environment": {"GUNICORN_CMD_ARGS": "--log-level info"}, - } + def test_initial_plan(self): + """The initial pebble plan is empty.""" + harness = self.harness + for container_name in list(CONTAINERS.keys()): + initial_plan = harness.get_container_pebble_plan(container_name).to_dict() + self.assertEqual(initial_plan, {}) + + def test_blocked_by_peer_relation_not_ready(self): + """The charm is blocked without a peer relation.""" + harness = self.harness + + simulate_pebble_readiness(harness) + + # The BlockStatus is set with a message. + self.assertEqual(harness.model.unit.status, BlockedStatus("peer relation not ready")) + + def test_blocked_by_db(self): + """The charm is blocked without a db:pgsql relation with a ready master.""" + harness = self.harness + + # Simulate peer relation readiness. + harness.add_relation("peer", "airbyte") + + simulate_pebble_readiness(harness) + + # The BlockStatus is set with a message. + self.assertEqual( + harness.model.unit.status, + BlockedStatus("database relation not ready"), + ) + + def test_blocked_by_minio(self): + """The charm is blocked without a minio relation.""" + harness = self.harness + + # Simulate peer relation readiness. + harness.add_relation("peer", "airbyte") + + simulate_pebble_readiness(harness) + + # Simulate db readiness. + event = make_database_changed_event("db") + harness.charm.postgresql._on_database_changed(event) + + # The BlockStatus is set with a message. + self.assertEqual( + harness.model.unit.status, + BlockedStatus("minio relation not ready"), + ) + + def test_blocked_by_s3(self): + """The charm is blocked without a minio relation.""" + harness = self.harness + + harness.update_config({"storage-type": "S3"}) + + # Simulate peer relation readiness. + harness.add_relation("peer", "airbyte") + + simulate_pebble_readiness(harness) + + # Simulate db readiness. + event = make_database_changed_event("db") + harness.charm.postgresql._on_database_changed(event) + + # The BlockStatus is set with a message. + self.assertEqual( + harness.model.unit.status, + BlockedStatus("s3 relation not ready"), + ) + + def test_ready_with_minio(self): + """The pebble plan is correctly generated when the charm is ready.""" + harness = self.harness + harness.update_config({"storage-type": "MINIO"}) + + simulate_lifecycle(harness) + + # The plan is generated after pebble is ready. + for container_name in list(CONTAINERS.keys()): + want_plan = create_plan(container_name, "MINIO") + + got_plan = harness.get_container_pebble_plan(container_name).to_dict() + self.assertEqual(got_plan, want_plan) + + # The service was started. 
+            service = harness.model.unit.get_container(container_name).get_service(container_name)
+            self.assertTrue(service.is_running())
+
+        self.assertEqual(harness.model.unit.status, MaintenanceStatus("replanning application"))
+
+    def test_ready_with_s3(self):
+        """The pebble plan is correctly generated when the charm is ready."""
+        harness = self.harness
+        harness.update_config({"storage-type": "S3"})
+
+        simulate_lifecycle(harness)
+
+        # The plan is generated after pebble is ready.
+        for container_name in list(CONTAINERS.keys()):
+            want_plan = create_plan(container_name, "S3")
+
+            got_plan = harness.get_container_pebble_plan(container_name).to_dict()
+            self.assertEqual(got_plan, want_plan)
+
+            # The service was started.
+            service = harness.model.unit.get_container(container_name).get_service(container_name)
+            self.assertTrue(service.is_running())
+
+        self.assertEqual(harness.model.unit.status, MaintenanceStatus("replanning application"))
+
+    def test_update_status_up(self):
+        """The charm updates the unit status to active based on UP status."""
+        harness = self.harness
+
+        simulate_lifecycle(harness)
+
+        for container_name in list(CONTAINERS.keys()):
+            container = harness.model.unit.get_container(container_name)
+            if CONTAINERS[container_name]:
+                container.get_check = mock.Mock(status="up")
+                container.get_check.return_value.status = CheckStatus.UP
+
+        harness.charm.on.update_status.emit()
+        self.assertEqual(harness.model.unit.status, ActiveStatus())
+
+    def test_update_status_down(self):
+        """The charm updates the unit status to maintenance based on DOWN status."""
+        harness = self.harness
+
+        simulate_lifecycle(harness)
+
+        for container_name in list(CONTAINERS.keys()):
+            container = harness.model.unit.get_container(container_name)
+            if CONTAINERS[container_name]:
+                container.get_check = mock.Mock(status="up")
+                container.get_check.return_value.status = CheckStatus.DOWN
+
+        harness.charm.on.update_status.emit()
+        self.assertEqual(harness.model.unit.status, MaintenanceStatus("Status check: DOWN"))
+
+    def test_incomplete_pebble_plan(self):
+        """The charm re-applies the pebble plan if incomplete."""
+        harness = self.harness
+        simulate_lifecycle(harness)
+
+        for container_name in list(CONTAINERS.keys()):
+            container = harness.model.unit.get_container(container_name)
+            container.add_layer(container_name, mock_incomplete_pebble_plan, combine=True)
+            if CONTAINERS[container_name]:
+                container.get_check = mock.Mock(status="up")
+                container.get_check.return_value.status = CheckStatus.UP
+
+        harness.charm.on.update_status.emit()
+
+        self.assertEqual(
+            harness.model.unit.status,
+            ActiveStatus(),
+        )
+        plan = harness.get_container_pebble_plan("airbyte-server").to_dict()
+        assert plan != mock_incomplete_pebble_plan
+
+    @mock.patch("charm.AirbyteK8SOperatorCharm._validate_pebble_plan", return_value=True)
+    @mock.patch("s3_helpers.S3Client.create_bucket_if_not_exists", return_value=None)
+    @mock.patch("s3_helpers.S3Client.set_bucket_lifecycle_policy", return_value=None)
+    def test_missing_pebble_plan(
+        self, set_bucket_lifecycle_policy, create_bucket_if_not_exists, mock_validate_pebble_plan
+    ):
+        """The charm re-applies the pebble plan if missing."""
+        harness = self.harness
+        simulate_lifecycle(harness)
+
+        mock_validate_pebble_plan.return_value = False
+        harness.charm.on.update_status.emit()
+        self.assertEqual(
+            harness.model.unit.status,
+            MaintenanceStatus("replanning application"),
+        )
+        plan = harness.get_container_pebble_plan("airbyte-server").to_dict()
+        assert plan is not None
+
+
+@mock.patch("s3_helpers.S3Client.create_bucket_if_not_exists", return_value=None)
+@mock.patch("s3_helpers.S3Client.set_bucket_lifecycle_policy", return_value=None)
+@mock.patch(
+    "relations.minio.MinioRelation._get_object_storage_data",
+    return_value=minio_object_storage_data,
+)
+@mock.patch("relations.minio.MinioRelation._get_interfaces", return_value=None)
+def simulate_lifecycle(
+    harness,
+    _get_interfaces,
+    _get_object_storage_data,
+    create_bucket_if_not_exists,
+    set_bucket_lifecycle_policy,
+):
+    """Simulate a healthy charm life-cycle.
+
+    Args:
+        harness: ops.testing.Harness object used to simulate charm lifecycle.
+    """
+    # Simulate peer relation readiness.
+    harness.add_relation("peer", "airbyte")
+
+    simulate_pebble_readiness(harness)
+
+    # Simulate db readiness.
+    event = make_database_changed_event("db")
+    harness.charm.postgresql._on_database_changed(event)
+
+    # Simulate minio relation
+    harness.add_relation("object-storage", "airbyte")
+    harness.charm.minio._on_object_storage_relation_changed(None)
+
+    # Simulate s3 relation
+    relation_id = harness.add_relation("s3-parameters", "airbyte")
+    harness.update_relation_data(
+        relation_id,
+        "airbyte",
+        s3_provider_databag(),
+    )
+
+
+def simulate_pebble_readiness(harness):
+    # Simulate pebble readiness on all containers.
+    for container_name in list(CONTAINERS.keys()):
+        container = harness.model.unit.get_container(container_name)
+        harness.charm.on[container_name].pebble_ready.emit(container)
+
+
+def make_database_changed_event(rel_name):
+    """Create and return a mock master changed event.
+
+    The event is generated by the relation with the given name.
+
+    Args:
+        rel_name: Name of the database relation (db or visibility)
+
+    Returns:
+        Event dict.
+    """
+    return type(
+        "Event",
+        (),
+        {
+            "endpoints": "myhost:5432,anotherhost:2345",
+            "username": f"jean-luc@{rel_name}",
+            "password": "inner-light",
+            "relation": type("Relation", (), {"name": rel_name}),
+        },
+    )
+
+
+def s3_provider_databag():
+    """Create and return mock s3 credentials.
+
+    Returns:
+        S3 parameters.
+ """ + return { + "access-key": "access", + "secret-key": "secret", + "bucket": "bucket_name", + "endpoint": "http://endpoint", + "path": "path", + "region": "region", + "s3-uri-style": "path", + } + + +def create_plan(container_name, storage_type): + want_plan = { + "services": { + container_name: { + "summary": container_name, + "command": f"/bin/bash -c airbyte-app/bin/{container_name}", + "startup": "enabled", + "override": "replace", + "environment": { + **BASE_ENV, + "AWS_ACCESS_KEY_ID": "access", + "AWS_SECRET_ACCESS_KEY": "secret", + "DATABASE_DB": "airbyte-k8s_db", + "DATABASE_HOST": "myhost", + "DATABASE_PASSWORD": "inner-light", + "DATABASE_PORT": "5432", + "DATABASE_URL": "jdbc:postgresql://myhost:5432/airbyte-k8s_db", + "DATABASE_USER": "jean-luc@db", + "JOB_KUBE_NAMESPACE": MODEL_NAME, + "JOB_KUBE_SERVICEACCOUNT": APP_NAME, + "KEYCLOAK_DATABASE_URL": "jdbc:postgresql://myhost:5432/airbyte-k8s_db?currentSchema=keycloak", + "LOG_LEVEL": "INFO", + "RUNNING_TTL_MINUTES": 240, + "S3_LOG_BUCKET": "airbyte-dev-logs", + "STORAGE_BUCKET_ACTIVITY_PAYLOAD": "airbyte-payload-storage", + "STORAGE_BUCKET_LOG": "airbyte-dev-logs", + "STORAGE_BUCKET_STATE": "airbyte-state-storage", + "STORAGE_BUCKET_WORKLOAD_OUTPUT": "airbyte-state-storage", + "STORAGE_TYPE": storage_type, + "SUCCEEDED_TTL_MINUTES": 30, + "TEMPORAL_HOST": "temporal-k8s:7233", + "UNSUCCESSFUL_TTL_MINUTES": 1440, + "WEBAPP_URL": "http://airbyte-ui-k8s:8080", + "AIRBYTE_URL": "http://airbyte-ui-k8s:8080", + "WORKERS_MICRONAUT_ENVIRONMENTS": "control-plane", + "WORKER_ENVIRONMENT": "kubernetes", + "WORKER_LOGS_STORAGE_TYPE": storage_type, + "WORKER_STATE_STORAGE_TYPE": storage_type, + "AIRBYTE_API_HOST": "airbyte-k8s:8006/api/public", + "AIRBYTE_SERVER_HOST": "airbyte-k8s:8001", + "CONFIG_API_HOST": "airbyte-k8s:8001", + "CONNECTOR_BUILDER_API_HOST": "airbyte-k8s:80", + "CONNECTOR_BUILDER_SERVER_API_HOST": "airbyte-k8s:80", + "INTERNAL_API_HOST": "airbyte-k8s:8001", + }, }, - } - # Simulate the container coming up and emission of pebble-ready event - self.harness.container_pebble_ready("httpbin") - # Get the plan now we've run PebbleReady - updated_plan = self.harness.get_container_pebble_plan("httpbin").to_dict() - # Check we've got the plan we expected - self.assertEqual(expected_plan, updated_plan) - # Check the service was started - service = self.harness.model.unit.get_container("httpbin").get_service("httpbin") - self.assertTrue(service.is_running()) - # Ensure we set an ActiveStatus with no message - self.assertEqual(self.harness.model.unit.status, ops.ActiveStatus()) - - def test_config_changed_valid_can_connect(self): - # Ensure the simulated Pebble API is reachable - self.harness.set_can_connect("httpbin", True) - # Trigger a config-changed event with an updated value - self.harness.update_config({"log-level": "debug"}) - # Get the plan now we've run PebbleReady - updated_plan = self.harness.get_container_pebble_plan("httpbin").to_dict() - updated_env = updated_plan["services"]["httpbin"]["environment"] - # Check the config change was effective - self.assertEqual(updated_env, {"GUNICORN_CMD_ARGS": "--log-level debug"}) - self.assertEqual(self.harness.model.unit.status, ops.ActiveStatus()) - - def test_config_changed_valid_cannot_connect(self): - # Trigger a config-changed event with an updated value - self.harness.update_config({"log-level": "debug"}) - # Check the charm is in WaitingStatus - self.assertIsInstance(self.harness.model.unit.status, ops.WaitingStatus) - - def test_config_changed_invalid(self): - # Ensure 
the simulated Pebble API is reachable - self.harness.set_can_connect("httpbin", True) - # Trigger a config-changed event with an updated value - self.harness.update_config({"log-level": "foobar"}) - # Check the charm is in BlockedStatus - self.assertIsInstance(self.harness.model.unit.status, ops.BlockedStatus) + }, + } + + if storage_type == StorageType.minio: + want_plan["services"][container_name]["environment"].update( + { + "MINIO_ENDPOINT": "http://service.namespace.svc.cluster.local:9000", + "AWS_ACCESS_KEY_ID": "access", + "AWS_SECRET_ACCESS_KEY": "secret", + "STATE_STORAGE_MINIO_ENDPOINT": "http://service.namespace.svc.cluster.local:9000", + "STATE_STORAGE_MINIO_ACCESS_KEY": "access", + "STATE_STORAGE_MINIO_SECRET_ACCESS_KEY": "secret", + "STATE_STORAGE_MINIO_BUCKET_NAME": "airbyte-state-storage", + "S3_PATH_STYLE_ACCESS": "true", + } + ) + + if storage_type == StorageType.s3: + want_plan["services"][container_name]["environment"].update( + { + "AWS_ACCESS_KEY_ID": "access", + "AWS_SECRET_ACCESS_KEY": "secret", + "S3_LOG_BUCKET_REGION": "region", + "AWS_DEFAULT_REGION": "region", + } + ) + + application_info = CONTAINERS[container_name] + if application_info: + want_plan["services"][container_name].update( + { + "on-check-failure": {"up": "ignore"}, + } + ) + want_plan.update( + { + "checks": { + "up": { + "override": "replace", + "period": "10s", + "http": { + "url": f"http://localhost:{application_info['port']}{application_info['health_endpoint']}" + }, + } + } + } + ) + + return want_plan diff --git a/tests/unit/test_state.py b/tests/unit/test_state.py new file mode 100644 index 0000000..5dd9fb3 --- /dev/null +++ b/tests/unit/test_state.py @@ -0,0 +1,69 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. +# +# Learn more about testing at: https://juju.is/docs/sdk/testing + +import json +from unittest import TestCase + +from state import State + +"""Charm state unit tests.""" + + +class TestState(TestCase): + """Unit tests for state. + + Attrs: + maxDiff: Specifies max difference shown by failed tests. + """ + + maxDiff = None + + def test_get(self): + """It is possible to retrieve attributes from the state.""" + state = make_state({"foo": json.dumps("bar")}) + self.assertEqual(state.foo, "bar") + self.assertIsNone(state.bad) + + def test_set(self): + """It is possible to set attributes in the state.""" + data = {"foo": json.dumps("bar")} + state = make_state(data) + state.foo = 42 + state.list = [1, 2, 3] + self.assertEqual(state.foo, 42) + self.assertEqual(state.list, [1, 2, 3]) + self.assertEqual(data, {"foo": "42", "list": "[1, 2, 3]"}) + + def test_del(self): + """It is possible to unset attributes in the state.""" + data = {"foo": json.dumps("bar"), "answer": json.dumps(42)} + state = make_state(data) + del state.foo + self.assertIsNone(state.foo) + self.assertEqual(data, {"answer": "42"}) + # Deleting a name that is not set does not error. + del state.foo + + def test_is_ready(self): + """The state is not ready when it is not possible to get relations.""" + state = make_state({}) + self.assertTrue(state.is_ready()) + + state = State("myapp", lambda: None) + self.assertFalse(state.is_ready()) + + +def make_state(data): + """Create state object. + + Args: + data: Data to be included in state. + + Returns: + State object with data. 
+ """ + app = "myapp" + rel = type("Rel", (), {"data": {app: data}})() + return State(app, lambda: rel) diff --git a/tests/unit/test_structured_config.py b/tests/unit/test_structured_config.py new file mode 100644 index 0000000..cf7bd97 --- /dev/null +++ b/tests/unit/test_structured_config.py @@ -0,0 +1,74 @@ +#!/usr/bin/env python3 +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Structured config unit tests.""" + +import logging + +import pytest +from ops.testing import Harness + +from charm import AirbyteK8SOperatorCharm + +logger = logging.getLogger(__name__) + + +@pytest.fixture +def _harness(): + """Harness setup for tests.""" + _harness = Harness(AirbyteK8SOperatorCharm) + _harness.begin_with_initial_hooks() + return _harness + + +def test_config_parsing_parameters_integer_values(_harness) -> None: + """Check that integer fields are parsed correctly.""" + integer_fields = [ + "logs-ttl", + "pod-running-ttl-minutes", + "pod-successful-ttl-minutes", + "pod-unsuccessful-ttl-minutes", + ] + erroneus_values = [-5] + valid_values = [42, 100, 1] + for field in integer_fields: + check_invalid_values(_harness, field, erroneus_values) + check_valid_values(_harness, field, valid_values) + + +def test_product_related_values(_harness) -> None: + """Test specific parameters for each field.""" + erroneus_values = ["test-value", "foo", "bar"] + + # storage-type + check_invalid_values(_harness, "storage-type", erroneus_values) + accepted_values = ["MINIO", "S3"] + check_valid_values(_harness, "storage-type", accepted_values) + + +def check_valid_values(_harness, field: str, accepted_values: list) -> None: + """Check the correctness of the passed values for a field. + + Args: + _harness: Harness object. + field: The configuration field to test. + accepted_values: List of accepted values for this field. + """ + for value in accepted_values: + _harness.update_config({field: value}) + assert _harness.charm.config[field] == value + + +def check_invalid_values(_harness, field: str, erroneus_values: list) -> None: + """Check the incorrectness of the passed values for a field. + + Args: + _harness: Harness object. + field: The configuration field to test. + erroneus_values: List of invalid values for this field. 
+ """ + for value in erroneus_values: + _harness.update_config({field: value}) + with pytest.raises(ValueError): + _ = _harness.charm.config[field] diff --git a/tox.ini b/tox.ini index dfe9e84..8141ee6 100644 --- a/tox.ini +++ b/tox.ini @@ -4,8 +4,9 @@ [tox] no_package = True skip_missing_interpreters = True -env_list = format, lint, static, unit +env_list = fmt, lint, static, unit, coverage-report min_version = 4.0.0 +max-line-length=120 [vars] src_path = {tox_root}/src @@ -23,28 +24,44 @@ pass_env = CHARM_BUILD_DIR MODEL_SETTINGS -[testenv:format] -description = Apply coding style standards to code +[testenv:fmt] +description = Format the code deps = - black - ruff + black==22.8.0 + isort==5.10.1 commands = - black {[vars]all_path} - ruff --fix {[vars]all_path} + isort {[vars]src_path} {[vars]tests_path} + black {[vars]src_path} {[vars]tests_path} [testenv:lint] -description = Check code against coding style standards +description = Lint the code deps = - black - ruff - codespell + mypy + pylint + pydocstyle + pytest + black==22.8.0 + codespell==2.2.1 + flake8==5.0.4 + flake8-builtins==1.5.3 + flake8-copyright==0.2.3 + flake8-docstrings==1.6.0 + isort==5.10.1 + pep8-naming==0.13.2 + pyproject-flake8==5.0.4.post1 + flake8-docstrings-complete>=1.0.3 + flake8-test-docs>=1.0 commands = - # if this charm owns a lib, uncomment "lib_path" variable - # and uncomment the following line - # codespell {[vars]lib_path} - codespell {tox_root} - ruff {[vars]all_path} - black --check --diff {[vars]all_path} + pydocstyle {[vars]src_path} + codespell {toxinidir} --skip {toxinidir}/.git --skip {toxinidir}/.tox \ + --skip {toxinidir}/build --skip {toxinidir}/lib --skip {toxinidir}/venv \ + --skip {toxinidir}/.mypy_cache --skip {toxinidir}/icon.svg + pflake8 {[vars]src_path} + isort --check-only --diff {[vars]src_path} {[vars]tests_path} + black --check --diff {[vars]src_path} {[vars]tests_path} + mypy {[vars]all_path} --ignore-missing-imports --follow-imports=skip --install-types --non-interactive + pylint {[vars]src_path} --disable=E0401,W1203,W0613,W0718,R0903,W1514,C0103,R0913,C0301,W0212,R0902,C0104,W0640,R0801,W0511,R0914,R0912 + [testenv:unit] description = Run unit tests @@ -62,25 +79,37 @@ commands = {[vars]tests_path}/unit coverage report +[testenv:coverage-report] +description = Create test coverage report +deps = + coverage[toml] + pytest + -r{toxinidir}/requirements.txt +commands = + coverage report + [testenv:static] -description = Run static type checks +description = Run static analysis tests deps = - pyright - -r {tox_root}/requirements.txt + bandit[toml] + -r{toxinidir}/requirements.txt commands = - pyright {posargs} + bandit -c {toxinidir}/pyproject.toml -r {[vars]src_path} {[vars]tests_path} [testenv:integration] description = Run integration tests deps = - pytest - juju - pytest-operator - -r {tox_root}/requirements.txt + ipdb==0.13.9 + juju==3.1.0.1 + pytest==7.1.3 + pytest-operator==0.35.0 + temporalio==1.1.0 + pytest-asyncio==0.21 + -r{toxinidir}/requirements.txt commands = pytest -v \ -s \ --tb native \ --log-cli-level=INFO \ {posargs} \ - {[vars]tests_path}/integration + {[vars]tests_path}/integration/new_test_charm.py