From 686a121336ea13cf8b412a5a129250ae60b4f004 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Mon, 27 Jan 2025 01:51:45 +0200 Subject: [PATCH 01/41] feat: introduce Kafka and Redis broadcast settings for improved container configuration --- tests/conftest.py | 13 +++- tests/containers/broadcast_container_base.py | 53 ++++++++++--- tests/containers/kafka_broadcast_container.py | 16 +++- tests/containers/kafka_ui_container.py | 17 ++++- tests/containers/redis_broadcast_container.py | 13 ++-- tests/containers/redis_ui_container.py | 14 ++-- .../settings/kafka_broadcast_settings.py | 75 ++++++++++++------- .../settings/redis_broadcast_settings.py | 39 ++++++++++ tests/containers/zookeeper_container.py | 6 +- tests/fixtures/broadcasters.py | 71 +++++++++--------- 10 files changed, 222 insertions(+), 95 deletions(-) create mode 100644 tests/containers/settings/redis_broadcast_settings.py diff --git a/tests/conftest.py b/tests/conftest.py index bdded10dc..0f13b3bd2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -86,7 +86,7 @@ def temp_dir(): print(f"Temporary directory removed: {dir_path}") -@pytest.fixture(scope="session") +@pytest.fixture(scope="session", autouse=True) def opal_network(): """Creates a Docker network and yields it. @@ -129,13 +129,13 @@ def number_of_opal_servers(): @pytest.fixture(scope="session") def opal_servers( opal_network: Network, - broadcast_channel: BroadcastContainerBase, + #broadcast_channel: BroadcastContainerBase, policy_repo: PolicyRepoBase, number_of_opal_servers: int, opal_server_image: str, topics: dict[str, int], - # kafka_broadcast_channel: KafkaBroadcastContainer, - # redis_broadcast_channel: RedisBroadcastContainer, + kafka_broadcast_channel: KafkaBroadcastContainer, + #redis_broadcast_channel: RedisBroadcastContainer, session_matrix, ): """Fixture that initializes and manages OPAL server containers for testing. @@ -162,11 +162,16 @@ def opal_servers( List[OpalServerContainer]: A list of running OPAL server containers. 
""" + #broadcast_channel = redis_broadcast_channel + broadcast_channel = kafka_broadcast_channel + broadcast_channel = broadcast_channel[0] + if not broadcast_channel: raise ValueError("Missing 'broadcast_channel' container.") containers = [] # List to store container instances + for i in range(number_of_opal_servers): container_name = f"opal_server_{i+1}" diff --git a/tests/containers/broadcast_container_base.py b/tests/containers/broadcast_container_base.py index 4a278ee85..0ad398077 100644 --- a/tests/containers/broadcast_container_base.py +++ b/tests/containers/broadcast_container_base.py @@ -6,16 +6,47 @@ def __init__(self): PermitContainer.__init__(self) def get_url(self) -> str: - url = ( - self.settings.protocol - + "://" - + self.settings.user - + ":" - + self.settings.password - + "@" - + self.settings.container_name - + ":" - + str(self.settings.port) - ) + + + url = "" + + match self.settings.protocol: + case "redis": + url = ( + self.settings.protocol + + "://" + + self.settings.container_name + + ":" + + str(self.settings.port) + ) + print(url) + return url + case "postgres": + url = ( + self.settings.protocol + + "://" + + self.settings.user + + ":" + + self.settings.password + + "@" + + self.settings.container_name + + ":" + + str(self.settings.port) + ) + print(url) + return url + case "kafka": + url = ( + self.settings.protocol + + "://" + + self.settings.kafka_container_name + + ":" + + str(self.settings.kafka_port) + ) + print(url) + return url + + case _: + raise ValueError(f"Unknown broadcast container type: {self.settings}") print(url) return url diff --git a/tests/containers/kafka_broadcast_container.py b/tests/containers/kafka_broadcast_container.py index 1fa52e1d1..d235deaa7 100644 --- a/tests/containers/kafka_broadcast_container.py +++ b/tests/containers/kafka_broadcast_container.py @@ -2,8 +2,9 @@ from testcontainers.core.network import Network from testcontainers.kafka import KafkaContainer -import docker -from tests.containers.permitContainer import PermitContainer +from tests.containers.broadcast_container_base import BroadcastContainerBase +from tests.containers.PermitContainer import PermitContainer +from tests.containers.settings import kafka_broadcast_settings from tests.containers.zookeeper_container import ZookeeperContainer @@ -12,6 +13,7 @@ def __init__( self, network: Network, zookeeper_container: ZookeeperContainer, + kafka_settings: kafka_broadcast_settings, docker_client_kw: dict | None = None, **kwargs, ) -> None: @@ -20,6 +22,8 @@ def __init__( labels.update({"com.docker.compose.project": "pytest"}) kwargs["labels"] = labels + self.settings = kafka_settings + self.zookeeper_container = zookeeper_container self.network = network @@ -28,6 +32,12 @@ def __init__( self.with_network(self.network) + for key, value in self.settings.getKafkaEnvVars().items(): + self.with_env(key, value) + self.with_network_aliases("broadcast_channel") + # Add a custom name for the container - self.with_name(f"kafka_broadcast_channel") + self.with_name(self.settings.kafka_container_name) + + self.start() diff --git a/tests/containers/kafka_ui_container.py b/tests/containers/kafka_ui_container.py index a01b70f1f..4cb3c0ed0 100644 --- a/tests/containers/kafka_ui_container.py +++ b/tests/containers/kafka_ui_container.py @@ -2,7 +2,8 @@ from testcontainers.core.network import Network from tests.containers.kafka_broadcast_container import KafkaBroadcastContainer -from tests.containers.permitContainer import PermitContainer +from tests.containers.PermitContainer import 
PermitContainer +from tests.containers.settings.kafka_broadcast_settings import KafkaBroadcastSettings class KafkaUIContainer(PermitContainer, DockerContainer): @@ -10,6 +11,7 @@ def __init__( self, network: Network, kafka_container: KafkaBroadcastContainer, + kafka_settings: KafkaBroadcastSettings, docker_client_kw: dict | None = None, **kwargs, ) -> None: @@ -18,6 +20,8 @@ def __init__( labels.update({"com.docker.compose.project": "pytest"}) kwargs["labels"] = labels + self.settings = kafka_settings + self.kafka_container = kafka_container self.network = network @@ -28,9 +32,14 @@ def __init__( self, image=self.image, docker_client_kw=docker_client_kw, **kwargs ) - self.with_name("kafka-ui") - self.with_bind_ports(8080, 8080) - self.with_env("KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS", "kafka:9092") + + for key, value in self.settings.getKafkaUiEnvVars().items(): + self.with_env(key, value) + + self.with_bind_ports(8080, self.settings.kafka_ui_port) + self.with_name(self.settings.kafka_ui_container_name) self.with_network(self.network) self.with_network_aliases("Kafka_ui") + + self.start() \ No newline at end of file diff --git a/tests/containers/redis_broadcast_container.py b/tests/containers/redis_broadcast_container.py index b11ad61b6..0afff27bd 100644 --- a/tests/containers/redis_broadcast_container.py +++ b/tests/containers/redis_broadcast_container.py @@ -1,6 +1,8 @@ from testcontainers.core.network import Network from testcontainers.redis import RedisContainer +from tests.containers.broadcast_container_base import BroadcastContainerBase +from tests.containers.settings.redis_broadcast_settings import RedisBroadcastSettings from tests.containers.permitContainer import PermitContainer @@ -8,13 +10,12 @@ class RedisBroadcastContainer(PermitContainer, RedisContainer): def __init__( self, network: Network, + redisContainerSettings: RedisBroadcastSettings, docker_client_kw: dict | None = None, **kwargs, ) -> None: - # Add custom labels to the kwargs - labels = kwargs.get("labels", {}) - labels.update({"com.docker.compose.project": "pytest"}) - kwargs["labels"] = labels + + self.settings = redisContainerSettings self.network = network @@ -25,4 +26,6 @@ def __init__( self.with_network_aliases("broadcast_channel") # Add a custom name for the container - self.with_name(f"redis_broadcast_channel") + self.with_name(self.settings.container_name) + + self.start() diff --git a/tests/containers/redis_ui_container.py b/tests/containers/redis_ui_container.py index eb02df918..0bc8cfed8 100644 --- a/tests/containers/redis_ui_container.py +++ b/tests/containers/redis_ui_container.py @@ -2,9 +2,11 @@ from testcontainers.core.network import Network from testcontainers.redis import RedisContainer +from tests import utils from tests.containers.permitContainer import PermitContainer + class RedisUIContainer(PermitContainer, DockerContainer): def __init__( self, @@ -13,15 +15,11 @@ def __init__( docker_client_kw: dict | None = None, **kwargs, ) -> None: - # Add custom labels to the kwargs - labels = kwargs.get("labels", {}) - labels.update({"com.docker.compose.project": "pytest"}) - kwargs["labels"] = labels self.redis_container = redis_container self.network = network - self.container_name = "redis-ui" - self.image = "redislabs/redisinsight:latest" + self.container_name = f"{self.redis_container.settings.container_name}-ui" + self.image = "redis/redisinsight:latest" PermitContainer.__init__(self) DockerContainer.__init__( @@ -31,6 +29,8 @@ def __init__( self.with_name(self.container_name) 
self.with_network(self.network) - self.with_bind_ports(5540, 5540) + self.with_bind_ports(5540, utils.find_available_port(5540)) self.with_network_aliases("redis_ui") + + self.start() \ No newline at end of file diff --git a/tests/containers/settings/kafka_broadcast_settings.py b/tests/containers/settings/kafka_broadcast_settings.py index f388e3d2f..4620783d4 100644 --- a/tests/containers/settings/kafka_broadcast_settings.py +++ b/tests/containers/settings/kafka_broadcast_settings.py @@ -2,44 +2,69 @@ from testcontainers.core.utils import setup_logger class KafkaBroadcastSettings: - def __init__(self, host, port, user, password, database): + def __init__(self, kafka_port: int | None = None): self.logger = setup_logger("KafkaBroadcastSettings") - self.host = host - self.port = port - self.user = user - self.password = password - self.database = database + self.load_from_env() + + + self.kafka_port = kafka_port if kafka_port else self.kafka_port + + self.protocol = "kafka" self.validate_dependencies() def validate_dependencies(self): """Validate required dependencies before starting the server.""" - if not self.host: - raise ValueError("POSTGRES_HOST is required.") - if not self.port: + if not self.kafka_port: raise ValueError("POSTGRES_PORT is required.") - if not self.user: - raise ValueError("POSTGRES_USER is required.") - if not self.password: - raise ValueError("POSTGRES_PASSWORD is required.") - if not self.database: - raise ValueError("POSTGRES_DATABASE is required.") - self.logger.info(f"{self.kafka_container_name} | Dependencies validated successfully.") - def getEnvVars(self): + + def getKafkaUiEnvVars(self): + return { + "KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS": "kafka:9092" + } + + def getZookiperEnvVars(self): + return { + "ZOOKEEPER_CLIENT_PORT": self.zookeeper_port, + "ZOOKEEPER_TICK_TIME": self.zookeeper_tick_time, + "ALLOW_ANONYMOUS_LOGIN": self.zookeeper_allow_anonymous_login + } + + def getKafkaEnvVars(self): return { - "POSTGRES_HOST": self.host, - "POSTGRES_PORT": self.port, - "POSTGRES_USER": self.user, - "POSTGRES_PASSWORD": self.password, - "POSTGRES_DATABASE": self.database, } def load_from_env(self): + """ + Load Kafka settings from environment variables. + + The following environment variables are supported: + - KAFKA_IMAGE_NAME + - KAFKA_CONTAINER_NAME + - KAFKA_CLIENT_PORT + - KAFKA_ADMIN_PORT + - KAFKA_UI_IMAGE_NAME + - KAFKA_UI_CONTAINER_NAME + - KAFKA_UI_PORT + - KAFKA_UI_URL + - KAFKA_BROKER_ID + - KAFKA_ZOOKEEPER_CONNECT + - KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR + - KAFKA_LISTENER_SECURITY_PROTOCOL_MAP + - KAFKA_ADVERTISED_LISTENERS + - ALLOW_PLAINTEXT_LISTENER + - KAFKA_TOPIC_AUTO_CREATE + - KAFKA_TRANSACTION_STATE_LOG_MIN_ISR + - KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR + - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS + + If an environment variable is not set, a default value is used. 
+ """ self.host = os.getenv("POSTGRES_HOST", "localhost") self.port = int(os.getenv("POSTGRES_PORT", 5432)) self.user = os.getenv("POSTGRES_USER", "postgres") @@ -70,9 +95,9 @@ def load_from_env(self): self.kafka_ui_port = os.getenv("KAFKA_UI_PORT", 8080) - self.kafka_ui_url = os.getenv( - "KAFKA_UI_URL", f"http://{self.kafka_ui_host}:{self.kafka_ui_port}" - ) + # self.kafka_ui_url = os.getenv( + # "KAFKA_UI_URL", f"http://{self.kafka_ui_host}:{self.kafka_ui_port}" + # ) self.broker_id = os.getenv("KAFKA_BROKER_ID", 1) self.zookeeper_connect = os.getenv( diff --git a/tests/containers/settings/redis_broadcast_settings.py b/tests/containers/settings/redis_broadcast_settings.py new file mode 100644 index 000000000..6168efa98 --- /dev/null +++ b/tests/containers/settings/redis_broadcast_settings.py @@ -0,0 +1,39 @@ +import os +from testcontainers.core.utils import setup_logger + + +class RedisBroadcastSettings: + def __init__( + self, + container_name: str | None = None, + port: int | None = None, + password: str | None = None, + ): + + self.logger = setup_logger("PostgresBroadcastSettings") + + self.load_from_env() + + self.container_name = container_name if container_name else self.container_name + self.port = port if port else self.port + self.password = password if password else self.password + self.protocol = "redis" + + self.validate_dependencies() + + def validate_dependencies(self): + """Validate required dependencies before starting the server.""" + if not self.port: + raise ValueError("POSTGRES_PORT is required.") + if not self.password: + raise ValueError("POSTGRES_PASSWORD is required.") + + self.logger.info(f"{self.container_name} | Dependencies validated successfully.") + + + + def load_from_env(self): + self.container_name = os.getenv("REDIS_CONTAINER_NAME", "redis_broadcast_channel") + self.port = int(os.getenv("REDIS_PORT", 6379)) + self.password = os.getenv("REDIS_PASSWORD", "redis") + diff --git a/tests/containers/zookeeper_container.py b/tests/containers/zookeeper_container.py index 30c35204e..ac1f019c4 100644 --- a/tests/containers/zookeeper_container.py +++ b/tests/containers/zookeeper_container.py @@ -4,14 +4,16 @@ import docker from tests.containers.permitContainer import PermitContainer +from tests.containers.settings.kafka_broadcast_settings import KafkaBroadcastSettings class ZookeeperContainer(PermitContainer, DockerContainer): def __init__( self, network: Network, + settings: KafkaBroadcastSettings, docker_client_kw: dict | None = None, - **kwargs, + **kwargs: dict, ) -> None: # Add custom labels to the kwargs labels = kwargs.get("labels", {}) @@ -38,3 +40,5 @@ def __init__( self.with_network_aliases("zookeper") # Add a custom name for the container self.with_name(f"zookeeper") + + self.start() \ No newline at end of file diff --git a/tests/fixtures/broadcasters.py b/tests/fixtures/broadcasters.py index 0fa65d246..9f62c79bf 100644 --- a/tests/fixtures/broadcasters.py +++ b/tests/fixtures/broadcasters.py @@ -7,9 +7,11 @@ from tests.containers.postgres_broadcast_container import PostgresBroadcastContainer from tests.containers.redis_broadcast_container import RedisBroadcastContainer from tests.containers.redis_ui_container import RedisUIContainer +from tests.containers.settings.kafka_broadcast_settings import KafkaBroadcastSettings from tests.containers.settings.postgres_broadcast_settings import ( PostgresBroadcastSettings, ) +from tests.containers.settings.redis_broadcast_settings import RedisBroadcastSettings from tests.containers.zookeeper_container import 
ZookeeperContainer logger = setup_logger(__name__) @@ -24,17 +26,18 @@ def postgres_broadcast_channel(opal_network: Network): unless an exception is raised during teardown. """ try: - with PostgresBroadcastContainer( - network=opal_network, settings=PostgresBroadcastSettings() - ) as container: - yield container - - try: - if container.get_wrapped_container().status == "running": - container.stop() - except Exception: - logger.error(f"Failed to stop containers: {container}") - return + container = PostgresBroadcastContainer( + network=opal_network, + settings=PostgresBroadcastSettings() + ) + yield [container] + + try: + if container.get_wrapped_container().status == "running": + container.stop() + except Exception: + logger.error(f"Failed to stop containers: {container}") + return except Exception as e: logger.error( @@ -55,20 +58,18 @@ def kafka_broadcast_channel(opal_network: Network): stop. """ - with ZookeeperContainer(opal_network) as zookeeper_container: - with KafkaBroadcastContainer( - opal_network, zookeeper_container - ) as kafka_container: - with KafkaUIContainer(opal_network, kafka_container) as kafka_ui_container: - containers = [zookeeper_container, kafka_container, kafka_ui_container] - yield containers + zookeeper_container = ZookeeperContainer(opal_network, KafkaBroadcastSettings()) + kafka_container = KafkaBroadcastContainer(opal_network, zookeeper_container, KafkaBroadcastSettings()) + kafka_ui_container = KafkaUIContainer(opal_network, kafka_container, KafkaBroadcastSettings()) + containers = [kafka_container, zookeeper_container, kafka_ui_container] + yield containers - for container in containers: - try: - container.stop() - except Exception: - logger.error(f"Failed to stop container: {container}") - return + for container in containers: + try: + container.stop() + except Exception: + logger.error(f"Failed to stop container: {container}") + return @pytest.fixture(scope="session") @@ -79,17 +80,17 @@ def redis_broadcast_channel(opal_network: Network): container. The yield value is a list of the two containers. The fixture stops the containers after the test is done. 
""" - with RedisBroadcastContainer(opal_network) as redis_container: - with RedisUIContainer(opal_network, redis_container) as redis_ui_container: - containers = [redis_container, redis_ui_container] - yield containers - - for container in containers: - try: - container.stop() - except Exception: - logger.error(f"Failed to stop containers: {container}") - return + redis_container = RedisBroadcastContainer(opal_network, redisContainerSettings=RedisBroadcastSettings()) + redis_ui_container = RedisUIContainer(opal_network, redis_container) + containers = [redis_container, redis_ui_container] + yield containers + + for container in containers: + try: + container.stop() + except Exception: + logger.error(f"Failed to stop containers: {container}") + return @pytest.fixture(scope="session") From 0a166634eaae0259b5f784f665150f8a485b5ed3 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Thu, 6 Mar 2025 05:50:49 +0200 Subject: [PATCH 02/41] feat: refactor test container imports and update Redis broadcast container with URL method --- tests/conftest.py | 10 +++++----- tests/containers/kafka_broadcast_container.py | 2 +- tests/containers/kafka_ui_container.py | 2 +- tests/containers/redis_broadcast_container.py | 3 +++ tests/run.sh | 4 ++-- 5 files changed, 12 insertions(+), 9 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 0f13b3bd2..0f5bc66b7 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -62,7 +62,7 @@ def cancel_wait_for_client_after_timeout(): print(f"Failed to attach debugger: {e}") utils.export_env("OPAL_TESTS_DEBUG", "true") -utils.install_opal_server_and_client() +# utils.install_opal_server_and_client() @pytest.fixture(scope="session") @@ -134,8 +134,8 @@ def opal_servers( number_of_opal_servers: int, opal_server_image: str, topics: dict[str, int], - kafka_broadcast_channel: KafkaBroadcastContainer, - #redis_broadcast_channel: RedisBroadcastContainer, + # kafka_broadcast_channel: KafkaBroadcastContainer, + redis_broadcast_channel: RedisBroadcastContainer, session_matrix, ): """Fixture that initializes and manages OPAL server containers for testing. @@ -162,8 +162,8 @@ def opal_servers( List[OpalServerContainer]: A list of running OPAL server containers. 
""" - #broadcast_channel = redis_broadcast_channel - broadcast_channel = kafka_broadcast_channel + broadcast_channel = redis_broadcast_channel + # broadcast_channel = kafka_broadcast_channel broadcast_channel = broadcast_channel[0] if not broadcast_channel: diff --git a/tests/containers/kafka_broadcast_container.py b/tests/containers/kafka_broadcast_container.py index d235deaa7..5366c414c 100644 --- a/tests/containers/kafka_broadcast_container.py +++ b/tests/containers/kafka_broadcast_container.py @@ -3,7 +3,7 @@ from testcontainers.kafka import KafkaContainer from tests.containers.broadcast_container_base import BroadcastContainerBase -from tests.containers.PermitContainer import PermitContainer +from tests.containers.permitContainer import PermitContainer from tests.containers.settings import kafka_broadcast_settings from tests.containers.zookeeper_container import ZookeeperContainer diff --git a/tests/containers/kafka_ui_container.py b/tests/containers/kafka_ui_container.py index 4cb3c0ed0..c2864cf16 100644 --- a/tests/containers/kafka_ui_container.py +++ b/tests/containers/kafka_ui_container.py @@ -2,7 +2,7 @@ from testcontainers.core.network import Network from tests.containers.kafka_broadcast_container import KafkaBroadcastContainer -from tests.containers.PermitContainer import PermitContainer +from tests.containers.permitContainer import PermitContainer from tests.containers.settings.kafka_broadcast_settings import KafkaBroadcastSettings diff --git a/tests/containers/redis_broadcast_container.py b/tests/containers/redis_broadcast_container.py index 0afff27bd..591177761 100644 --- a/tests/containers/redis_broadcast_container.py +++ b/tests/containers/redis_broadcast_container.py @@ -29,3 +29,6 @@ def __init__( self.with_name(self.settings.container_name) self.start() + + def get_url(self) -> str: + return f"redis://{self.settings.container_name}:{self.settings.port}" diff --git a/tests/run.sh b/tests/run.sh index 11a2b1b75..e41371dfc 100755 --- a/tests/run.sh +++ b/tests/run.sh @@ -36,10 +36,10 @@ function main { # Check if a specific test is provided if [[ -n "$1" ]]; then echo "Running specific test: $1" - python -Xfrozen_modules=off -m debugpy --listen 5678 -m pytest -s "$@" + python3 -Xfrozen_modules=off -m debugpy --listen 5678 -m pytest -s "$@" else echo "Running all tests..." - python -Xfrozen_modules=off -m debugpy --listen 5678 -m pytest -s + python3 -Xfrozen_modules=off -m debugpy --listen 5678 -m pytest -s fi echo "Done!" 
From bfb7947c6f416f24e80aa45d66129a3e27604c73 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Thu, 13 Mar 2025 14:36:02 -0700 Subject: [PATCH 03/41] feat: implement Kafka and Zookeeper container classes for improved testing setup --- tests/conftest.py | 23 ++++++++----- .../{ => kafka}/kafka_broadcast_container.py | 4 +-- .../{ => kafka}/kafka_ui_container.py | 4 +-- .../settings/kafka_broadcast_settings.py | 0 .../{ => kafka}/zookeeper_container.py | 2 +- .../postgres_broadcast_container.py | 1 + tests/fixtures/broadcasters.py | 33 ++++++++++--------- 7 files changed, 38 insertions(+), 29 deletions(-) rename tests/containers/{ => kafka}/kafka_broadcast_container.py (90%) rename tests/containers/{ => kafka}/kafka_ui_container.py (88%) rename tests/containers/{ => kafka}/settings/kafka_broadcast_settings.py (100%) rename tests/containers/{ => kafka}/zookeeper_container.py (93%) diff --git a/tests/conftest.py b/tests/conftest.py index 0f5bc66b7..931641ffa 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -98,9 +98,16 @@ def opal_network(): print("Removing network...") time.sleep(5) # wait for the containers to stop - network.remove() - print("Network removed") - + try: + for container in network._network.containers: + container.kill() + network.remove() + logger.info("Network removed.") + except Exception as e: + if logger.level == "DEBUG": + logger.error(f"Failed to remove network: {e}") + else: + logger.error(f"Failed to remove network got exception") @pytest.fixture(scope="session") def number_of_opal_servers(): @@ -129,13 +136,13 @@ def number_of_opal_servers(): @pytest.fixture(scope="session") def opal_servers( opal_network: Network, - #broadcast_channel: BroadcastContainerBase, policy_repo: PolicyRepoBase, number_of_opal_servers: int, opal_server_image: str, topics: dict[str, int], - # kafka_broadcast_channel: KafkaBroadcastContainer, - redis_broadcast_channel: RedisBroadcastContainer, + # broadcast_channel: BroadcastContainerBase, + kafka_broadcast_channel: KafkaBroadcastContainer, + # redis_broadcast_channel: RedisBroadcastContainer, session_matrix, ): """Fixture that initializes and manages OPAL server containers for testing. @@ -162,8 +169,8 @@ def opal_servers( List[OpalServerContainer]: A list of running OPAL server containers. 
""" - broadcast_channel = redis_broadcast_channel - # broadcast_channel = kafka_broadcast_channel + # broadcast_channel = redis_broadcast_channel + broadcast_channel = kafka_broadcast_channel broadcast_channel = broadcast_channel[0] if not broadcast_channel: diff --git a/tests/containers/kafka_broadcast_container.py b/tests/containers/kafka/kafka_broadcast_container.py similarity index 90% rename from tests/containers/kafka_broadcast_container.py rename to tests/containers/kafka/kafka_broadcast_container.py index 5366c414c..62ef4c6dc 100644 --- a/tests/containers/kafka_broadcast_container.py +++ b/tests/containers/kafka/kafka_broadcast_container.py @@ -4,8 +4,8 @@ from tests.containers.broadcast_container_base import BroadcastContainerBase from tests.containers.permitContainer import PermitContainer -from tests.containers.settings import kafka_broadcast_settings -from tests.containers.zookeeper_container import ZookeeperContainer +from tests.containers.kafka.settings import kafka_broadcast_settings +from tests.containers.kafka.zookeeper_container import ZookeeperContainer class KafkaBroadcastContainer(PermitContainer, KafkaContainer): diff --git a/tests/containers/kafka_ui_container.py b/tests/containers/kafka/kafka_ui_container.py similarity index 88% rename from tests/containers/kafka_ui_container.py rename to tests/containers/kafka/kafka_ui_container.py index c2864cf16..4254f0749 100644 --- a/tests/containers/kafka_ui_container.py +++ b/tests/containers/kafka/kafka_ui_container.py @@ -1,9 +1,9 @@ from testcontainers.core.container import DockerContainer from testcontainers.core.network import Network -from tests.containers.kafka_broadcast_container import KafkaBroadcastContainer +from tests.containers.kafka.kafka_broadcast_container import KafkaBroadcastContainer from tests.containers.permitContainer import PermitContainer -from tests.containers.settings.kafka_broadcast_settings import KafkaBroadcastSettings +from tests.containers.kafka.settings.kafka_broadcast_settings import KafkaBroadcastSettings class KafkaUIContainer(PermitContainer, DockerContainer): diff --git a/tests/containers/settings/kafka_broadcast_settings.py b/tests/containers/kafka/settings/kafka_broadcast_settings.py similarity index 100% rename from tests/containers/settings/kafka_broadcast_settings.py rename to tests/containers/kafka/settings/kafka_broadcast_settings.py diff --git a/tests/containers/zookeeper_container.py b/tests/containers/kafka/zookeeper_container.py similarity index 93% rename from tests/containers/zookeeper_container.py rename to tests/containers/kafka/zookeeper_container.py index ac1f019c4..53911e52a 100644 --- a/tests/containers/zookeeper_container.py +++ b/tests/containers/kafka/zookeeper_container.py @@ -4,7 +4,7 @@ import docker from tests.containers.permitContainer import PermitContainer -from tests.containers.settings.kafka_broadcast_settings import KafkaBroadcastSettings +from tests.containers.kafka.settings.kafka_broadcast_settings import KafkaBroadcastSettings class ZookeeperContainer(PermitContainer, DockerContainer): diff --git a/tests/containers/postgres_broadcast_container.py b/tests/containers/postgres_broadcast_container.py index bb0764335..c0cb340e1 100644 --- a/tests/containers/postgres_broadcast_container.py +++ b/tests/containers/postgres_broadcast_container.py @@ -40,3 +40,4 @@ def __init__( self.with_network_aliases("broadcast_channel") self.with_name(f"postgres_broadcast_channel") + self.start() diff --git a/tests/fixtures/broadcasters.py b/tests/fixtures/broadcasters.py 
index 9f62c79bf..a93739aa4 100644 --- a/tests/fixtures/broadcasters.py +++ b/tests/fixtures/broadcasters.py @@ -2,17 +2,17 @@ from testcontainers.core.network import Network from testcontainers.core.utils import setup_logger -from tests.containers.kafka_broadcast_container import KafkaBroadcastContainer -from tests.containers.kafka_ui_container import KafkaUIContainer +from tests.containers.kafka.kafka_broadcast_container import KafkaBroadcastContainer +from tests.containers.kafka.kafka_ui_container import KafkaUIContainer from tests.containers.postgres_broadcast_container import PostgresBroadcastContainer from tests.containers.redis_broadcast_container import RedisBroadcastContainer from tests.containers.redis_ui_container import RedisUIContainer -from tests.containers.settings.kafka_broadcast_settings import KafkaBroadcastSettings +from tests.containers.kafka.settings.kafka_broadcast_settings import KafkaBroadcastSettings from tests.containers.settings.postgres_broadcast_settings import ( PostgresBroadcastSettings, ) from tests.containers.settings.redis_broadcast_settings import RedisBroadcastSettings -from tests.containers.zookeeper_container import ZookeeperContainer +from tests.containers.kafka.zookeeper_container import ZookeeperContainer logger = setup_logger(__name__) @@ -26,12 +26,18 @@ def postgres_broadcast_channel(opal_network: Network): unless an exception is raised during teardown. """ try: - container = PostgresBroadcastContainer( - network=opal_network, - settings=PostgresBroadcastSettings() - ) - yield [container] - + try: + container = PostgresBroadcastContainer( + network=opal_network, + settings=PostgresBroadcastSettings() + ) + yield [container] + except Exception as e: + logger.error( + f"Failed to start container: {container} with error: {e} {e.__traceback__}" + ) + exit(1) + try: if container.get_wrapped_container().status == "running": container.stop() @@ -103,9 +109,4 @@ def broadcast_channel(opal_network: Network, postgres_broadcast_channel): """ yield postgres_broadcast_channel - - try: - postgres_broadcast_channel.stop() - except Exception: - logger.error(f"Failed to stop containers: {postgres_broadcast_channel}") - return + return From 299727f72a1dd4361c2b4bca95d030203707e7a8 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Thu, 13 Mar 2025 14:36:56 -0700 Subject: [PATCH 04/41] feat: simplify KafkaUIContainer implementation and enhance environment variable configuration --- tests/containers/kafka/kafka_ui_container.py | 54 ++++++-------------- 1 file changed, 15 insertions(+), 39 deletions(-) diff --git a/tests/containers/kafka/kafka_ui_container.py b/tests/containers/kafka/kafka_ui_container.py index 4254f0749..142981651 100644 --- a/tests/containers/kafka/kafka_ui_container.py +++ b/tests/containers/kafka/kafka_ui_container.py @@ -1,45 +1,21 @@ from testcontainers.core.container import DockerContainer from testcontainers.core.network import Network -from tests.containers.kafka.kafka_broadcast_container import KafkaBroadcastContainer -from tests.containers.permitContainer import PermitContainer -from tests.containers.kafka.settings.kafka_broadcast_settings import KafkaBroadcastSettings - -class KafkaUIContainer(PermitContainer, DockerContainer): - def __init__( - self, - network: Network, - kafka_container: KafkaBroadcastContainer, - kafka_settings: KafkaBroadcastSettings, - docker_client_kw: dict | None = None, - **kwargs, - ) -> None: - # Add custom labels to the kwargs - labels = kwargs.get("labels", {}) - 
labels.update({"com.docker.compose.project": "pytest"}) - kwargs["labels"] = labels - - self.settings = kafka_settings - - self.kafka_container = kafka_container - self.network = network - - self.image = "provectuslabs/kafka-ui:latest" - - PermitContainer.__init__(self) - DockerContainer.__init__( - self, image=self.image, docker_client_kw=docker_client_kw, **kwargs - ) - - - for key, value in self.settings.getKafkaUiEnvVars().items(): +class KafkaUIContainer(DockerContainer): + def __init__(self, network: Network): + self.settings = { + "env": { + "KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS": "kafka0:9092", + } + } + DockerContainer.__init__(self, image="provectuslabs/kafka-ui:latest") + self.with_exposed_ports(8080) + for key, value in self.settings.get("env").items(): self.with_env(key, value) + self.with_name("kafka-ui") + self.with_network(network) + self.with_network_aliases("kafka-ui") - self.with_bind_ports(8080, self.settings.kafka_ui_port) - self.with_name(self.settings.kafka_ui_container_name) - - self.with_network(self.network) - self.with_network_aliases("Kafka_ui") - - self.start() \ No newline at end of file + self.start() + \ No newline at end of file From 5bb78bad2f9cd960527aa9be2276fb0a2d0266a7 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Thu, 13 Mar 2025 14:37:34 -0700 Subject: [PATCH 05/41] feat: refactor KafkaBroadcastContainer to simplify implementation and enhance environment configuration --- .../kafka/kafka_broadcast_container.py | 65 ++++++++----------- 1 file changed, 28 insertions(+), 37 deletions(-) diff --git a/tests/containers/kafka/kafka_broadcast_container.py b/tests/containers/kafka/kafka_broadcast_container.py index 62ef4c6dc..955f98d6f 100644 --- a/tests/containers/kafka/kafka_broadcast_container.py +++ b/tests/containers/kafka/kafka_broadcast_container.py @@ -1,43 +1,34 @@ -import debugpy +from testcontainers.core.container import DockerContainer from testcontainers.core.network import Network -from testcontainers.kafka import KafkaContainer -from tests.containers.broadcast_container_base import BroadcastContainerBase -from tests.containers.permitContainer import PermitContainer -from tests.containers.kafka.settings import kafka_broadcast_settings -from tests.containers.kafka.zookeeper_container import ZookeeperContainer - - -class KafkaBroadcastContainer(PermitContainer, KafkaContainer): - def __init__( - self, - network: Network, - zookeeper_container: ZookeeperContainer, - kafka_settings: kafka_broadcast_settings, - docker_client_kw: dict | None = None, - **kwargs, - ) -> None: - # Add custom labels to the kwargs - labels = kwargs.get("labels", {}) - labels.update({"com.docker.compose.project": "pytest"}) - kwargs["labels"] = labels - - self.settings = kafka_settings - - self.zookeeper_container = zookeeper_container - self.network = network - - PermitContainer.__init__(self) - KafkaContainer.__init__(self, docker_client_kw=docker_client_kw, **kwargs) - - self.with_network(self.network) - - for key, value in self.settings.getKafkaEnvVars().items(): +class KafkaContainer(DockerContainer): + def __init__(self, network: Network): + # Kafka Broker 0 + self.settings = { + "env": { + "KAFKA_BROKER_ID": "1", + "KAFKA_ZOOKEEPER_CONNECT": "zookeeper:2181", + "KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR": "1", + "KAFKA_LISTENER_SECURITY_PROTOCOL_MAP": "PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT", + "KAFKA_ADVERTISED_LISTENERS": "PLAINTEXT_HOST://localhost:29092,PLAINTEXT://kafka0:9092", + "ALLOW_PLAINTEXT_LISTENER": 
"yes", + "KAFKA_TOPIC_AUTO_CREATE": "true", + "KAFKA_TRANSACTION_STATE_LOG_MIN_ISR": "1", + "KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR": "1", + } + } + DockerContainer.__init__(self, image="confluentinc/cp-kafka:6.2.0") + self.with_exposed_ports(9092, 29092) + for key, value in self.settings.get("env").items(): self.with_env(key, value) + + self.with_name("kafka0") + self.with_network(network) + self.with_network_aliases("kafka0") - self.with_network_aliases("broadcast_channel") + self.start() - # Add a custom name for the container - self.with_name(self.settings.kafka_container_name) - self.start() + def get_url(self) -> str: + url = "kafka://kafka0:9092" + return url From fdb97f6dfa684ec35010fc397f115f5e37e16212 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Thu, 13 Mar 2025 15:55:22 -0700 Subject: [PATCH 06/41] feat: refactor ZookeeperContainer to simplify initialization and enhance environment configuration --- tests/containers/kafka/zookeeper_container.py | 62 +++++++------------ 1 file changed, 21 insertions(+), 41 deletions(-) diff --git a/tests/containers/kafka/zookeeper_container.py b/tests/containers/kafka/zookeeper_container.py index 53911e52a..17b179c0c 100644 --- a/tests/containers/kafka/zookeeper_container.py +++ b/tests/containers/kafka/zookeeper_container.py @@ -1,44 +1,24 @@ -import debugpy from testcontainers.core.container import DockerContainer from testcontainers.core.network import Network -import docker -from tests.containers.permitContainer import PermitContainer -from tests.containers.kafka.settings.kafka_broadcast_settings import KafkaBroadcastSettings - - -class ZookeeperContainer(PermitContainer, DockerContainer): - def __init__( - self, - network: Network, - settings: KafkaBroadcastSettings, - docker_client_kw: dict | None = None, - **kwargs: dict, - ) -> None: - # Add custom labels to the kwargs - labels = kwargs.get("labels", {}) - labels.update({"com.docker.compose.project": "pytest"}) - kwargs["labels"] = labels - - self.network = network - - PermitContainer.__init__(self) - DockerContainer.__init__( - self, - image="confluentinc/cp-zookeeper:latest", - docker_client_kw=docker_client_kw, - **kwargs, - ) - - self.with_bind_ports(2181, 2181) - self.with_env("ZOOKEEPER_CLIENT_PORT", "2181") - self.with_env("ZOOKEEPER_TICK_TIME", "2000") - self.with_env("ALLOW_ANONYMOUS_LOGIN", "yes") - - self.with_network(self.network) - - self.with_network_aliases("zookeper") - # Add a custom name for the container - self.with_name(f"zookeeper") - - self.start() \ No newline at end of file +class ZookeeperContainer(DockerContainer): + def __init__(self, network: Network): + + self.settings = { + "env": { + "ZOOKEEPER_CLIENT_PORT": "2181", + "ZOOKEEPER_TICK_TIME": "2000", + "ALLOW_ANONYMOUS_LOGIN": "yes", + } + } + + DockerContainer.__init__(self, image="confluentinc/cp-zookeeper:6.2.0") + self.with_exposed_ports(2181) + + self.with_name("zookeeper") + for key, value in self.settings.get("env").items(): + self.with_env(key, value) + self.with_network(network) + self.with_network_aliases("zookeeper") + + self.start() From 4e9ac896ea8413890aaaf1b68424732bb6e151e0 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Fri, 14 Mar 2025 13:49:54 -0400 Subject: [PATCH 07/41] feat: refactor Kafka and OPA container imports, enhance logging, and improve container management --- tests/conftest.py | 40 ++++++++++----------- tests/containers/{ => OPA}/opa_container.py | 0 
tests/fixtures/broadcasters.py | 13 ++++--- tests/fixtures/policy_stores.py | 2 +- tests/policies/rbac.rego | 14 ++++---- 5 files changed, 35 insertions(+), 34 deletions(-) rename tests/containers/{ => OPA}/opa_container.py (100%) diff --git a/tests/conftest.py b/tests/conftest.py index 931641ffa..702bc2deb 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -17,8 +17,8 @@ from tests.containers.broadcast_container_base import BroadcastContainerBase from tests.containers.cedar_container import CedarContainer from tests.containers.gitea_container import GiteaContainer -from tests.containers.kafka_broadcast_container import KafkaBroadcastContainer -from tests.containers.opa_container import OpaContainer, OpaSettings +from tests.containers.kafka.kafka_broadcast_container import KafkaContainer +from tests.containers.OPA.opa_container import OpaContainer, OpaSettings from tests.containers.opal_client_container import OpalClientContainer from tests.containers.opal_server_container import OpalServerContainer from tests.containers.postgres_broadcast_container import PostgresBroadcastContainer @@ -49,17 +49,17 @@ def cancel_wait_for_client_after_timeout(): time.sleep(debugger_wait_time) debugpy.wait_for_client.cancel() except Exception as e: - print(f"Failed to cancel wait for client: {e}") + logger.debug(f"Failed to cancel wait for client: {e}") try: if pytest_settings.wait_for_debugger: t = threading.Thread(target=cancel_wait_for_client_after_timeout) t.start() - print(f"Waiting for debugger to attach... {debugger_wait_time} seconds timeout") + logger.debug(f"Waiting for debugger to attach... {debugger_wait_time} seconds timeout") debugpy.wait_for_client() except Exception as e: - print(f"Failed to attach debugger: {e}") + logger.debug(f"Failed to attach debugger: {e}") utils.export_env("OPAL_TESTS_DEBUG", "true") # utils.install_opal_server_and_client() @@ -78,12 +78,12 @@ def temp_dir(): exist for the duration of the test session. 
""" dir_path = tempfile.mkdtemp() - print(f"Temporary directory created: {dir_path}") + logger.debug(f"Temporary directory created: {dir_path}") yield dir_path # Teardown: Clean up the temporary directory shutil.rmtree(dir_path) - print(f"Temporary directory removed: {dir_path}") + logger.debug(f"Temporary directory removed: {dir_path}") @pytest.fixture(scope="session", autouse=True) @@ -96,18 +96,16 @@ def opal_network(): yield network - print("Removing network...") - time.sleep(5) # wait for the containers to stop + logger.info("Removing network...") + time.sleep(3) # wait for the containers to stop try: - for container in network._network.containers: - container.kill() network.remove() logger.info("Network removed.") except Exception as e: if logger.level == "DEBUG": logger.error(f"Failed to remove network: {e}") else: - logger.error(f"Failed to remove network got exception") + logger.error(f"Failed to remove network got exception\n{e}") @pytest.fixture(scope="session") def number_of_opal_servers(): @@ -140,8 +138,8 @@ def opal_servers( number_of_opal_servers: int, opal_server_image: str, topics: dict[str, int], - # broadcast_channel: BroadcastContainerBase, - kafka_broadcast_channel: KafkaBroadcastContainer, + broadcast_channel: BroadcastContainerBase, + # kafka_broadcast_channel: KafkaContainer, # redis_broadcast_channel: RedisBroadcastContainer, session_matrix, ): @@ -170,7 +168,7 @@ def opal_servers( """ # broadcast_channel = redis_broadcast_channel - broadcast_channel = kafka_broadcast_channel + # broadcast_channel = kafka_broadcast_channel broadcast_channel = broadcast_channel[0] if not broadcast_channel: @@ -207,7 +205,7 @@ def opal_servers( ) policy_repo.create_webhook() - print( + logger.info( f"Started container: {container_name}, ID: {container.get_wrapped_container().id}" ) container.wait_for_log("Clone succeeded", timeout=30) @@ -308,7 +306,7 @@ def opal_clients( if not opal_servers or len(opal_servers) == 0: raise ValueError("Missing 'opal_server' container.") - opal_server_url = f"http://{opal_servers[0].settings.container_name}:{opal_servers[0].settings.port}" + opal_server_url = f"http://{opal_servers[0].settings.container_name}:7002"#{opal_servers[0].settings.port}" containers = [] # List to store OpalClientContainer instances @@ -347,7 +345,7 @@ def opal_clients( ) container.start() - print( + logger.info( f"Started OpalClientContainer: {container_name}, ID: {container.get_wrapped_container().id}" ) containers.append(container) @@ -409,7 +407,7 @@ def topiced_clients( if not opal_servers or len(opal_servers) == 0: raise ValueError("Missing 'opal_server' container.") - opal_server_url = f"http://{opal_servers[0].settings.container_name}:{opal_servers[0].settings.port}" + opal_server_url = f"http://{opal_servers[0].settings.container_name}:7002"#{opal_servers[0].settings.port}" containers = {} # List to store OpalClientContainer instances client_token = opal_servers[0].obtain_OPAL_tokens("topiced_opal_client_?x?")[ @@ -480,10 +478,10 @@ def wait_sometime(): """ if os.getenv("GITHUB_ACTIONS") == "true": - print("Running inside GitHub Actions. Sleeping for 30 seconds...") + logger.info("Running inside GitHub Actions. Sleeping for 30 seconds...") time.sleep(3600) # Sleep for 30 seconds else: - print("Running on the local machine. Press Enter to continue...") + logger.info("Running on the local machine. 
Press Enter to continue...") input() # Wait for key press diff --git a/tests/containers/opa_container.py b/tests/containers/OPA/opa_container.py similarity index 100% rename from tests/containers/opa_container.py rename to tests/containers/OPA/opa_container.py diff --git a/tests/fixtures/broadcasters.py b/tests/fixtures/broadcasters.py index a93739aa4..affe30eb1 100644 --- a/tests/fixtures/broadcasters.py +++ b/tests/fixtures/broadcasters.py @@ -2,7 +2,7 @@ from testcontainers.core.network import Network from testcontainers.core.utils import setup_logger -from tests.containers.kafka.kafka_broadcast_container import KafkaBroadcastContainer +from tests.containers.kafka.kafka_broadcast_container import KafkaContainer from tests.containers.kafka.kafka_ui_container import KafkaUIContainer from tests.containers.postgres_broadcast_container import PostgresBroadcastContainer from tests.containers.redis_broadcast_container import RedisBroadcastContainer @@ -39,7 +39,10 @@ def postgres_broadcast_channel(opal_network: Network): exit(1) try: - if container.get_wrapped_container().status == "running": + if container.get_wrapped_container().status == "running" or container.get_wrapped_container().status == "created": + container.stop() + else: + logger.info(f"Container status is: {container.get_wrapped_container().status}") container.stop() except Exception: logger.error(f"Failed to stop containers: {container}") @@ -64,9 +67,9 @@ def kafka_broadcast_channel(opal_network: Network): stop. """ - zookeeper_container = ZookeeperContainer(opal_network, KafkaBroadcastSettings()) - kafka_container = KafkaBroadcastContainer(opal_network, zookeeper_container, KafkaBroadcastSettings()) - kafka_ui_container = KafkaUIContainer(opal_network, kafka_container, KafkaBroadcastSettings()) + zookeeper_container = ZookeeperContainer(opal_network) + kafka_container = KafkaContainer(opal_network) + kafka_ui_container = KafkaUIContainer(opal_network) containers = [kafka_container, zookeeper_container, kafka_ui_container] yield containers diff --git a/tests/fixtures/policy_stores.py b/tests/fixtures/policy_stores.py index 8838fa833..30116de39 100644 --- a/tests/fixtures/policy_stores.py +++ b/tests/fixtures/policy_stores.py @@ -3,7 +3,7 @@ from testcontainers.core.network import Network from tests.containers.cedar_container import CedarContainer -from tests.containers.opa_container import OpaContainer, OpaSettings +from tests.containers.OPA.opa_container import OpaContainer, OpaSettings from tests.containers.settings.cedar_settings import CedarSettings diff --git a/tests/policies/rbac.rego b/tests/policies/rbac.rego index fa09dc922..c564c0544 100644 --- a/tests/policies/rbac.rego +++ b/tests/policies/rbac.rego @@ -1,9 +1,9 @@ package app.rbac -default allow = false - +import rego.v1 +default allow := false # Allow the action if the user is granted permission to perform the action. 
-allow { - # unless user location is outside US - country := data.users[input.user].location.country - country == "US" -} +allow if { + # unless user location is outside US + country := data.users[input.user].location.country + country == "US" +} \ No newline at end of file From 4e8125fe7c0884871b5ef950baa26afc60041869 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Tue, 18 Mar 2025 14:13:18 -0400 Subject: [PATCH 08/41] feat: add OPA settings and enhance broadcaster configuration for improved testing --- a.txt | 10 +++++ tests/conftest.py | 3 ++ tests/containers/OPA/OPA_settings.py | 24 ++++++++++ tests/containers/OPA/opa_container.py | 40 ++++------------- tests/containers/opal_client_container.py | 6 +++ tests/containers/permitContainer.py | 18 +++++--- .../settings/opal_client_settings.py | 4 ++ tests/fixtures/broadcasters.py | 20 ++++++--- tests/fixtures/images.py | 4 ++ tests/fixtures/policy_repos.py | 45 ++++++++++++------- tests/fixtures/policy_stores.py | 18 +++----- tests/settings.py | 2 +- tests/test_app.py | 38 +++++++++------- 13 files changed, 143 insertions(+), 89 deletions(-) create mode 100644 a.txt create mode 100644 tests/containers/OPA/OPA_settings.py diff --git a/a.txt b/a.txt new file mode 100644 index 000000000..cb49702a9 --- /dev/null +++ b/a.txt @@ -0,0 +1,10 @@ +source_repo_owner = "", +local_clone_path = "", +source_repo_name = "", +webhook_secret = "", +ssh_key_path = "", +should_fork = "", +repo_name = "", +password = "", +owner = "", +pat = "", \ No newline at end of file diff --git a/tests/conftest.py b/tests/conftest.py index 702bc2deb..9da2765e2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -340,6 +340,9 @@ def opal_clients( opal_server_url=opal_server_url, client_token=client_token, default_update_callbacks=callbacks, + # inline_opa_enabled=False, + # policy_store_url=f"http://localhost:8181",#{opa_server.settings.port}", + # opa_port=opa_server.settings.port, ), network=opal_network, ) diff --git a/tests/containers/OPA/OPA_settings.py b/tests/containers/OPA/OPA_settings.py new file mode 100644 index 000000000..599fbfe6f --- /dev/null +++ b/tests/containers/OPA/OPA_settings.py @@ -0,0 +1,24 @@ +from tests import utils +from tests.containers.settings.opal_client_settings import OpalClientSettings + + +class OpaSettings: + def __init__( + self, + image: str | None = None, + port: int | None = None, + container_name: str | None = None, + ) -> None: + self.image = image if image else "openpolicyagent/opa:0.29.0" + self.container_name = "opa" + + if port is None: + self.port = utils.find_available_port(8181) + else: + if utils.is_port_available(port): + self.port = port + else: + self.port = utils.find_available_port(8181) + + def getEnvVars(self): + return {} diff --git a/tests/containers/OPA/opa_container.py b/tests/containers/OPA/opa_container.py index 8227394e2..e091e7451 100644 --- a/tests/containers/OPA/opa_container.py +++ b/tests/containers/OPA/opa_container.py @@ -2,32 +2,9 @@ from testcontainers.core.network import Network from testcontainers.core.utils import setup_logger -from tests import utils from tests.containers.permitContainer import PermitContainer -from tests.containers.settings.opal_client_settings import OpalClientSettings - - -class OpaSettings: - def __init__( - self, - image: str | None = None, - port: int | None = None, - container_name: str | None = None, - ) -> None: - self.image = image if image else "openpolicyagent/opa:0.29.0" - self.container_name = "opa" - - if port is 
None: - self.port = utils.find_available_port(8181) - else: - if utils.is_port_available(port): - self.port = port - else: - self.port = utils.find_available_port(8181) - - def getEnvVars(self): - return {} +from tests.containers.OPA.OPA_settings import OpaSettings class OpaContainer(PermitContainer, DockerContainer): def __init__( @@ -43,20 +20,19 @@ def __init__( ) self.settings = settings self.network = network - self.logger = setup_logger(__name__) self.configure() + self.start() def configure(self): for key, value in self.settings.getEnvVars().items(): self.with_env(key, value) - self.with_name(self.settings.container_name).with_bind_ports( - 8181, self.settings.port - ).with_network(self.network).with_kwargs( - labels={"com.docker.compose.project": "pytest"} - ).with_network_aliases( - self.settings.container_name - ) + self.with_name(self.settings.container_name) + self.with_bind_ports(8181, self.settings.port) + self.with_network(self.network) + self.with_kwargs(labels={"com.docker.compose.project": "pytest"}) + self.with_network_aliases(self.settings.container_name) + self.with_command("run --log-level debug --server --addr :8181") def reload_with_settings(self, settings: OpaSettings | None = None): self.stop() diff --git a/tests/containers/opal_client_container.py b/tests/containers/opal_client_container.py index dee8d3301..7d7e77074 100644 --- a/tests/containers/opal_client_container.py +++ b/tests/containers/opal_client_container.py @@ -26,8 +26,14 @@ def __init__( def configure(self): for key, value in self.settings.getEnvVars().items(): + if key == "INLINE_OPA_ENABLED": + self.with_env(key, True) + continue self.with_env(key, value) + self.with_env("INLINE_OPA_ENABLED", True) + self.with_env("OPAL_INLINE_OPA_ENABLED", True) + self.with_name(self.settings.container_name).with_bind_ports( 7000, self.settings.port ).with_bind_ports( diff --git a/tests/containers/permitContainer.py b/tests/containers/permitContainer.py index 50858a95e..dd58d5f0d 100644 --- a/tests/containers/permitContainer.py +++ b/tests/containers/permitContainer.py @@ -10,9 +10,7 @@ def __init__(self): self.permitLogger = setup_logger(__name__) # Regex to match any ANSI-escaped timestamp in the format YYYY-MM-DDTHH:MM:SS.mmmmmm+0000 - self.timestamp_with_ansi = ( - r"\x1b\[.*?(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6}\+\d{4})" - ) + self.timestamp_with_ansi = (r"\x1b\[.*?(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6}\+\d{4})") self.errors = [] # self.check_errors() @@ -47,11 +45,17 @@ def wait_for_log( # Search for the timestamp in the line match = re.search(self.timestamp_with_ansi, decoded_line) + if (reference_timestamp is None): + if log_str in decoded_line: + log_found = True + break + if match: - log_timestamp_string = match.group(1) - log_timestamp = datetime.strptime( - log_timestamp_string, "%Y-%m-%dT%H:%M:%S.%f%z" - ) + if reference_timestamp is not None: + log_timestamp_string = match.group(1) + log_timestamp = datetime.strptime( + log_timestamp_string, "%Y-%m-%dT%H:%M:%S.%f%z" + ) if (reference_timestamp is None) or ( log_timestamp > reference_timestamp diff --git a/tests/containers/settings/opal_client_settings.py b/tests/containers/settings/opal_client_settings.py index 6e8a8e5bf..e5fd9dd27 100644 --- a/tests/containers/settings/opal_client_settings.py +++ b/tests/containers/settings/opal_client_settings.py @@ -222,6 +222,7 @@ def getEnvVars(self): "UVICORN_NUM_WORKERS": "1", "UVICORN_PORT": str(self.port), "OPAL_AUTH_PUBLIC_KEY": self.public_key, + # "POLICY_STORE_TYPE": "OPA", } if 
self.tests_debug: @@ -239,6 +240,9 @@ def getEnvVars(self): env_vars["OPAL_INLINE_OPA_EXEC_PATH"] = self.inline_opa_exec_path env_vars["OPAL_INLINE_OPA_CONFIG"] = self.inline_opa_config env_vars["OPAL_INLINE_OPA_LOG_FORMAT"] = self.inline_opa_log_format + else: + env_vars["OPAL_INLINE_OPA_ENABLED"] = self.inline_opa_enabled + if self.iniline_cedar_enabled: env_vars["OPAL_INILINE_CEDAR_ENABLED"] = self.iniline_cedar_enabled diff --git a/tests/fixtures/broadcasters.py b/tests/fixtures/broadcasters.py index affe30eb1..f1cddeb19 100644 --- a/tests/fixtures/broadcasters.py +++ b/tests/fixtures/broadcasters.py @@ -14,10 +14,12 @@ from tests.containers.settings.redis_broadcast_settings import RedisBroadcastSettings from tests.containers.kafka.zookeeper_container import ZookeeperContainer +from tests.settings import session_matrix + logger = setup_logger(__name__) -@pytest.fixture(scope="session") +# @pytest.fixture(scope="session") def postgres_broadcast_channel(opal_network: Network): """Fixture that yields a running Postgres broadcast channel container. @@ -81,7 +83,7 @@ def kafka_broadcast_channel(opal_network: Network): return -@pytest.fixture(scope="session") +# @pytest.fixture(scope="session") def redis_broadcast_channel(opal_network: Network): """Fixture that yields a running redis broadcast channel container. @@ -102,8 +104,14 @@ def redis_broadcast_channel(opal_network: Network): return +broadcasters_map = { + "postgres": postgres_broadcast_channel, + "kafka": kafka_broadcast_channel, + "redis": redis_broadcast_channel +} + @pytest.fixture(scope="session") -def broadcast_channel(opal_network: Network, postgres_broadcast_channel): +def broadcast_channel(opal_network: Network, session_matrix): """Fixture that yields a running broadcast channel container. The container is started once and kept running throughout the entire @@ -111,5 +119,7 @@ def broadcast_channel(opal_network: Network, postgres_broadcast_channel): unless an exception is raised during teardown. """ - yield postgres_broadcast_channel - return + # logger.info(f"Using {session_matrix["broadcaster"]} broadcaster") + # input() + + yield from broadcasters_map[session_matrix["broadcaster"]](opal_network) \ No newline at end of file diff --git a/tests/fixtures/images.py b/tests/fixtures/images.py index fcc7964a7..edb30db78 100644 --- a/tests/fixtures/images.py +++ b/tests/fixtures/images.py @@ -33,6 +33,10 @@ def opa_image(session_matrix): This fixture is used to provide a working OPA image for the tests. 
""" + image_name = "openpolicyagent/opa" + yield image_name + return + image_name = "opa" yield from utils.build_docker_image("Dockerfile.opa", image_name, session_matrix) diff --git a/tests/fixtures/policy_repos.py b/tests/fixtures/policy_repos.py index d6d08dc52..4fc93fc6b 100644 --- a/tests/fixtures/policy_repos.py +++ b/tests/fixtures/policy_repos.py @@ -83,23 +83,38 @@ def policy_repo( if pytest_settings.policy_repo_provider == SupportedPolicyRepo.GITEA: gitea_server = request.getfixturevalue("gitea_server") + # repo_settings = PolicyRepoSettings( + # temp_dir, + # pytest_settings.repo_owner, + # pytest_settings.repo_name, + # "master", + # gitea_settings.container_name, + # gitea_settings.port_http, + # gitea_settings.port_ssh, + # pytest_settings.repo_password, + # None, + # pytest_settings.ssh_key_path, + # pytest_settings.source_repo_owner, + # pytest_settings.source_repo_name, + # True, + # True, + # pytest_settings.webhook_secret, + # ) + + repo_settings = PolicyRepoSettings( - temp_dir, - pytest_settings.repo_owner, - pytest_settings.repo_name, - "master", - gitea_settings.container_name, - gitea_settings.port_http, - gitea_settings.port_ssh, - pytest_settings.repo_password, - None, - pytest_settings.ssh_key_path, - pytest_settings.source_repo_owner, - pytest_settings.source_repo_name, - True, - True, - pytest_settings.webhook_secret, + source_repo_owner = "", + local_clone_path = "", + source_repo_name = "", + webhook_secret = "", + ssh_key_path = "", + should_fork = "", + repo_name = "", + password = "", + owner = "", + pat = "", ) + policy_repo = PolicyRepoFactory( pytest_settings.policy_repo_provider ).get_policy_repo( diff --git a/tests/fixtures/policy_stores.py b/tests/fixtures/policy_stores.py index 30116de39..725af94f8 100644 --- a/tests/fixtures/policy_stores.py +++ b/tests/fixtures/policy_stores.py @@ -1,3 +1,4 @@ +import datetime import pytest from images import cedar_image, opa_image from testcontainers.core.network import Network @@ -33,19 +34,12 @@ def opa_server(opal_network: Network, opa_image): container : OpaContainer The OPA server container object. """ - with OpaContainer( - settings=OpaSettings( - container_name="opa", - image=opa_image, - ), - network=opal_network, - ) as container: - assert container.wait_for_log( - log_str="Server started", timeout=30 - ), "OPA server did not start." - yield container + container = OpaContainer(settings=OpaSettings(container_name="opa",image=opa_image,),network=opal_network,) + # assert container.wait_for_log(reference_timestamp=datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(seconds=1000),log_str="msg=Initializing server", timeout=5), "OPA server did not start." 
+ container.permitLogger.info("OPA server started with id: " + container.get_wrapped_container().id) + yield container - container.stop() + container.stop() @pytest.fixture(scope="session") diff --git a/tests/settings.py b/tests/settings.py index 4122ba52a..92609b585 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -104,7 +104,7 @@ def dump_settings(self): class PyTestSessionSettings(List): repo_providers = ["gitea"] modes = ["without_webhook"] - broadcasters = ["postgres"] + broadcasters = ["postgres", "redis"] broadcaster = "fgsfdg" repo_provider = "fdgdfg" mode = "rgrtre" diff --git a/tests/test_app.py b/tests/test_app.py index 4ca2d6daf..c0c84b548 100644 --- a/tests/test_app.py +++ b/tests/test_app.py @@ -32,6 +32,8 @@ def publish_data_user_location( f"-t {topics} --dst-path /users/{user}/location {DATASOURCE_TOKEN}" ) + # Log the command being executed + logger.info(f"Executing command: {publish_data_user_location_command}") # Execute the command result = subprocess.run(publish_data_user_location_command, shell=True) # Check command execution result @@ -60,11 +62,11 @@ async def data_publish_and_test( ) if allowed_country == user_country: - print( + logger.info( f"{user}'s location set to: {user_country}. current_country is set to: {allowed_country} Expected outcome: ALLOWED." ) else: - print( + logger.info( f"{user}'s location set to: {user_country}. current_country is set to: {allowed_country} Expected outcome: NOT ALLOWED." ) @@ -73,6 +75,7 @@ async def data_publish_and_test( assert await utils.opal_authorize( user, f"http://localhost:{opal_client.settings.opa_port}/v1/data/app/rbac/allow", + # f"http://localhost:8181/v1/data/app/rbac/allow", ) == (allowed_country == user_country) return True @@ -103,6 +106,7 @@ def update_policy( utils.wait_policy_repo_polling_interval(opal_server_container) + def test_topiced_user_location( opal_servers: list[OpalServerContainer], topiced_clients: dict[str, OpalClientContainer], @@ -169,7 +173,7 @@ def test_user_location( async def test_policy_and_data_updates( gitea_server: GiteaContainer, opal_servers: list[OpalServerContainer], - opal_clients: list[OpalClientContainer], + connected_clients: list[OpalClientContainer], temp_dir, ): """This script updates policy configurations and tests access based on @@ -187,10 +191,10 @@ async def test_policy_and_data_updates( for location in locations: # Update policy to allow only non-US users - print(f"Updating policy to allow only users from {location[1]}...") + logger.info(f"Updating policy to allow only users from {location[1]}...") update_policy(gitea_server, server, location[1]) - for client in opal_clients: + for client in connected_clients: assert await data_publish_and_test( "bob", location[1], @@ -211,12 +215,12 @@ def test_read_statistics( """Tests the statistics feature by verifying the number of clients and servers.""" - print("- Testing statistics feature") + logger.info("- Testing statistics feature") time.sleep(15) for server in opal_servers: - print(f"OPAL Server: {server.settings.container_name}:7002") + logger.info(f"OPAL Server: {server.settings.container_name}:7002") # The URL for statistics stats_url = f"http://localhost:{server.settings.port}/stats" @@ -227,7 +231,7 @@ def test_read_statistics( # Repeat the request multiple times for attempt in range(attempts): - print(f"Attempt {attempt + 1}/{attempts} - Checking statistics...") + logger.info(f"Attempt {attempt + 1}/{attempts} - Checking statistics...") try: time.sleep(1) @@ -235,7 +239,7 @@ def test_read_statistics( response 
= requests.get(stats_url, headers=headers) response.raise_for_status() # Raise an error for HTTP status codes 4xx/5xx - print(f"Response: {response.status_code} {response.text}") + logger.debug(f"Response: {response.status_code} {response.text}") # Look for the expected data in the response stats = utils.get_client_and_server_count(response.text) @@ -246,10 +250,10 @@ def test_read_statistics( client_count = stats["client_count"] server_count = stats["server_count"] - print( + logger.info( f"Number of OPAL servers expected: {number_of_opal_servers}, found: {server_count}" ) - print( + logger.info( f"Number of OPAL clients expected: {number_of_opal_clients}, found: {client_count}" ) @@ -265,17 +269,17 @@ def test_read_statistics( except requests.RequestException as e: if response is not None: - print(f"Request failed: {response.status_code} {response.text}") + logger.error(f"Request failed: {response.status_code} {response.text}") pytest.fail(f"Failed to fetch statistics: {e}") - print("Statistics check passed in all attempts.") + logger.info("Statistics check passed in all attempts.") @pytest.mark.asyncio async def test_policy_update( gitea_server: GiteaContainer, opal_servers: list[OpalServerContainer], - opal_clients: list[OpalClientContainer], + connected_clients: list[OpalClientContainer], temp_dir, ): # Parse locations into separate lists of IPs and countries @@ -287,8 +291,8 @@ async def test_policy_update( for server in opal_servers: # Update policy to allow only non-US users - print(f"Updating policy to allow only users from {location}...") - update_policy(gitea_server, server, "location") + logger.info(f"Updating policy to allow only users from {location}...") + update_policy(gitea_server, server, location) log_found = server.wait_for_log( "Found new commits: old HEAD was", 30, reference_timestamp @@ -298,7 +302,7 @@ async def test_policy_update( log_found ), f"Expected log entry not found in server '{server.settings.container_name}' after the reference timestamp." 
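The statistics check above is a poll-until-ready loop: request the endpoint, parse the client/server counts, retry on failure. A generic, self-contained version of that pattern is sketched below; the URL, JSON field names, and helper name are placeholders (the real test extracts the counts with utils.get_client_and_server_count on the response text):

import time
import requests


def wait_for_counts(stats_url: str, expected_clients: int, expected_servers: int,
                    attempts: int = 10, delay: float = 1.0) -> dict:
    """Poll a statistics endpoint until the expected client/server counts are reported."""
    last_error: Exception | None = None
    for attempt in range(1, attempts + 1):
        try:
            response = requests.get(stats_url, timeout=5)
            response.raise_for_status()
            stats = response.json()  # placeholder: assumes a JSON body with count fields
            if (stats.get("client_count") == expected_clients
                    and stats.get("server_count") == expected_servers):
                return stats
        except requests.RequestException as exc:
            last_error = exc
        time.sleep(delay)
    raise AssertionError(
        f"Counts did not converge after {attempts} attempts (last error: {last_error})"
    )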
- for client in opal_clients: + for client in connected_clients: log_found = client.wait_for_log( "Fetching policy bundle from", 30, reference_timestamp ) From 72fd0e4191a393b2988b9ed9e0c63e7bb6ee4167 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Mon, 24 Mar 2025 20:51:59 -0400 Subject: [PATCH 09/41] feat: remove deprecated .env.example, introduce SupportedPolicyRepo enum, and update policy repo handling in tests --- tests/.env.example | 5 - tests/conftest.py | 42 ++--- tests/containers/gitea_container.py | 4 +- tests/containers/opal_server_container.py | 1 + tests/containers/permitContainer.py | 2 - .../settings/opal_server_settings.py | 10 + tests/fixtures/policy_repos.py | 176 +++++++++++++----- tests/policy_repos/gitea_policy_repo.py | 28 ++- .../gitea_policy_repo_settings.py | 147 +++++++++++++++ tests/policy_repos/github_policy_repo.py | 176 +++++++----------- tests/policy_repos/github_policy_settings.py | 122 ++++++++++++ tests/policy_repos/policy_repo_factory.py | 20 +- tests/policy_repos/supported_policy_repo.py | 9 + tests/settings.py | 5 +- tests/test_app.py | 15 +- 15 files changed, 535 insertions(+), 227 deletions(-) delete mode 100644 tests/.env.example create mode 100644 tests/policy_repos/gitea_policy_repo_settings.py create mode 100644 tests/policy_repos/github_policy_settings.py create mode 100644 tests/policy_repos/supported_policy_repo.py diff --git a/tests/.env.example b/tests/.env.example deleted file mode 100644 index ae9fb7b10..000000000 --- a/tests/.env.example +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env bash - -export OPAL_POLICY_REPO_URL='' -export POLICY_REPO_BRANCH='' -export OPAL_POLICY_REPO_SSH_KEY='' diff --git a/tests/conftest.py b/tests/conftest.py index 9da2765e2..7b1e09cee 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -10,32 +10,14 @@ import pytest from testcontainers.core.network import Network from testcontainers.core.utils import setup_logger -from testcontainers.core.waiting_utils import wait_for_logs -import docker from tests import utils from tests.containers.broadcast_container_base import BroadcastContainerBase -from tests.containers.cedar_container import CedarContainer -from tests.containers.gitea_container import GiteaContainer -from tests.containers.kafka.kafka_broadcast_container import KafkaContainer -from tests.containers.OPA.opa_container import OpaContainer, OpaSettings from tests.containers.opal_client_container import OpalClientContainer from tests.containers.opal_server_container import OpalServerContainer -from tests.containers.postgres_broadcast_container import PostgresBroadcastContainer -from tests.containers.redis_broadcast_container import RedisBroadcastContainer -from tests.containers.settings.cedar_settings import CedarSettings -from tests.containers.settings.gitea_settings import GiteaSettings from tests.containers.settings.opal_client_settings import OpalClientSettings from tests.containers.settings.opal_server_settings import OpalServerSettings -from tests.containers.settings.postgres_broadcast_settings import ( - PostgresBroadcastSettings, -) from tests.policy_repos.policy_repo_base import PolicyRepoBase -from tests.policy_repos.policy_repo_factory import ( - PolicyRepoFactory, - SupportedPolicyRepo, -) -from tests.policy_repos.policy_repo_settings import PolicyRepoSettings from tests.settings import pytest_settings logger = setup_logger(__name__) @@ -77,7 +59,8 @@ def temp_dir(): This fixture is useful for tests that need a temporary directory to exist for 
the duration of the test session. """ - dir_path = tempfile.mkdtemp() + dir_path = tempfile.mkdtemp(prefix="opal_tests_",suffix=".tmp",dir="/opal.tmp") + os.chmod(dir_path, 0o777) # Set permissions to allow read/write/execute for all users logger.debug(f"Temporary directory created: {dir_path}") yield dir_path @@ -128,13 +111,13 @@ def number_of_opal_servers(): redis_broadcast_channel, ) from tests.fixtures.images import opal_server_image -from tests.fixtures.policy_repos import gitea_server, gitea_settings, policy_repo +from tests.fixtures.policy_repos import policy_repo @pytest.fixture(scope="session") def opal_servers( opal_network: Network, - policy_repo: PolicyRepoBase, + policy_repo, number_of_opal_servers: int, opal_server_image: str, topics: dict[str, int], @@ -180,17 +163,19 @@ def opal_servers( for i in range(number_of_opal_servers): container_name = f"opal_server_{i+1}" + repo_url = policy_repo.get_repo_url() container = OpalServerContainer( OpalServerSettings( broadcast_uri=broadcast_channel.get_url(), container_name=container_name, container_index=i + 1, uvicorn_workers="4", - policy_repo_url=policy_repo.get_repo_url(), + policy_repo_url=repo_url, image=opal_server_image, log_level="DEBUG", data_topics=" ".join(topics.keys()), polling_interval=3, + policy_repo_main_branch=policy_repo.test_branch, ), network=opal_network, ) @@ -200,10 +185,11 @@ def opal_servers( if i == 0: # Only the first server should setup the webhook - policy_repo.setup_webhook( - container.get_container_host_ip(), container.settings.port - ) - policy_repo.create_webhook() + # policy_repo.setup_webhook( + # container.get_container_host_ip(), container.settings.port + # ) + # policy_repo.create_webhook() + pass logger.info( f"Started container: {container_name}, ID: {container.get_wrapped_container().id}" @@ -489,7 +475,7 @@ def wait_sometime(): @pytest.fixture(scope="session", autouse=True) -def setup(opal_clients, session_matrix): +def setup(opal_clients, policy_repo, session_matrix): """A setup fixture that is run once per test session. 
This fixture is automatically used by all tests, and is used to set up the @@ -511,7 +497,7 @@ def setup(opal_clients, session_matrix): None """ yield - + policy_repo.cleanup(delete_ssh_key=False) if session_matrix["is_final"]: logger.info("Finalizing test session...") utils.remove_env("OPAL_TESTS_DEBUG") diff --git a/tests/containers/gitea_container.py b/tests/containers/gitea_container.py index 83c14e631..5b0399ce5 100644 --- a/tests/containers/gitea_container.py +++ b/tests/containers/gitea_container.py @@ -104,7 +104,7 @@ def create_gitea_admin_token(self): def deploy_gitea(self): """Deploy Gitea container and initialize configuration.""" self.logger.info("Deploying Gitea container...") - # self.start() + self.start() self.wait_for_gitea() self.create_gitea_user() self.access_token = self.create_gitea_admin_token() @@ -353,7 +353,7 @@ def clone_and_update( # Commit the changes if there are modifications if repo.is_dirty(): print(f"Committing changes for branch {branch}...") - repo.index.commit(COMMIT_MESSAGE) + repo.git.commit(COMMIT_MESSAGE) # Push changes to the remote repository print(f"Pushing changes for branch {branch}...") diff --git a/tests/containers/opal_server_container.py b/tests/containers/opal_server_container.py index 71c6ce50b..292b647e8 100644 --- a/tests/containers/opal_server_container.py +++ b/tests/containers/opal_server_container.py @@ -32,6 +32,7 @@ def configure(self): for key, value in self.settings.getEnvVars().items(): self.with_env(key, value) + # Configure network and other settings self.with_name(self.settings.container_name).with_bind_ports( 7002, self.settings.port diff --git a/tests/containers/permitContainer.py b/tests/containers/permitContainer.py index dd58d5f0d..f5ec0956f 100644 --- a/tests/containers/permitContainer.py +++ b/tests/containers/permitContainer.py @@ -126,9 +126,7 @@ async def check_errors(self): self.permitLogger.info(f"Checking log line: {decoded_line}") self.permitLogger.info(f"scanning line: {decoded_line}") if log_str in decoded_line: - self.permitLogger.error("\n\n\n\n") self.permitLogger.error(f"error found: {decoded_line}") - self.permitLogger.error("\n\n\n\n") self.errors.append(decoded_line) def __del__(self): diff --git a/tests/containers/settings/opal_server_settings.py b/tests/containers/settings/opal_server_settings.py index 059866bd4..352e8284d 100644 --- a/tests/containers/settings/opal_server_settings.py +++ b/tests/containers/settings/opal_server_settings.py @@ -40,6 +40,7 @@ def __init__( all_data_url: str = None, policy_repo_reuse_clone_path: bool = None, container_index: int = 1, + POLICY_REPO_SSH_KEY: str = None, **kwargs, ): """Initialize the OPAL Server with the provided parameters. 
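The settings classes touched above all follow the same convention: explicit constructor arguments override environment-variable defaults loaded by load_from_env(), and getEnvVars() flattens the result into the variables the container injects via with_env. A stripped-down sketch of that convention; the class name, attributes, and env var names here are illustrative only:

import os


class ExampleServerSettings:
    """Illustrative settings object: explicit arguments win, env vars are the fallback."""

    def __init__(self, container_name: str | None = None, port: int | None = None, **kwargs):
        self.load_from_env()
        self.container_name = container_name if container_name else self.container_name
        self.port = port if port else self.port
        # Extra keyword arguments simply become attributes, mirroring the
        # __dict__.update(kwargs) call in the following hunk.
        self.__dict__.update(kwargs)

    def load_from_env(self):
        self.container_name = os.getenv("EXAMPLE_CONTAINER_NAME", "example_server")
        self.port = int(os.getenv("EXAMPLE_PORT", "7002"))

    def getEnvVars(self) -> dict:
        return {
            "EXAMPLE_CONTAINER_NAME": self.container_name,
            "EXAMPLE_PORT": str(self.port),
        }


# Usage inside a container's configure(), as in the diff above:
#   for key, value in self.settings.getEnvVars().items():
#       self.with_env(key, value)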
@@ -149,6 +150,12 @@ def __init__( self.webhook_secret = webhook_secret if webhook_secret else self.webhook_secret self.webhook_params = webhook_params if webhook_params else self.webhook_params + self.POLICY_REPO_SSH_KEY = ( + POLICY_REPO_SSH_KEY if POLICY_REPO_SSH_KEY else self.POLICY_REPO_SSH_KEY + ) + + # Update with additional keyword arguments + self.__dict__.update(kwargs) self.validate_dependencies() @@ -185,6 +192,7 @@ def getEnvVars(self): "UVICORN_PORT": self.uvicorn_port, "OPAL_ALL_DATA_URL": self.all_data_url, "OPAL_POLICY_REPO_REUSE_CLONE_PATH": self.policy_repo_reuse_clone_path, + "OPAL_POLICY_REPO_SSH_KEY": self.POLICY_REPO_SSH_KEY } if pytest_settings.use_webhook: @@ -258,5 +266,7 @@ def load_from_env(self): ) self.uvicorn_port = os.getenv("OPAL_SERVER_UVICORN_PORT", "7002") + self.POLICY_REPO_SSH_KEY = os.getenv("OPAL_POLICY_REPO_SSH_KEY", None) + if not self.private_key or not self.public_key: self.private_key, self.public_key = utils.generate_ssh_key_pair() diff --git a/tests/fixtures/policy_repos.py b/tests/fixtures/policy_repos.py index 4fc93fc6b..ad101517b 100644 --- a/tests/fixtures/policy_repos.py +++ b/tests/fixtures/policy_repos.py @@ -6,6 +6,10 @@ from tests.containers.gitea_container import GiteaContainer from tests.containers.settings.gitea_settings import GiteaSettings +from tests.policy_repos.gitea_policy_repo import GiteaPolicyRepo +from tests.policy_repos.gitea_policy_repo_settings import GiteaPolicyRepoSettings +from tests.policy_repos.github_policy_repo import GithubPolicyRepo +from tests.policy_repos.github_policy_settings import GithubPolicyRepoSettings from tests.policy_repos.policy_repo_base import PolicyRepoBase from tests.policy_repos.policy_repo_factory import ( PolicyRepoFactory, @@ -17,7 +21,7 @@ logger = setup_logger(__name__) -@pytest.fixture(scope="session") +# @pytest.fixture(scope="session") def gitea_settings(): """Returns a GiteaSettings object with default values for the Gitea container name, repository name, temporary directory, and data directory. @@ -35,8 +39,8 @@ def gitea_settings(): ) -@pytest.fixture(scope="session") -def gitea_server(opal_network: Network, gitea_settings: GiteaSettings): +# @pytest.fixture(scope="session") +def gitea_server(opal_network: Network): """Creates a Gitea container and initializes a test repository. The Gitea container is created with the default settings for the @@ -51,19 +55,116 @@ def gitea_server(opal_network: Network, gitea_settings: GiteaSettings): :param gitea_settings: The settings for the Gitea container. :return: The GiteaContainer object. 
""" - with GiteaContainer( - settings=gitea_settings, - network=opal_network, - ) as gitea_container: - gitea_container.deploy_gitea() - gitea_container.init_repo() - yield gitea_container + gitea_container = GiteaContainer(settings=gitea_settings(), network=opal_network) + + gitea_container.deploy_gitea() + gitea_container.init_repo() + + yield gitea_container + +def create_github_policy_repo_settings(temp_dir: str, session_matrix): + return GithubPolicyRepoSettings( + temp_dir = temp_dir, + source_repo_owner = "ariWeinberg", + local_clone_path = temp_dir, + source_repo_name = "opal-tests-policy-repo", + webhook_secret = "Aa15317701", + ssh_key_path = "/home/ari/.ssh/github", + should_fork = False, + repo_name = "opal-tests-policy-repo1", + password = "", + owner = "ariWeinberg", + pat = session_matrix["github_pat"], + ) + +def create_gitea_policy_repo_settings(temp_dir: str, session_matrix, gitea_settings: GiteaSettings): + + """ + Creates a Gitea policy repository settings object. + + This method creates a Gitea policy repository settings object based on the + given parameters. The object is initialized with the default values for + the local clone path, owner, repository name, branch name, container name, + repository host, repository ports, password, SSH key path, source + repository owner and name, whether to fork the repository, whether to + create the repository, and webhook secret. + + :param temp_dir: The temporary directory to use for the policy repository. + :param session_matrix: The session matrix to use for the policy repository. + :param gitea_settings: The settings for the Gitea container. + :return: The GiteaPolicyRepoSettings object. + """ + + return GiteaPolicyRepoSettings( + local_clone_path = temp_dir, + owner = gitea_settings.username, + repo_name = gitea_settings.repo_name, + branch_name = "master", + container_name = gitea_settings.container_name, + repo_host = "localhost", + repo_port_http = gitea_settings.port_http, + repo_port_ssh = gitea_settings.port_ssh, + password = gitea_settings.password, + pat = None, + ssh_key_path = pytest_settings.ssh_key_path, + source_repo_owner = gitea_settings.username, + source_repo_name = gitea_settings.repo_name, + should_fork = False, + should_create_repo = True, + webhook_secret = pytest_settings.webhook_secret + ) + + return GiteaPolicyRepoSettings( + temp_dir, + pytest_settings.repo_owner, + pytest_settings.repo_name, + "master", + gitea_settings.container_name, + gitea_settings.port_http, + gitea_settings.port_ssh, + pytest_settings.repo_password, + None, + pytest_settings.ssh_key_path, + pytest_settings.source_repo_owner, + pytest_settings.source_repo_name, + True, + True, + pytest_settings.webhook_secret, + ) + + +# @pytest.fixture(scope="session") +def policy_repo_settings(temp_dir: str, session_matrix, opal_network, policy_repo_type = SupportedPolicyRepo.GITHUB): + + """ + Creates a policy repository settings object based on the specified type of policy repository. + + This method takes in the following parameters: + + :param temp_dir: The temporary directory to use for the policy repository. + :param session_matrix: The session matrix to use for the test. + :param opal_network: The network to create the container on. + :param policy_repo_type: The type of policy repository to create. Defaults to SupportedPolicyRepo.GITHUB. + + :return: The policy repository settings object. 
+ """ + gitea_server_settings = None + + if policy_repo_type == SupportedPolicyRepo.GITEA: + # gitea_server = request.getfixturevalue("gitea_server") + gitea_server_settings = next(gitea_server(opal_network)) + + + map = { + SupportedPolicyRepo.GITHUB: create_github_policy_repo_settings, + SupportedPolicyRepo.GITEA: lambda temp_dir, session_matrix: create_gitea_policy_repo_settings(temp_dir, session_matrix, gitea_server_settings.settings), + } + + yield (map[policy_repo_type](temp_dir, session_matrix), gitea_server_settings) @pytest.fixture(scope="session") -def policy_repo( - gitea_settings: GiteaSettings, temp_dir: str, request -) -> PolicyRepoBase: +def policy_repo(temp_dir: str, session_matrix, opal_network, request) -> GithubPolicyRepo | GiteaPolicyRepo: """Creates a policy repository for testing. This fixture creates a policy repository based on the configuration @@ -80,47 +181,20 @@ def policy_repo( :param request: The pytest request object. :return: The PolicyRepoBase object. """ - if pytest_settings.policy_repo_provider == SupportedPolicyRepo.GITEA: - gitea_server = request.getfixturevalue("gitea_server") - - # repo_settings = PolicyRepoSettings( - # temp_dir, - # pytest_settings.repo_owner, - # pytest_settings.repo_name, - # "master", - # gitea_settings.container_name, - # gitea_settings.port_http, - # gitea_settings.port_ssh, - # pytest_settings.repo_password, - # None, - # pytest_settings.ssh_key_path, - # pytest_settings.source_repo_owner, - # pytest_settings.source_repo_name, - # True, - # True, - # pytest_settings.webhook_secret, - # ) - - - repo_settings = PolicyRepoSettings( - source_repo_owner = "", - local_clone_path = "", - source_repo_name = "", - webhook_secret = "", - ssh_key_path = "", - should_fork = "", - repo_name = "", - password = "", - owner = "", - pat = "", - ) + + settings, server = next(policy_repo_settings(temp_dir, session_matrix, opal_network, session_matrix["repo_provider"])) policy_repo = PolicyRepoFactory( - pytest_settings.policy_repo_provider + session_matrix["repo_provider"], ).get_policy_repo( - repo_settings, + settings, logger, ) - policy_repo.setup(gitea_settings) - return policy_repo + # policy_repo.setup(gitea_settings) + policy_repo.setup() + yield policy_repo + + if server is not None: + server.get_wrapped_container().kill() + server.get_wrapped_container().remove(v=True, force=True) diff --git a/tests/policy_repos/gitea_policy_repo.py b/tests/policy_repos/gitea_policy_repo.py index e3efa161c..44371203c 100644 --- a/tests/policy_repos/gitea_policy_repo.py +++ b/tests/policy_repos/gitea_policy_repo.py @@ -1,26 +1,34 @@ import codecs import os +import shutil from git import GitCommandError, Repo from tests.containers.settings.gitea_settings import GiteaSettings +from tests.policy_repos.gitea_policy_repo_settings import GiteaPolicyRepoSettings from tests.policy_repos.policy_repo_base import PolicyRepoBase from tests.policy_repos.policy_repo_settings import PolicyRepoSettings +from testcontainers.core.utils import setup_logger + class GiteaPolicyRepo(PolicyRepoBase): - def __init__(self, settings: PolicyRepoSettings, *args): + def __init__(self, settings: GiteaPolicyRepoSettings, *args): super().__init__() + self.logger = setup_logger(__name__) self.settings = settings - def setup(self, settings: PolicyRepoSettings): - self.settings = settings + # def setup(self, settings: PolicyRepoSettings): + # self.settings = settings + + def setup(self): + self.test_branch = self.settings.branch_name def get_repo_url(self): if self.settings is None: raise 
Exception("Gitea settings not set") - return f"http://{self.settings.container_name}:{self.settings.port_http}/{self.settings.username}/{self.settings.repo_name}.git" + return f"http://{self.settings.container_name}:{self.settings.repo_port_http}/{self.settings.username}/{self.settings.repo_name}.git" def clone_and_update( self, @@ -33,7 +41,7 @@ def clone_and_update( ): """Clone the repository, update the specified branch, and push changes.""" - self.prepare_directory(CLONE_DIR) # Clean up and prepare the directory + # self.prepare_directory(CLONE_DIR) # Clean up and prepare the directory print(f"Processing branch: {branch}") # Clone the repository for the specified branch @@ -60,9 +68,13 @@ def clone_and_update( repo.git.push(authenticated_url, branch) except GitCommandError as e: print(f"Error pushing branch {branch}: {e}") + finally: + repo.close() + shutil.rmtree(CLONE_DIR) - def update_branch(self, branch, file_name, file_content): + def update_branch(self, file_name, file_content): temp_dir = self.settings.local_clone_path + branch = self.test_branch self.logger.info( f"Updating branch '{branch}' with file '{file_name}' content..." @@ -71,7 +83,7 @@ def update_branch(self, branch, file_name, file_content): # Decode escape sequences in the file content file_content = codecs.decode(file_content, "unicode_escape") - GITEA_REPO_URL = f"http://localhost:{self.settings.repo_port}/{self.settings.owner}/{self.settings.repo_name}.git" + GITEA_REPO_URL = f"http://localhost:{self.settings.repo_port_http}/{self.settings.username}/{self.settings.repo_name}.git" username = self.settings.owner PASSWORD = self.settings.password CLONE_DIR = os.path.join(temp_dir, "branch_update") @@ -96,7 +108,7 @@ def update_branch(self, branch, file_name, file_content): # Ensure cleanup is performed regardless of success or failure self.cleanup(CLONE_DIR) - def cleanup(self): + def cleanup(self, *args, **kwargs): return super().cleanup() def setup_webhook(self, host, port): diff --git a/tests/policy_repos/gitea_policy_repo_settings.py b/tests/policy_repos/gitea_policy_repo_settings.py new file mode 100644 index 000000000..515f26342 --- /dev/null +++ b/tests/policy_repos/gitea_policy_repo_settings.py @@ -0,0 +1,147 @@ +import os +from tests.policy_repos.supported_policy_repo import SupportedPolicyRepo +from testcontainers.core.utils import setup_logger + +class GiteaPolicyRepoSettings: + def __init__( + self, + local_clone_path: str | None = None, + owner: str | None = None, + username: str | None = None, + repo_name: str | None = None, + branch_name: str | None = None, + container_name: str | None = None, + repo_host: str | None = None, + repo_port_http: int | None = None, + repo_port_ssh: int | None = None, + password: str | None = None, + pat: str | None = None, + ssh_key_path: str | None = None, + source_repo_owner: str | None = None, + source_repo_name: str | None = None, + should_fork: bool = False, + should_create_repo: bool = False, # if True, will create the repo, if the should_fork is False. + # If should_fork is True, it will fork and not create the repo from scratch. + # if False, the an existing repository is expected + webhook_secret: str | None = None, + ): + + """ + GiteaPolicyRepoSettings initialization. + + This method is used to initialize the Gitea policy repository settings. + It takes in the following parameters: + + Args: + local_clone_path (str, optional): The local path to clone the repo to. Defaults to None. + owner (str, optional): The owner of the repo. Defaults to None. 
+ repo_name (str, optional): The name of the repo. Defaults to None. + branch_name (str, optional): The name of the branch. Defaults to None. + repo_host (str, optional): The host of the repo. Defaults to None. + repo_port_http (int, optional): The HTTP port of the repo. Defaults to None. + repo_port_ssh (int, optional): The SSH port of the repo. Defaults to None. + password (str, optional): The password to use for authentication. Defaults to None. + pat (str, optional): The personal access token to use for authentication. Defaults to None. + ssh_key_path (str, optional): The path to the SSH key to use for authentication. Defaults to None. + source_repo_owner (str, optional): The owner of the source repo. Defaults to None. + source_repo_name (str, optional): The name of the source repo. Defaults to None. + should_fork (bool, optional): Whether to fork the repo. Defaults to False. + should_create_repo (bool, optional): Whether to create the repo. Defaults to False. + webhook_secret (str, optional): The secret to use for the webhook. Defaults to None. + + This method sets the following attributes of the Gitea policy repository settings: + local_clone_path, owner, repo_name, branch_name, repo_host, repo_port_http, repo_port_ssh, password, pat, ssh_key_path, source_repo_owner, source_repo_name, should_fork, should_create_repo, webhook_secret. + """ + + self.policy_repo_type = SupportedPolicyRepo.GITEA + + self.logger = setup_logger(__name__) + + # Load from environment variables + self.load_from_env() + + # Set attributes + self.username = username if username else self.username + self.local_clone_path = local_clone_path if local_clone_path else self.local_clone_path + self.owner = owner if owner else self.owner + self.repo_name = repo_name if repo_name else self.repo_name + self.branch_name = branch_name if branch_name else self.branch_name + self.container_name = container_name if container_name else self.container_name + self.repo_host = repo_host if repo_host else self.repo_host + self.repo_port_http = repo_port_http if repo_port_http else self.repo_port_http + self.repo_port_ssh = repo_port_ssh if repo_port_ssh else self.repo_port_ssh + self.password = password if password else self.password + self.pat = pat if pat else self.pat + self.ssh_key_path = ssh_key_path if ssh_key_path else self.ssh_key_path + self.source_repo_owner = source_repo_owner if source_repo_owner else self.source_repo_owner + self.source_repo_name = source_repo_name if source_repo_name else self.source_repo_name + self.should_fork = should_fork if should_fork else self.should_fork + self.should_create_repo = should_create_repo if should_create_repo else self.should_create_repo + self.webhook_secret = webhook_secret if webhook_secret else self.webhook_secret + + self.validate_dependencies() + + def load_from_env(self): + """ + Loads environment variables into the Gitea policy repository settings. + + This method retrieves various environment variables required for configuring + the Gitea policy repository settings and assigns them to the corresponding + attributes of the settings object. It provides flexibility to configure + repository details, authentication credentials, and other configurations + through environment variables. + + Attributes set by this method: + - owner: The owner of the target repository. + - github_pat: The GitHub personal access token for accessing the repository. + - ssh_key_path: The path to the SSH key used for repository access. + - repo: The name of the target repository. 
+ - source_repo_owner: The owner of the source repository. + - source_repo_name: The name of the source repository. + - webhook_secret: The secret used for authenticating webhooks. + - repo_host: The host address for the Gitea server. + - repo_port_http: The HTTP port for the Gitea server. + - repo_port_ssh: The SSH port for the Gitea server. + - password: The password for accessing the Gitea repository. + - pat: The personal access token for the Gitea repository. + - branch_name: The name of the branch in the Gitea repository. + - should_fork: Whether to fork the Gitea repository. + - should_create_repo: Whether to create the Gitea repository. + - local_clone_path: The local path to clone the Gitea repository. + """ + + # Load from environment variables + self.username = os.getenv("gitea_username", "permitAdmin") + self.owner = os.getenv("OPAL_TARGET_ACCOUNT", None) + self.github_pat = os.getenv("OPAL_GITHUB_PAT", None) + self.ssh_key_path = os.getenv("OPAL_PYTEST_POLICY_REPO_SSH_KEY_PATH", "~/.ssh/id_rsa") + self.repo = os.getenv("OPAL_TARGET_REPO_NAME", "opal-example-policy-repo") + self.source_repo_owner = os.getenv("OPAL_SOURCE_ACCOUNT", "permitio") + self.source_repo_name = os.getenv("OPAL_SOURCE_REPO_NAME", "opal-example-policy-repo") + self.webhook_secret: str = os.getenv("OPAL_WEBHOOK_SECRET", "xxxxx") + + self.repo_host = os.getenv("OPAL_GITEA_HOST", "127.0.0.1") + self.repo_port_http = os.getenv("OPAL_GITEA_PORT_HTTP", 3000) + self.repo_port_ssh = os.getenv("OPAL_GITEA_PORT_SSH", 22) + + self.container_name = os.getenv("OPAL_GITEA_CONTAINER_NAME", "gitea_server") + + self.password = os.getenv("OPAL_GITEA_PASSWORD", "password") + self.pat = os.getenv("OPAL_GITEA_PAT", "pat") + self.ssh_key_path = os.getenv("OPAL_GITEA_SSH_KEY_PATH", "~/.ssh/id_rsa") + self.branch_name = os.getenv("OPAL_GITEA_BRANCH_NAME", "main") + self.should_fork = os.getenv("OPAL_GITEA_FORK", False) + self.should_create_repo = os.getenv("OPAL_GITEA_CREATE_REPO", False) + + self.webhook_secret: str = os.getenv("OPAL_WEBHOOK_SECRET", "xxxxx") + + self.local_clone_path = os.getenv("OPAL_GITEA_LOCAL_CLONE_PATH", None) + + self.source_repo_owner = os.getenv("OPAL_SOURCE_ACCOUNT", "permitio") + self.source_repo_name = os.getenv("OPAL_SOURCE_REPO_NAME", "opal-example-policy-repo") + + def validate_dependencies(self): + """Validate required dependencies before starting the server.""" + if not self.local_clone_path: + raise ValueError("OPAL_GITEA_LOCAL_CLONE_PATH is required.") + self.logger.info(f"Gitea policy repo settings | Dependencies validated successfully.") \ No newline at end of file diff --git a/tests/policy_repos/github_policy_repo.py b/tests/policy_repos/github_policy_repo.py index 5b7f44eaf..51ee0c35e 100644 --- a/tests/policy_repos/github_policy_repo.py +++ b/tests/policy_repos/github_policy_repo.py @@ -11,6 +11,7 @@ from testcontainers.core.utils import setup_logger from tests import utils +from tests.policy_repos.github_policy_settings import GithubPolicyRepoSettings from tests.policy_repos.policy_repo_base import PolicyRepoBase from tests.policy_repos.policy_repo_settings import PolicyRepoSettings @@ -18,68 +19,20 @@ class GithubPolicyRepo(PolicyRepoBase): def __init__( self, - settings: PolicyRepoSettings, + settings: GithubPolicyRepoSettings, logger: logging.Logger = setup_logger(__name__), ): self.logger = logger - self.load_from_env() - - self.protocol = "git" - self.host = "github.com" - self.port = 22 - self.temp_dir = settings.local_clone_path - self.ssh_key_name = "OPAL_PYTEST" - - self.owner = 
settings.owner if settings.owner else self.owner - self.password = settings.password - self.github_pat = settings.pat if settings.pat else self.github_pat - self.repo = settings.repo_name if settings.repo_name else self.repo - - self.source_repo_owner = ( - settings.source_repo_owner - if settings.source_repo_owner - else self.source_repo_owner - ) - self.source_repo_name = ( - settings.source_repo_name - if settings.source_repo_name - else self.source_repo_name - ) - - self.local_repo_path = os.path.join(self.temp_dir, self.source_repo_name) - self.ssh_key_path = ( - settings.ssh_key_path if settings.ssh_key_path else self.ssh_key_path - ) - self.should_fork = settings.should_fork - self.webhook_secret = ( - settings.webhook_secret if settings.webhook_secret else self.webhook_secret - ) - - if not self.password and not self.github_pat and not self.ssh_key_path: - self.logger.error("No password or Github PAT or SSH key provided.") - raise Exception("No authentication method provided.") + self.settings = settings self.load_ssh_key() - def load_from_env(self): - self.owner = os.getenv("OPAL_TARGET_ACCOUNT", None) - self.github_pat = os.getenv("OPAL_GITHUB_PAT", None) - self.ssh_key_path = os.getenv( - "OPAL_PYTEST_POLICY_REPO_SSH_KEY_PATH", "~/.ssh/id_rsa" - ) - self.repo = os.getenv("OPAL_TARGET_REPO_NAME", "opal-example-policy-repo") - self.source_repo_owner = os.getenv("OPAL_SOURCE_ACCOUNT", "permitio") - self.source_repo_name = os.getenv( - "OPAL_SOURCE_REPO_NAME", "opal-example-policy-repo" - ) - self.webhook_secret: str = os.getenv("OPAL_WEBHOOK_SECRET", "xxxxx") - def load_ssh_key(self): - if self.ssh_key_path.startswith("~"): - self.ssh_key_path = os.path.expanduser("~/.ssh/id_rsa") + if self.settings.ssh_key_path.startswith("~"): + self.settings.ssh_key_path = os.path.expanduser("~/.ssh/id_rsa") - if not os.path.exists(self.ssh_key_path): - self.logger.debug(f"SSH key file not found at {self.ssh_key_path}") + if not os.path.exists(self.settings.ssh_key_path): + self.logger.debug(f"SSH key file not found at {self.settings.ssh_key_path}") self.logger.debug("Generating new SSH key...") ssh_keys = utils.generate_ssh_key_pair() @@ -87,7 +40,7 @@ def load_ssh_key(self): self.private_key = ssh_keys["private"] try: - with open(self.ssh_key_path, "r") as ssh_key_file: + with open(self.settings.ssh_key_path, "r") as ssh_key_file: self.ssh_key = ssh_key_file.read().strip() os.environ["OPAL_POLICY_REPO_SSH_KEY"] = self.ssh_key @@ -108,37 +61,37 @@ def set_envvars(self): env_file.write(f'OPAL_POLICY_REPO_SSH_KEY="{self.ssh_key}"\n') def get_repo_url(self): - return self.build_repo_url(self.owner, self.repo) + return self.build_repo_url(self.settings.owner, self.settings.repo) def build_repo_url(self, owner, repo) -> str: if owner is None: raise Exception("Owner not set") - if self.protocol == "ssh" or self.protocol == "git": - return f"git@{self.host}:{owner}/{repo}.git" + if self.settings.protocol == "ssh" or self.settings.protocol == "git": + return f"git@{self.settings.host}:{owner}/{repo}.git" - if self.protocol == "http" or self.protocol == "https": - if self.github_pat: - return f"{self.protocol}://{self.host}/{owner}/{repo}.git" + if self.settings.protocol == "http" or self.settings.protocol == "https": + if self.settings.github_pat: + return f"{self.settings.protocol}://{self.settings.host}/{owner}/{repo}.git" - if self.password is None and self.github_pat is None and self.ssh_key is None: + if self.settings.password is None and self.settings.github_pat is None and self.ssh_key is None: 
raise Exception("No authentication method set") - return f"{self.protocol}://{self.owner}:{self.password}@{self.host}:{self.port}/{owner}/{repo}" + return f"{self.settings.protocol}://{self.settings.owner}:{self.settings.password}@{self.settings.host}:{self.settings.port}/{owner}/{repo}" def get_source_repo_url(self): - return self.build_repo_url(self.source_repo_owner, self.source_repo_name) + return self.build_repo_url(self.settings.source_repo_owner, self.settings.source_repo_name) def clone_initial_repo(self): - Repo.clone_from(self.get_source_repo_url(), self.local_repo_path) + Repo.clone_from(self.get_source_repo_url(), self.settings.local_repo_path) def check_repo_exists(self): try: - gh = Github(auth=Auth.Token(self.github_pat)) + gh = Github(auth=Auth.Token(self.settings.github_pat)) repo_list = gh.get_user().get_repos() for repo in repo_list: - if repo.full_name == self.repo: - self.logger.debug(f"Repository {self.repo} already exists.") + if repo.full_name == self.settings.repo: + self.logger.debug(f"Repository {self.settings.repo} already exists.") return True except Exception as e: @@ -151,9 +104,9 @@ def create_target_repo(self): return try: - gh = Github(auth=Auth.Token(self.github_pat)) - gh.get_user().create_repo(self.repo) - self.logger.info(f"Repository {self.repo} created successfully.") + gh = Github(auth=Auth.Token(self.settings.github_pat)) + gh.get_user().create_repo(self.settings.repo) + self.logger.info(f"Repository {self.settings.repo} created successfully.") except Exception as e: self.logger.error(f"Error creating repository: {e}") @@ -161,14 +114,14 @@ def fork_target_repo(self): if self.check_repo_exists(): return - self.logger.debug(f"Forking repository {self.source_repo_name}...") + self.logger.debug(f"Forking repository {self.settings.source_repo_name}...") - if self.github_pat is None: + if self.settings.github_pat is None: try: - gh = Github(auth=Auth.Token(self.github_pat)) - gh.get_user().create_fork(self.source_repo_owner, self.source_repo_name) + gh = Github(auth=Auth.Token(self.settings.github_pat)) + gh.get_user().create_fork(self.settings.source_repo_owner, self.settings.source_repo_name) self.logger.info( - f"Repository {self.source_repo_name} forked successfully." + f"Repository {self.settings.source_repo_name} forked successfully." 
) except Exception as e: self.logger.error(f"Error forking repository: {e}") @@ -176,9 +129,9 @@ def fork_target_repo(self): # Try with PAT try: - headers = {"Authorization": f"token {self.github_pat}"} + headers = {"Authorization": f"token {self.settings.github_pat}"} response = requests.post( - f"https://api.github.com/repos/{self.source_repo_owner}/{self.source_repo_name}/forks", + f"https://api.github.com/repos/{self.settings.source_repo_owner}/{self.settings.source_repo_name}/forks", headers=headers, ) if response.status_code == 202: @@ -198,13 +151,13 @@ def delete_test_branches(self): repository.""" try: - self.logger.info(f"Deleting test branches from {self.repo}...") + self.logger.info(f"Deleting test branches from {self.settings.repo}...") # Initialize Github API - gh = Github(auth=Auth.Token(self.github_pat)) + gh = Github(auth=Auth.Token(self.settings.github_pat)) # Get the repository - repo = gh.get_user().get_repo(self.repo) + repo = gh.get_user().get_repo(self.settings.repo) # Enumerate branches and delete pytest- branches branches = repo.get_branches() @@ -231,7 +184,7 @@ def generate_test_branch(self): def create_test_branch(self): try: # Initialize the repository - repo = Repo(self.local_repo_path) + repo = Repo(self.settings.local_repo_path) # Ensure the repository is clean if repo.is_dirty(untracked_files=True): @@ -240,7 +193,7 @@ def create_test_branch(self): ) # Set the origin remote URL - remote_url = f"https://github.com/{self.owner}/{self.repo}.git" + remote_url = f"https://github.com/{self.settings.owner}/{self.settings.repo}.git" if "origin" in repo.remotes: origin = repo.remote(name="origin") origin.set_url(remote_url) # Update origin URL if it exists @@ -267,7 +220,7 @@ def create_test_branch(self): self.logger.error(f"An error occurred: {e}") def cleanup(self, delete_repo=True, delete_ssh_key=True): - subprocess.run(["rm", "-rf", "./opal-example-policy-repo"], check=True) + subprocess.run(["rm", "-rf", self.settings.local_repo_path], check=True) self.delete_test_branches() @@ -278,11 +231,11 @@ def cleanup(self, delete_repo=True, delete_ssh_key=True): self.delete_ssh_key() def delete_ssh_key(self): - gh = Github(auth=Auth.Token(self.github_pat)) + gh = Github(auth=Auth.Token(self.settings.github_pat)) user = gh.get_user() keys = user.get_keys() for key in keys: - if key.title == self.ssh_key_name: + if key.title == self.settings.ssh_key_name: key.delete() self.logger.debug(f"SSH key deleted: {key.title}") break @@ -293,17 +246,17 @@ def delete_ssh_key(self): def delete_repo(self): try: - gh = Github(auth=Auth.Token(self.github_pat)) - repo = gh.get_user().get_repo(self.repo) + gh = Github(auth=Auth.Token(self.settings.github_pat)) + repo = gh.get_user().get_repo(self.settings.repo) repo.delete() - self.logger.debug(f"Repository {self.repo} deleted successfully.") + self.logger.debug(f"Repository {self.settings.repo} deleted successfully.") except Exception as e: self.logger.error(f"Error deleting repository: {e}") def setup(self): self.clone_initial_repo() - if self.should_fork: + if self.settings.should_fork: self.fork_target_repo() else: self.create_target_repo() @@ -312,23 +265,23 @@ def setup(self): self.create_test_branch() def add_ssh_key(self): - gh = Github(auth=Auth.Token(self.github_pat)) + gh = Github(auth=Auth.Token(self.settings.github_pat)) user = gh.get_user() keys = user.get_keys() for key in keys: - if key.title == self.ssh_key_name: + if key.title == self.settings.ssh_key_name: return - key = user.create_key(self.ssh_key_name, self.ssh_key) 
+ key = user.create_key(self.settings.ssh_key_name, self.ssh_key) self.logger.info(f"SSH key added: {key.title}") def create_webhook(self): try: - gh = Github(auth=Auth.Token(self.github_pat)) + gh = Github(auth=Auth.Token(self.settings.github_pat)) self.logger.info( - f"Creating webhook for repository {self.owner}/{self.repo}" + f"Creating webhook for repository {self.settings.owner}/{self.settings.repo}" ) - repo = gh.get_user().get_repo(f"{self.repo}") + repo = gh.get_user().get_repo(f"{self.settings.repo}") url = utils.create_localtunnel(self.webhook_port) self.logger.info(f"Webhook URL: {url}") self.github_webhook = repo.create_hook( @@ -348,8 +301,8 @@ def create_webhook(self): def delete_webhook(self): try: - gh = Github(auth=Auth.Token(self.github_pat)) - repo = gh.get_user().get_repo(f"{self.repo}") + gh = Github(auth=Auth.Token(self.settings.github_pat)) + repo = gh.get_user().get_repo(f"{self.settings.repo}") repo.delete_hook(self.github_webhook.id) self.logger.info("Webhook deleted successfully.") except Exception as e: @@ -365,31 +318,32 @@ def update_branch(self, file_name, file_content): file_content = codecs.decode(file_content, "unicode_escape") # Create or update the specified file with the provided content - file_path = os.path.join(self.local_repo_path, file_name) + file_path = os.path.join(self.settings.local_repo_path, file_name) with open(file_path, "w") as f: f.write(file_content) - if file_content is None: - with open(file_path, "r") as f: - file_content = f.read() + # if file_content is None: + # with open(file_path, "r") as f: + # file_content = f.read() try: # Stage the changes self.logger.debug(f"Staging changes for branch {self.test_branch}...") - gh = Github(auth=Auth.Token(self.github_pat)) - repo = gh.get_user().get_repo(self.repo) + gh = Github(auth=Auth.Token(self.settings.github_pat)) + repo = gh.get_user().get_repo(self.settings.repo) branch_ref = f"heads/{self.test_branch}" ref = repo.get_git_ref(branch_ref) latest_commit = repo.get_git_commit(ref.object.sha) - base_tree = latest_commit.commit.tree + base_tree = latest_commit.tree + from github.InputGitTreeElement import InputGitTreeElement new_tree = repo.create_git_tree( [ - { - "path": file_name, - "mode": "100644", - "type": "blob", - "content": file_content, - } + InputGitTreeElement( + path=file_name, + mode="100644", + type="blob", + content=file_content, + ) ], base_tree, ) diff --git a/tests/policy_repos/github_policy_settings.py b/tests/policy_repos/github_policy_settings.py new file mode 100644 index 000000000..7c7c108a3 --- /dev/null +++ b/tests/policy_repos/github_policy_settings.py @@ -0,0 +1,122 @@ +from testcontainers.core.utils import setup_logger +import os + +from tests.policy_repos.supported_policy_repo import SupportedPolicyRepo + +class GithubPolicyRepoSettings: + """ + GithubPolicyRepoSettings class. + + This class is used to store the settings for the GithubPolicyRepo. + It is initialized with the following parameters: + + Args: + temp_dir (str): Path of the temporary directory to clone the repo to. + local_clone_path (str, optional): The local path to clone the repo to. Defaults to None. + owner (str, optional): The owner of the repo. Defaults to None. + password (str, optional): The password to use for authentication. Defaults to None. + pat (str, optional): The personal access token to use for authentication. Defaults to None. + repo_name (str, optional): The name of the repo. Defaults to None. + source_repo_owner (str, optional): The owner of the source repo. 
Defaults to None. + source_repo_name (str, optional): The name of the source repo. Defaults to None. + ssh_key_path (str, optional): The path to the SSH key to use for authentication. Defaults to None. + should_fork (bool, optional): Whether to fork the repo. Defaults to False. + webhook_secret (str, optional): The secret to use for the webhook. Defaults to None. + """ + def __init__( + self, + temp_dir: str, + local_clone_path: str | None = None, + owner: str | None = None, + password: str | None = None, + pat: str | None = None, + repo_name: str | None = None, + source_repo_owner: str | None = None, + source_repo_name: str | None = None, + ssh_key_path: str | None = None, + should_fork: bool | None = False, + webhook_secret: str | None = None + ): + + """ + GithubPolicyRepoSettings initialization. + + This class is used to store the settings for the GithubPolicyRepo. + It is initialized with the following parameters: + + Args: + temp_dir (str): Path of the temporary directory to clone the repo to. + local_clone_path (str, optional): The local path to clone the repo to. Defaults to None. + owner (str, optional): The owner of the repo. Defaults to None. + password (str, optional): The password to use for authentication. Defaults to None. + pat (str, optional): The personal access token to use for authentication. Defaults to None. + repo_name (str, optional): The name of the repo. Defaults to None. + source_repo_owner (str, optional): The owner of the source repo. Defaults to None. + source_repo_name (str, optional): The name of the source repo. Defaults to None. + ssh_key_path (str, optional): The path to the SSH key to use for authentication. Defaults to None. + should_fork (bool, optional): Whether to fork the repo. Defaults to False. + webhook_secret (str, optional): The secret to use for the webhook. Defaults to None. 
+ """ + + self.policy_repo_type: str = SupportedPolicyRepo.GITHUB + + # Set the logger + self.logger = setup_logger(__name__) + + # Load environment variables + self.load_from_env() + + # Set the protocol, host, and port for the repo + self.protocol = "git" + self.host = "github.com" + self.port = 22 + + # Set the temporary directory to clone the repo to + self.temp_dir = temp_dir + + # Set the name of the SSH key to use + self.ssh_key_name = "OPAL_PYTEST" + + # Set the owner and password of the repo + self.owner = owner if owner else self.owner + self.password = password + + # Set the personal access token to use for authentication + self.github_pat = pat if pat else self.github_pat + + # Set the name of the repo + self.repo = repo_name if repo_name else self.repo + + # Set the owner and name of the source repo + self.source_repo_owner = source_repo_owner if source_repo_owner else self.source_repo_owner + self.source_repo_name = source_repo_name if source_repo_name else self.source_repo_name + + # Set the local path to clone the repo to + self.local_repo_path = os.path.join(temp_dir, self.source_repo_name) + + # Set the path to the SSH key to use for authentication + self.ssh_key_path = ssh_key_path if ssh_key_path else self.ssh_key_path + + # Set whether to fork the repo + self.should_fork = should_fork + + # Set the secret to use for the webhook + self.webhook_secret = webhook_secret if webhook_secret else self.webhook_secret + + + # Validate the dependencies and load the SSH key + self.validate_dependencies() + + def load_from_env(self): + self.owner = os.getenv("OPAL_TARGET_ACCOUNT", None) + self.github_pat = os.getenv("OPAL_GITHUB_PAT", None) + self.ssh_key_path = os.getenv("OPAL_PYTEST_POLICY_REPO_SSH_KEY_PATH", "~/.ssh/id_rsa") + self.repo = os.getenv("OPAL_TARGET_REPO_NAME", "opal-example-policy-repo") + self.source_repo_owner = os.getenv("OPAL_SOURCE_ACCOUNT", "permitio") + self.source_repo_name = os.getenv("OPAL_SOURCE_REPO_NAME", "opal-example-policy-repo") + self.webhook_secret: str = os.getenv("OPAL_WEBHOOK_SECRET", "xxxxx") + + def validate_dependencies(self): + if not self.password and not self.github_pat and not self.ssh_key_path: + self.logger.error("No password or Github PAT or SSH key provided.") + raise Exception("No authentication method provided.") diff --git a/tests/policy_repos/policy_repo_factory.py b/tests/policy_repos/policy_repo_factory.py index e3fe2aa5e..b5a3f33f0 100644 --- a/tests/policy_repos/policy_repo_factory.py +++ b/tests/policy_repos/policy_repo_factory.py @@ -4,21 +4,13 @@ from testcontainers.core.utils import setup_logger +from tests.policy_repos.supported_policy_repo import SupportedPolicyRepo from tests.policy_repos.gitea_policy_repo import GiteaPolicyRepo from tests.policy_repos.github_policy_repo import GithubPolicyRepo from tests.policy_repos.gitlab_policy_repo import GitlabPolicyRepo from tests.policy_repos.policy_repo_base import PolicyRepoBase from tests.policy_repos.policy_repo_settings import PolicyRepoSettings - -class SupportedPolicyRepo(Enum): - GITEA = "Gitea" - GITHUB = "Github" - GITLAB = "Gitlab" - # BITBUCKET = "Bitbucket" - # AZURE_DEVOPS = "AzureDevOps" - - # Factory class to create a policy repository object based on the type of policy repository. 
class PolicyRepoFactory: def __init__(self, policy_repo: str = SupportedPolicyRepo.GITEA): @@ -31,15 +23,21 @@ def __init__(self, policy_repo: str = SupportedPolicyRepo.GITEA): def get_policy_repo( self, - settings: PolicyRepoSettings, + settings, logger: logging.Logger = setup_logger(__name__), - ) -> PolicyRepoBase: + ) -> GithubPolicyRepo | GiteaPolicyRepo: factory = { SupportedPolicyRepo.GITEA: GiteaPolicyRepo, SupportedPolicyRepo.GITHUB: GithubPolicyRepo, SupportedPolicyRepo.GITLAB: GitlabPolicyRepo, } + assert settings is not None, "Settings must be provided" + assert settings.policy_repo_type == self.policy_repo, ( + f"Settings policy_repo_type must be {self.policy_repo}, " + f"but got {settings.policy_repo_type}" + ) + return factory[SupportedPolicyRepo(self.policy_repo)](settings) def assert_exists(self, policy_repo: str) -> bool: diff --git a/tests/policy_repos/supported_policy_repo.py b/tests/policy_repos/supported_policy_repo.py new file mode 100644 index 000000000..1ad894130 --- /dev/null +++ b/tests/policy_repos/supported_policy_repo.py @@ -0,0 +1,9 @@ +from enum import Enum + + +class SupportedPolicyRepo(Enum): + GITEA = "Gitea" + GITHUB = "Github" + GITLAB = "Gitlab" + # BITBUCKET = "Bitbucket" + # AZURE_DEVOPS = "AzureDevOps" diff --git a/tests/settings.py b/tests/settings.py index 92609b585..9087f3224 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -102,9 +102,9 @@ def dump_settings(self): class PyTestSessionSettings(List): - repo_providers = ["gitea"] + repo_providers = [SupportedPolicyRepo.GITHUB]#, SupportedPolicyRepo.GITEA] modes = ["without_webhook"] - broadcasters = ["postgres", "redis"] + broadcasters = ["postgres"]#, "redis"] broadcaster = "fgsfdg" repo_provider = "fdgdfg" mode = "rgrtre" @@ -161,6 +161,7 @@ def __next__(self): "mode": self.mode, "is_final": (self.current_broadcaster >= len(self.broadcasters)), "is_first": is_first, + "github_pat":pytest_settings.github_pat, } print("Finished iterating over PyTestSessionSettings...") diff --git a/tests/test_app.py b/tests/test_app.py index c0c84b548..54b1aa525 100644 --- a/tests/test_app.py +++ b/tests/test_app.py @@ -13,6 +13,7 @@ from tests.containers.gitea_container import GiteaContainer from tests.containers.opal_client_container import OpalClientContainer, PermitContainer from tests.containers.opal_server_container import OpalServerContainer +from tests.policy_repos.policy_repo_base import PolicyRepoBase from tests.policy_repos.policy_repo_factory import SupportedPolicyRepo from tests.settings import PyTestSessionSettings, session_matrix @@ -81,14 +82,14 @@ async def data_publish_and_test( def update_policy( - gitea_container: GiteaContainer, + policy_repo: PolicyRepoBase, opal_server_container: OpalServerContainer, country_value, ): """Update the policy file dynamically.""" - gitea_container.update_branch( - opal_server_container.settings.policy_repo_main_branch, + policy_repo.update_branch( + # opal_server_container.settings.policy_repo_main_branch, "rbac.rego", ( "package app.rbac\n" @@ -171,7 +172,7 @@ def test_user_location( # @pytest.mark.parametrize("location", ["CN", "US", "SE"]) @pytest.mark.asyncio async def test_policy_and_data_updates( - gitea_server: GiteaContainer, + policy_repo, opal_servers: list[OpalServerContainer], connected_clients: list[OpalClientContainer], temp_dir, @@ -192,7 +193,7 @@ async def test_policy_and_data_updates( for location in locations: # Update policy to allow only non-US users logger.info(f"Updating policy to allow only users from {location[1]}...") - 
update_policy(gitea_server, server, location[1]) + update_policy(policy_repo, server, location[1]) for client in connected_clients: assert await data_publish_and_test( @@ -277,7 +278,7 @@ def test_read_statistics( @pytest.mark.asyncio async def test_policy_update( - gitea_server: GiteaContainer, + policy_repo, opal_servers: list[OpalServerContainer], connected_clients: list[OpalClientContainer], temp_dir, @@ -292,7 +293,7 @@ async def test_policy_update( for server in opal_servers: # Update policy to allow only non-US users logger.info(f"Updating policy to allow only users from {location}...") - update_policy(gitea_server, server, location) + update_policy(policy_repo, server, location) log_found = server.wait_for_log( "Found new commits: old HEAD was", 30, reference_timestamp From bef0d99a92068bab5b1b6955cc64ce74cf5ee449 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Mon, 24 Mar 2025 21:03:37 -0400 Subject: [PATCH 10/41] feat: update repo_providers and broadcasters in PyTestSessionSettings for enhanced testing support --- tests/settings.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/settings.py b/tests/settings.py index 9087f3224..b7eec11cc 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -102,9 +102,9 @@ def dump_settings(self): class PyTestSessionSettings(List): - repo_providers = [SupportedPolicyRepo.GITHUB]#, SupportedPolicyRepo.GITEA] + repo_providers = [SupportedPolicyRepo.GITHUB, SupportedPolicyRepo.GITEA] modes = ["without_webhook"] - broadcasters = ["postgres"]#, "redis"] + broadcasters = ["postgres", "redis"] broadcaster = "fgsfdg" repo_provider = "fdgdfg" mode = "rgrtre" From 8f6f2ed4d73c61be068599e06b51f6ad988b017a Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Mon, 24 Mar 2025 21:48:06 -0400 Subject: [PATCH 11/41] feat: enhance temp_dir fixture to create a persistent temporary directory with proper permissions --- tests/conftest.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 7b1e09cee..5fff557e5 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -59,13 +59,20 @@ def temp_dir(): This fixture is useful for tests that need a temporary directory to exist for the duration of the test session. 
""" - dir_path = tempfile.mkdtemp(prefix="opal_tests_",suffix=".tmp",dir="/opal.tmp") + from pathlib import Path + + path = Path("~/opal.tmp").expanduser() + path.mkdir(parents=True, exist_ok=True) + os.chmod(path, 0o777) # Set permissions to allow read/write/execute for all users + + dir_path = tempfile.mkdtemp(prefix="opal_tests_",suffix=".tmp",dir=str(path)) os.chmod(dir_path, 0o777) # Set permissions to allow read/write/execute for all users logger.debug(f"Temporary directory created: {dir_path}") yield dir_path # Teardown: Clean up the temporary directory shutil.rmtree(dir_path) + shutil.rmtree(path) logger.debug(f"Temporary directory removed: {dir_path}") From 72fede2e49041d5cf11e277885f8183fe36fb4c9 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Tue, 25 Mar 2025 00:39:37 -0400 Subject: [PATCH 12/41] feat: update GithubPolicyRepoSettings to utilize session matrix for dynamic configuration --- tests/fixtures/policy_repos.py | 25 +++++++++--- tests/policy_repos/github_policy_settings.py | 3 ++ tests/settings.py | 43 +++++++++++++------- 3 files changed, 51 insertions(+), 20 deletions(-) diff --git a/tests/fixtures/policy_repos.py b/tests/fixtures/policy_repos.py index ad101517b..159f5124f 100644 --- a/tests/fixtures/policy_repos.py +++ b/tests/fixtures/policy_repos.py @@ -64,18 +64,31 @@ def gitea_server(opal_network: Network): yield gitea_container def create_github_policy_repo_settings(temp_dir: str, session_matrix): - return GithubPolicyRepoSettings( + # logger.info("Creating GithubPolicyRepoSettings...") + # logger.info("\n\n\n\n") + # logger.info(session_matrix) + # logger.info("\n\n\n\n") + + # input() + + + return GithubPolicyRepoSettings( temp_dir = temp_dir, - source_repo_owner = "ariWeinberg", local_clone_path = temp_dir, + + source_repo_owner = "ariWeinberg", source_repo_name = "opal-tests-policy-repo", - webhook_secret = "Aa15317701", - ssh_key_path = "/home/ari/.ssh/github", - should_fork = False, repo_name = "opal-tests-policy-repo1", - password = "", owner = "ariWeinberg", + + ssh_key_path = session_matrix["ssh_key_path"], + pat = session_matrix["github_pat"], + + webhook_secret = session_matrix["webhook_secret"], + should_fork = False, + # should_fork = session_matrix["should_fork"], + password = session_matrix["repo_password"], ) def create_gitea_policy_repo_settings(temp_dir: str, session_matrix, gitea_settings: GiteaSettings): diff --git a/tests/policy_repos/github_policy_settings.py b/tests/policy_repos/github_policy_settings.py index 7c7c108a3..cf8ae21b7 100644 --- a/tests/policy_repos/github_policy_settings.py +++ b/tests/policy_repos/github_policy_settings.py @@ -103,6 +103,9 @@ def __init__( # Set the secret to use for the webhook self.webhook_secret = webhook_secret if webhook_secret else self.webhook_secret + # self.logger.info(f"\n\n\nPolicyRepoSettings: {self.__dict__}\n\n\n") + # input() + # Validate the dependencies and load the SSH key self.validate_dependencies() diff --git a/tests/settings.py b/tests/settings.py index b7eec11cc..626783358 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -61,33 +61,29 @@ def load_from_env(self): load_dotenv() - self.policy_repo_provider = os.getenv( - "OPAL_PYTEST_POLICY_REPO_PROVIDER", SupportedPolicyRepo.GITEA - ) + self.policy_repo_provider = os.getenv("OPAL_PYTEST_POLICY_REPO_PROVIDER", SupportedPolicyRepo.GITEA) self.repo_owner = os.getenv("OPAL_PYTEST_REPO_OWNER", "iwphonedo") self.repo_name = os.getenv("OPAL_PYTEST_REPO_NAME", "opal-example-policy-repo") 
self.repo_password = os.getenv("OPAL_PYTEST_REPO_PASSWORD") self.github_pat = os.getenv("OPAL_PYTEST_GITHUB_PAT") self.ssh_key_path = os.getenv("OPAL_PYTEST_SSH_KEY_PATH") self.source_repo_owner = os.getenv("OPAL_PYTEST_SOURCE_ACCOUNT", "permitio") - self.source_repo_name = os.getenv( - "OPAL_PYTEST_SOURCE_REPO", "opal-example-policy-repo" - ) + self.source_repo_name = os.getenv("OPAL_PYTEST_SOURCE_REPO", "opal-example-policy-repo") self.webhook_secret = os.getenv("OPAL_PYTEST_WEBHOOK_SECRET", "xxxxx") - self.should_fork = os.getenv("OPAL_PYTEST_SHOULD_FORK", "true") - self.use_webhook = os.getenv("OPAL_PYTEST_USE_WEBHOOK", "true") - self.wait_for_debugger = os.getenv("OPAL_PYTEST_WAIT_FOR_DEBUGGER", False) + self.should_fork = bool(os.getenv("OPAL_PYTEST_SHOULD_FORK", True)) + self.use_webhook = bool(os.getenv("OPAL_PYTEST_USE_WEBHOOK", True)) + self.wait_for_debugger = bool(os.getenv("OPAL_PYTEST_WAIT_FOR_DEBUGGER", False)) # This will fallback to the official permitio images of opal-server and opal-client, you could use it to fallback also opa and cedar - self.do_not_build_images = os.getenv("OPAL_PYTEST_DO_NOT_BUILD_IMAGES", False) + self.do_not_build_images = bool(os.getenv("OPAL_PYTEST_DO_NOT_BUILD_IMAGES", False)) # This will use the same image between test sessions. Otherwise, it will rebuild the images with every execution. # Don't use it if you changed the code, as your changes won't be deployed. # In order to use this flag, you should first set the keep_images flag to true, and for the following execution you will have the images. - self.skip_rebuild_images = os.getenv("OPAL_PYTEST_SKIP_REBUILD_IMAGES", False) + self.skip_rebuild_images = bool(os.getenv("OPAL_PYTEST_SKIP_REBUILD_IMAGES", False)) # This will keep the images after the test session. If you use it, you will be able to use skip_rebuild_images the next time. 
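Reviewer note on the bool(os.getenv(...)) conversions in this hunk: bool() is True for any non-empty string, so environment values such as "false" or "0" still enable the flag. PATCH 32 later adds a str2bool helper in tests/utils.py; a minimal sketch of the difference, using that helper's eventual one-liner:

    import os

    def str2bool(val: str) -> bool:
        # Only these spellings count as true; "false", "0", "" and unset values parse as False.
        return str(val).lower() in ("true", "1", "yes")

    os.environ["OPAL_PYTEST_SHOULD_FORK"] = "false"
    assert bool(os.getenv("OPAL_PYTEST_SHOULD_FORK")) is True       # non-empty string is truthy
    assert str2bool(os.getenv("OPAL_PYTEST_SHOULD_FORK")) is False  # parsed as intended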
- self.keep_images = os.getenv("OPAL_PYTEST_KEEP_IMAGES", True) + self.keep_images = bool(os.getenv("OPAL_PYTEST_KEEP_IMAGES", True)) def dump_settings(self): with open(f"pytest_{self.session_id}.env", "w") as envfile: @@ -102,9 +98,9 @@ def dump_settings(self): class PyTestSessionSettings(List): - repo_providers = [SupportedPolicyRepo.GITHUB, SupportedPolicyRepo.GITEA] + repo_providers = [SupportedPolicyRepo.GITHUB]#, SupportedPolicyRepo.GITEA] modes = ["without_webhook"] - broadcasters = ["postgres", "redis"] + broadcasters = ["postgres"]#, "redis"] broadcaster = "fgsfdg" repo_provider = "fdgdfg" mode = "rgrtre" @@ -159,9 +155,28 @@ def __next__(self): "repo_provider": self.repo_provider, "broadcaster": self.broadcaster, "mode": self.mode, + + "is_final": (self.current_broadcaster >= len(self.broadcasters)), "is_first": is_first, + + "github_pat":pytest_settings.github_pat, + "repo_owner": pytest_settings.repo_owner, + "repo_name": pytest_settings.repo_name, + "repo_password": pytest_settings.repo_password, + "github_pat": pytest_settings.github_pat, + "ssh_key_path": pytest_settings.ssh_key_path, + "source_repo_owner": pytest_settings.source_repo_owner, + "source_repo_name": pytest_settings.source_repo_name, + "webhook_secret": pytest_settings.webhook_secret, + "should_fork": pytest_settings.should_fork, + "use_webhook": pytest_settings.use_webhook, + "wait_for_debugger": pytest_settings.wait_for_debugger, + "do_not_build_images": pytest_settings.do_not_build_images, + "skip_rebuild_images": pytest_settings.skip_rebuild_images, + "keep_images": pytest_settings.keep_images, + } print("Finished iterating over PyTestSessionSettings...") From a1c4bd373236999e6efe1d30e278f63eb2802829 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Tue, 25 Mar 2025 00:42:44 -0400 Subject: [PATCH 13/41] feat: add .env.example for environment variable configuration in tests --- tests/.env.example | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 tests/.env.example diff --git a/tests/.env.example b/tests/.env.example new file mode 100644 index 000000000..1a31d8086 --- /dev/null +++ b/tests/.env.example @@ -0,0 +1,17 @@ +#!/usr/bin/env bash + +OPAL_PYTEST_REPO_OWNER="" +OPAL_PYTEST_REPO_NAME="" +OPAL_PYTEST_REPO_PASSWORD="" +OPAL_PYTEST_SSH_KEY_PATH="" +OPAL_PYTEST_SOURCE_ACCOUNT="" +OPAL_PYTEST_SOURCE_REPO="" +OPAL_PYTEST_WEBHOOK_SECRET="" +OPAL_PYTEST_SHOULD_FORK="" +OPAL_PYTEST_USE_WEBHOOK="" +OPAL_PYTEST_WAIT_FOR_DEBUGGER="" +OPAL_PYTEST_DO_NOT_BUILD_IMAGES="" +OPAL_PYTEST_SKIP_REBUILD_IMAGES="" +OPAL_PYTEST_KEEP_IMAGES="" +OPAL_PYTEST_GITHUB_PAT="" +OPAL_POLICY_REPO_SSH_KEY="" \ No newline at end of file From 3aeee0d39a3ba83ddc539c50525aa9d9254e8149 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Wed, 26 Mar 2025 12:52:42 -0400 Subject: [PATCH 14/41] fix: update SSH key path handling to correctly expand user directory --- tests/policy_repos/github_policy_repo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/policy_repos/github_policy_repo.py b/tests/policy_repos/github_policy_repo.py index 51ee0c35e..cec920f2a 100644 --- a/tests/policy_repos/github_policy_repo.py +++ b/tests/policy_repos/github_policy_repo.py @@ -29,7 +29,7 @@ def __init__( def load_ssh_key(self): if self.settings.ssh_key_path.startswith("~"): - self.settings.ssh_key_path = os.path.expanduser("~/.ssh/id_rsa") + self.settings.ssh_key_path = os.path.expanduser(self.settings.ssh_key_path) if not 
os.path.exists(self.settings.ssh_key_path): self.logger.debug(f"SSH key file not found at {self.settings.ssh_key_path}") From 46207fcde385d89d050fabe8eaad2229b6c2d5b1 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Wed, 26 Mar 2025 13:10:21 -0400 Subject: [PATCH 15/41] feat: refactor create_github_policy_repo_settings to use session matrix for dynamic repo configuration --- tests/fixtures/policy_repos.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/fixtures/policy_repos.py b/tests/fixtures/policy_repos.py index 159f5124f..28f6e084a 100644 --- a/tests/fixtures/policy_repos.py +++ b/tests/fixtures/policy_repos.py @@ -76,10 +76,10 @@ def create_github_policy_repo_settings(temp_dir: str, session_matrix): temp_dir = temp_dir, local_clone_path = temp_dir, - source_repo_owner = "ariWeinberg", - source_repo_name = "opal-tests-policy-repo", - repo_name = "opal-tests-policy-repo1", - owner = "ariWeinberg", + source_repo_owner = session_matrix["source_repo_owner"], + source_repo_name = session_matrix["source_repo_name"], + repo_name = session_matrix["repo_name"], + owner = session_matrix["repo_owner"], ssh_key_path = session_matrix["ssh_key_path"], From 34679316c1fecf2fbbd419860954261305fcfc76 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Wed, 26 Mar 2025 13:33:58 -0400 Subject: [PATCH 16/41] feat: add Gitea support to repo_providers and include Redis in broadcasters --- tests/settings.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/settings.py b/tests/settings.py index 626783358..c26fd1e53 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -98,9 +98,9 @@ def dump_settings(self): class PyTestSessionSettings(List): - repo_providers = [SupportedPolicyRepo.GITHUB]#, SupportedPolicyRepo.GITEA] + repo_providers = [SupportedPolicyRepo.GITHUB, SupportedPolicyRepo.GITEA] modes = ["without_webhook"] - broadcasters = ["postgres"]#, "redis"] + broadcasters = ["postgres", "redis"] broadcaster = "fgsfdg" repo_provider = "fdgdfg" mode = "rgrtre" From a616465c74ee7f8f42af7736ca089b0830fcb249 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Wed, 26 Mar 2025 22:23:07 -0400 Subject: [PATCH 17/41] feat: update test workflow and refactor OPAL server/client configurations to use session matrix --- .github/workflows/tests.yml | 4 +-- tests/conftest.py | 58 ++++--------------------------------- tests/settings.py | 3 ++ tests/test_app.py | 14 ++++----- 4 files changed, 18 insertions(+), 61 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 92767b793..950ef41d2 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -5,9 +5,9 @@ name: Tests on: push: - branches: [ master ] + branches: [ e2e-add-kafka-fix ] pull_request: - branches: [ master ] + branches: [ e2e-add-kafka-fix ] # Allows you to run this workflow manually from the Actions tab workflow_dispatch: diff --git a/tests/conftest.py b/tests/conftest.py index 5fff557e5..90a5c7402 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -97,20 +97,6 @@ def opal_network(): else: logger.error(f"Failed to remove network got exception\n{e}") -@pytest.fixture(scope="session") -def number_of_opal_servers(): - """The number of OPAL servers to start. - - This fixture is used to determine how many OPAL servers to start for - the tests. 
The default value is 2, but it can be overridden by setting - the environment variable OPAL_TESTS_NUMBER_OF_OPAL_SERVERS. - - Returns: - int: The number of OPAL servers to start. - """ - return 2 - - from tests.fixtures.broadcasters import ( broadcast_channel, kafka_broadcast_channel, @@ -125,9 +111,7 @@ def number_of_opal_servers(): def opal_servers( opal_network: Network, policy_repo, - number_of_opal_servers: int, opal_server_image: str, - topics: dict[str, int], broadcast_channel: BroadcastContainerBase, # kafka_broadcast_channel: KafkaContainer, # redis_broadcast_channel: RedisBroadcastContainer, @@ -167,7 +151,7 @@ def opal_servers( containers = [] # List to store container instances - for i in range(number_of_opal_servers): + for i in range(session_matrix["number_of_opal_servers"]): container_name = f"opal_server_{i+1}" repo_url = policy_repo.get_repo_url() @@ -180,7 +164,7 @@ def opal_servers( policy_repo_url=repo_url, image=opal_server_image, log_level="DEBUG", - data_topics=" ".join(topics.keys()), + data_topics=" ".join(session_matrix["topics"].keys()), polling_interval=3, policy_repo_main_branch=policy_repo.test_branch, ), @@ -209,18 +193,6 @@ def opal_servers( for container in containers: container.stop() - -@pytest.fixture(scope="session") -def number_of_opal_clients(): - """The number of OPAL clients to start. - - This fixture is used to determine how many OPAL clients to start for - the tests. The default value is 2, but it can be overridden by - setting the environment variable OPAL_TESTS_NUMBER_OF_OPAL_CLIENTS. - """ - return 2 - - @pytest.fixture(scope="session") def connected_clients(opal_clients: List[OpalClientContainer]): """A fixture that waits for all OPAL clients to connect to the PubSub @@ -264,7 +236,7 @@ def opal_clients( # opa_server: OpaContainer, # cedar_server: CedarContainer, request, - number_of_opal_clients: int, + session_matrix, opal_client_with_opa_image, ): """A fixture that starts and manages multiple OPAL client containers. @@ -303,7 +275,7 @@ def opal_clients( containers = [] # List to store OpalClientContainer instances - for i in range(number_of_opal_clients): + for i in range(session_matrix["number_of_opal_clients"]): container_name = f"opal_client_{i+1}" # Unique name for each client client_token = opal_servers[0].obtain_OPAL_tokens(container_name)["client"] @@ -355,26 +327,8 @@ def opal_clients( logger.error(f"Failed to stop containers: {container}") pass - @pytest.fixture(scope="session") -def topics(): - """A fixture that returns a dictionary mapping topic names to the number of - OpalClientContainer instances that should subscribe to each topic. - - Returns - ------- - dict - A dictionary mapping topic names to the number of OpalClientContainer - instances that should subscribe to each topic. - """ - topics = {"topic_1": 1, "topic_2": 1} - return topics - - -@pytest.fixture(scope="session") -def topiced_clients( - topics, opal_network: Network, opal_servers: list[OpalServerContainer] -): +def topiced_clients(opal_network: Network, opal_servers: list[OpalServerContainer], session_matrix): """Fixture that starts and manages multiple OPAL client containers, each subscribing to a different topic. 
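Note for readers following the topiced_clients hunks around here: the topic mapping that used to come from the removed topics() fixture now arrives through session_matrix["topics"], and each (topic, count) pair expands into that many client containers. A small self-contained illustration, using the default mapping this series later sets in settings.py:

    session_matrix = {"topics": {"topic_1": 1, "topic_2": 1}}

    client_names = [
        f"opal_client_{topic}_{i + 1}"   # same naming scheme as the fixture
        for topic, count in session_matrix["topics"].items()
        for i in range(count)
    ]
    print(client_names)  # ['opal_client_topic_1_1', 'opal_client_topic_2_1']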
@@ -427,7 +381,7 @@ def topiced_clients( } ) - for topic, number_of_clients in topics.items(): + for topic, number_of_clients in session_matrix["topics"].items(): for i in range(number_of_clients): container_name = f"opal_client_{topic}_{i+1}" # Unique name for each client diff --git a/tests/settings.py b/tests/settings.py index c26fd1e53..9eab128de 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -177,6 +177,9 @@ def __next__(self): "skip_rebuild_images": pytest_settings.skip_rebuild_images, "keep_images": pytest_settings.keep_images, + "number_of_opal_servers": 2, + "number_of_opal_clients": 2, + "topics": {"topic_1": 1, "topic_2": 1}, } print("Finished iterating over PyTestSessionSettings...") diff --git a/tests/test_app.py b/tests/test_app.py index 54b1aa525..5614aa39f 100644 --- a/tests/test_app.py +++ b/tests/test_app.py @@ -207,18 +207,18 @@ async def test_policy_and_data_updates( @pytest.mark.parametrize("attempts", [10]) # Number of attempts to repeat the check -def test_read_statistics( - attempts, - opal_servers: list[OpalServerContainer], - number_of_opal_servers: int, - number_of_opal_clients: int, -): +def test_read_statistics(attempts, opal_servers: list[OpalServerContainer], session_matrix,): """Tests the statistics feature by verifying the number of clients and servers.""" logger.info("- Testing statistics feature") - time.sleep(15) + + + number_of_opal_servers = session_matrix["number_of_opal_servers"] + number_of_opal_clients = session_matrix["number_of_opal_clients"] + + time.sleep(5) for server in opal_servers: logger.info(f"OPAL Server: {server.settings.container_name}:7002") From 751305870ad4a60738d3d2f18cdf012d98ad20b6 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Wed, 26 Mar 2025 22:33:37 -0400 Subject: [PATCH 18/41] feat: update test workflows to trigger on master branch and add pytest testing workflow --- .github/workflows/pytest-tests.yml | 22 ++++++++++++++++++++++ .github/workflows/tests.yml | 4 ++-- 2 files changed, 24 insertions(+), 2 deletions(-) create mode 100644 .github/workflows/pytest-tests.yml diff --git a/.github/workflows/pytest-tests.yml b/.github/workflows/pytest-tests.yml new file mode 100644 index 000000000..32189bc4c --- /dev/null +++ b/.github/workflows/pytest-tests.yml @@ -0,0 +1,22 @@ +name: Pytest Tests + +on: + push: + branches: + - e2e-add-kafka-fix + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Install Python + uses: actions/setup-python@v2 + with: + python-version: "3.9" + - name: Install dependencies + run: pip install -r requirements.txt && pip install -r ./tests/requirements.txt + - name: Run tests + working-directory: ./tests + run: ./run.sh + diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 950ef41d2..92767b793 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -5,9 +5,9 @@ name: Tests on: push: - branches: [ e2e-add-kafka-fix ] + branches: [ master ] pull_request: - branches: [ e2e-add-kafka-fix ] + branches: [ master ] # Allows you to run this workflow manually from the Actions tab workflow_dispatch: From fe84864d0565bdbea9303956e5dfb04758f3f632 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Wed, 26 Mar 2025 22:39:38 -0400 Subject: [PATCH 19/41] feat: update Python version in pytest workflow to 3.10.12 --- .github/workflows/pytest-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/.github/workflows/pytest-tests.yml b/.github/workflows/pytest-tests.yml index 32189bc4c..030856a17 100644 --- a/.github/workflows/pytest-tests.yml +++ b/.github/workflows/pytest-tests.yml @@ -13,7 +13,7 @@ jobs: - name: Install Python uses: actions/setup-python@v2 with: - python-version: "3.9" + python-version: "3.10.12" - name: Install dependencies run: pip install -r requirements.txt && pip install -r ./tests/requirements.txt - name: Run tests From b29e7d2c51e207cb42ad3738c2b254c0099dabca Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Wed, 26 Mar 2025 22:50:17 -0400 Subject: [PATCH 20/41] fix: correct sleep duration in GitHub Actions to 30 seconds --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 90a5c7402..09eec9ece 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -429,7 +429,7 @@ def wait_sometime(): if os.getenv("GITHUB_ACTIONS") == "true": logger.info("Running inside GitHub Actions. Sleeping for 30 seconds...") - time.sleep(3600) # Sleep for 30 seconds + time.sleep(30) # Sleep for 30 seconds else: logger.info("Running on the local machine. Press Enter to continue...") input() # Wait for key press From 430f07dfaccbb049df304d289e8bcb074e4e3db3 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Wed, 26 Mar 2025 23:09:41 -0400 Subject: [PATCH 21/41] fix: exclude ddtrace plugins from pytest execution in run.sh --- tests/run.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/run.sh b/tests/run.sh index e41371dfc..46db4bf4c 100755 --- a/tests/run.sh +++ b/tests/run.sh @@ -39,7 +39,7 @@ function main { python3 -Xfrozen_modules=off -m debugpy --listen 5678 -m pytest -s "$@" else echo "Running all tests..." - python3 -Xfrozen_modules=off -m debugpy --listen 5678 -m pytest -s + python3 -Xfrozen_modules=off -m debugpy --listen 5678 -m pytest -s -p no:ddtrace -p no:ddtrace.pytest_bdd -p no:ddtrace.pytest_benchmark fi echo "Done!" 
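Editorial aside on the CI wait logic: PATCH 20 above trims the GitHub Actions pause to an actual 30 seconds, and PATCH 24 further down turns it into a per-second countdown rewritten in place with a carriage return. A cleaner standalone sketch of the same idea, with tidied names:

    import time

    def countdown(seconds: int = 30) -> None:
        for elapsed in range(seconds):
            remaining = seconds - 1 - elapsed
            # Overwrite the same console line until the final second, then move to a new line.
            print(f"Sleeping for {remaining} seconds... ",
                  end="\r" if remaining > 0 else "\n", flush=True)
            time.sleep(1)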
From 550784760fa095beb5a6d83c5ba0bf2a7217f10c Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Wed, 26 Mar 2025 23:10:36 -0400 Subject: [PATCH 22/41] feat: add environment specification for OPAL_pytest in pytest workflow --- .github/workflows/pytest-tests.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/pytest-tests.yml b/.github/workflows/pytest-tests.yml index 030856a17..f5fee6f5c 100644 --- a/.github/workflows/pytest-tests.yml +++ b/.github/workflows/pytest-tests.yml @@ -8,6 +8,7 @@ on: jobs: build: runs-on: ubuntu-latest + environment: OPAL_pytest steps: - uses: actions/checkout@v2 - name: Install Python From eaa91d7b2b37496cdfe0a871f56f0907891fe076 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Wed, 26 Mar 2025 23:17:05 -0400 Subject: [PATCH 23/41] refactor: remove unused return statement in create_gitea_policy_repo_settings function --- tests/fixtures/policy_repos.py | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/tests/fixtures/policy_repos.py b/tests/fixtures/policy_repos.py index 28f6e084a..16e6c23a6 100644 --- a/tests/fixtures/policy_repos.py +++ b/tests/fixtures/policy_repos.py @@ -128,24 +128,6 @@ def create_gitea_policy_repo_settings(temp_dir: str, session_matrix, gitea_setti webhook_secret = pytest_settings.webhook_secret ) - return GiteaPolicyRepoSettings( - temp_dir, - pytest_settings.repo_owner, - pytest_settings.repo_name, - "master", - gitea_settings.container_name, - gitea_settings.port_http, - gitea_settings.port_ssh, - pytest_settings.repo_password, - None, - pytest_settings.ssh_key_path, - pytest_settings.source_repo_owner, - pytest_settings.source_repo_name, - True, - True, - pytest_settings.webhook_secret, - ) - # @pytest.fixture(scope="session") def policy_repo_settings(temp_dir: str, session_matrix, opal_network, policy_repo_type = SupportedPolicyRepo.GITHUB): From 5287be896f1dc9a813cce0f46d0f1817dc692f79 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Wed, 26 Mar 2025 23:49:44 -0400 Subject: [PATCH 24/41] feat: enhance wait_sometime function to provide countdown during sleep in GitHub Actions --- tests/conftest.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 09eec9ece..6c3069f63 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -429,7 +429,9 @@ def wait_sometime(): if os.getenv("GITHUB_ACTIONS") == "true": logger.info("Running inside GitHub Actions. Sleeping for 30 seconds...") - time.sleep(30) # Sleep for 30 seconds + for secconds_ellapsed in range(30): + time.sleep(1) + print(f"Sleeping for \033[91m{29 - secconds_ellapsed}\033[0m seconds... \r",end="\r" if secconds_ellapsed < 29 else "\n") else: logger.info("Running on the local machine. 
Press Enter to continue...") input() # Wait for key press From a66d1e4eea539088e271baf7a672c9046318ef42 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Wed, 26 Mar 2025 23:52:10 -0400 Subject: [PATCH 25/41] refactor: return SSH keys as a dictionary in generate_ssh_key_pair function --- tests/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/utils.py b/tests/utils.py index d58d24c37..a7b782f2a 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -142,7 +142,7 @@ def generate_ssh_key_pair(): ) # Return the keys as strings - return private_key_pem.decode("utf-8"), public_key_openssh.decode("utf-8") + return {"private": private_key_pem.decode("utf-8"), "public": public_key_openssh.decode("utf-8")} async def opal_authorize(user: str, policy_url: str): From e940dea19292988ece3f431b8a96911ab557afdc Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Wed, 26 Mar 2025 23:54:06 -0400 Subject: [PATCH 26/41] fix: add return statement in GithubPolicyRepo to ensure proper flow after generating SSH keys --- tests/policy_repos/github_policy_repo.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/policy_repos/github_policy_repo.py b/tests/policy_repos/github_policy_repo.py index cec920f2a..33eb06223 100644 --- a/tests/policy_repos/github_policy_repo.py +++ b/tests/policy_repos/github_policy_repo.py @@ -38,6 +38,7 @@ def load_ssh_key(self): ssh_keys = utils.generate_ssh_key_pair() self.ssh_key = ssh_keys["public"] self.private_key = ssh_keys["private"] + return try: with open(self.settings.ssh_key_path, "r") as ssh_key_file: From ea92dd6c1f2ae096cd0a7a316832c9dfba4f40cf Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Thu, 27 Mar 2025 02:15:27 -0400 Subject: [PATCH 27/41] feat: add SSH key support for OPAL policy repository in test settings --- tests/conftest.py | 1 + tests/containers/settings/opal_server_settings.py | 1 + tests/fixtures/policy_repos.py | 3 +++ tests/policy_repos/github_policy_repo.py | 6 ++++++ tests/policy_repos/github_policy_settings.py | 11 +++++++---- tests/settings.py | 7 +++++++ 6 files changed, 25 insertions(+), 4 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 6c3069f63..fdc81c523 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -167,6 +167,7 @@ def opal_servers( data_topics=" ".join(session_matrix["topics"].keys()), polling_interval=3, policy_repo_main_branch=policy_repo.test_branch, + POLICY_REPO_SSH_KEY=session_matrix["opal_policy_repo_ssh_key_private"], ), network=opal_network, ) diff --git a/tests/containers/settings/opal_server_settings.py b/tests/containers/settings/opal_server_settings.py index 352e8284d..065ca8998 100644 --- a/tests/containers/settings/opal_server_settings.py +++ b/tests/containers/settings/opal_server_settings.py @@ -181,6 +181,7 @@ def getEnvVars(self): "OPAL_POLICY_REPO_MAIN_BRANCH": self.policy_repo_main_branch, "OPAL_POLICY_REPO_POLLING_INTERVAL": self.polling_interval, "OPAL_AUTH_PRIVATE_KEY": self.private_key, + "OPAL_AUTH_PRIVATE_KEY_FORMAT": "ssh", "OPAL_AUTH_PUBLIC_KEY": self.public_key, "OPAL_AUTH_MASTER_TOKEN": self.master_token, "OPAL_DATA_CONFIG_SOURCES": f"""{{"config":{{"entries":[{{"url":"http://{self.container_name}:7002/policy-data","topics":["{self.data_topics}"],"dst_path":"/static"}}]}}}}""", diff --git a/tests/fixtures/policy_repos.py b/tests/fixtures/policy_repos.py index 16e6c23a6..eb27f6278 100644 --- 
a/tests/fixtures/policy_repos.py +++ b/tests/fixtures/policy_repos.py @@ -89,6 +89,9 @@ def create_github_policy_repo_settings(temp_dir: str, session_matrix): should_fork = False, # should_fork = session_matrix["should_fork"], password = session_matrix["repo_password"], + + opal_policy_repo_ssh_key_public = session_matrix["opal_policy_repo_ssh_key_public"], + opal_policy_repo_ssh_key_private = session_matrix["opal_policy_repo_ssh_key_private"], ) def create_gitea_policy_repo_settings(temp_dir: str, session_matrix, gitea_settings: GiteaSettings): diff --git a/tests/policy_repos/github_policy_repo.py b/tests/policy_repos/github_policy_repo.py index 33eb06223..95c041d62 100644 --- a/tests/policy_repos/github_policy_repo.py +++ b/tests/policy_repos/github_policy_repo.py @@ -28,6 +28,12 @@ def __init__( self.load_ssh_key() def load_ssh_key(self): + + if self.settings.opal_policy_repo_ssh_key_public and self.settings.opal_policy_repo_ssh_key_private: + self.ssh_key = self.settings.opal_policy_repo_ssh_key_public + self.private_key = self.settings.opal_policy_repo_ssh_key_private + return + if self.settings.ssh_key_path.startswith("~"): self.settings.ssh_key_path = os.path.expanduser(self.settings.ssh_key_path) diff --git a/tests/policy_repos/github_policy_settings.py b/tests/policy_repos/github_policy_settings.py index cf8ae21b7..90ea102cb 100644 --- a/tests/policy_repos/github_policy_settings.py +++ b/tests/policy_repos/github_policy_settings.py @@ -35,7 +35,9 @@ def __init__( source_repo_name: str | None = None, ssh_key_path: str | None = None, should_fork: bool | None = False, - webhook_secret: str | None = None + webhook_secret: str | None = None, + opal_policy_repo_ssh_key_public: str | None = None, + opal_policy_repo_ssh_key_private: str | None = None, ): """ @@ -103,8 +105,9 @@ def __init__( # Set the secret to use for the webhook self.webhook_secret = webhook_secret if webhook_secret else self.webhook_secret - # self.logger.info(f"\n\n\nPolicyRepoSettings: {self.__dict__}\n\n\n") - # input() + # Set the public and private SSH keys + self.opal_policy_repo_ssh_key_public = opal_policy_repo_ssh_key_public + self.opal_policy_repo_ssh_key_private = opal_policy_repo_ssh_key_private # Validate the dependencies and load the SSH key @@ -120,6 +123,6 @@ def load_from_env(self): self.webhook_secret: str = os.getenv("OPAL_WEBHOOK_SECRET", "xxxxx") def validate_dependencies(self): - if not self.password and not self.github_pat and not self.ssh_key_path: + if not self.password and not self.github_pat and not self.ssh_key_path and not self.opal_policy_repo_ssh_key_private: self.logger.error("No password or Github PAT or SSH key provided.") raise Exception("No authentication method provided.") diff --git a/tests/settings.py b/tests/settings.py index 9eab128de..d935fa59a 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -74,6 +74,10 @@ def load_from_env(self): self.use_webhook = bool(os.getenv("OPAL_PYTEST_USE_WEBHOOK", True)) self.wait_for_debugger = bool(os.getenv("OPAL_PYTEST_WAIT_FOR_DEBUGGER", False)) + + self.opal_policy_repo_ssh_key_private = os.getenv("OPAL_PYTEST_POLICY_REPO_SSH_PRIVATE_KEY", None) + self.opal_policy_repo_ssh_key_public = os.getenv("OPAL_PYTEST_POLICY_REPO_SSH_PUBLIC_KEY", None) + # This will fallback to the official permitio images of opal-server and opal-client, you could use it to fallback also opa and cedar self.do_not_build_images = bool(os.getenv("OPAL_PYTEST_DO_NOT_BUILD_IMAGES", False)) @@ -177,6 +181,9 @@ def __next__(self): "skip_rebuild_images": 
pytest_settings.skip_rebuild_images, "keep_images": pytest_settings.keep_images, + "opal_policy_repo_ssh_key_private": pytest_settings.opal_policy_repo_ssh_key_private, + "opal_policy_repo_ssh_key_public": pytest_settings.opal_policy_repo_ssh_key_public, + "number_of_opal_servers": 2, "number_of_opal_clients": 2, "topics": {"topic_1": 1, "topic_2": 1}, From 3e2bb9bf1d7d08c979db5f6487a2e9b1e37f5ae9 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Thu, 27 Mar 2025 23:24:21 -0400 Subject: [PATCH 28/41] chore: update pytest logging level to DEBUG for enhanced output --- tests/pytest.ini | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/pytest.ini b/tests/pytest.ini index 87ffbfda9..7ff4a738c 100644 --- a/tests/pytest.ini +++ b/tests/pytest.ini @@ -1,8 +1,8 @@ [pytest] asyncio_default_fixture_loop_scope = function log_cli = true -log_level = INFO -log_cli_level = INFO +log_level = DEBUG +log_cli_level = DEBUG log_file = pytest_logs.log log_file_level = DEBUG pythonpath = fixtures From 50887971b36804e9e176c155a82dea5a1224b251 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Sat, 29 Mar 2025 21:41:18 -0400 Subject: [PATCH 29/41] feat: update source_repo_owner environment variable to 'ariWeinberg' in multiple settings files and add logging for test session initialization --- tests/conftest.py | 11 +++++++++++ tests/policy_repos/gitea_policy_repo_settings.py | 4 ++-- tests/policy_repos/github_policy_settings.py | 2 +- tests/settings.py | 2 +- 4 files changed, 15 insertions(+), 4 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index fdc81c523..b3ad36360 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -460,6 +460,17 @@ def setup(opal_clients, policy_repo, session_matrix): ------ None """ + + + logger.info("\n\nInitializing test session...\n\n") + + for key, val in session_matrix.items(): + logger.info(f"{key}: {val}") + + logger.info("\n\nLoading environment variables...\n\n") + for key, val in os.environ.items(): + logger.debug(f"{key}: {val}") + yield policy_repo.cleanup(delete_ssh_key=False) if session_matrix["is_final"]: diff --git a/tests/policy_repos/gitea_policy_repo_settings.py b/tests/policy_repos/gitea_policy_repo_settings.py index 515f26342..eb6dbf42c 100644 --- a/tests/policy_repos/gitea_policy_repo_settings.py +++ b/tests/policy_repos/gitea_policy_repo_settings.py @@ -116,7 +116,7 @@ def load_from_env(self): self.github_pat = os.getenv("OPAL_GITHUB_PAT", None) self.ssh_key_path = os.getenv("OPAL_PYTEST_POLICY_REPO_SSH_KEY_PATH", "~/.ssh/id_rsa") self.repo = os.getenv("OPAL_TARGET_REPO_NAME", "opal-example-policy-repo") - self.source_repo_owner = os.getenv("OPAL_SOURCE_ACCOUNT", "permitio") + self.source_repo_owner = os.getenv("OPAL_SOURCE_ACCOUNT", "ariWeinberg") self.source_repo_name = os.getenv("OPAL_SOURCE_REPO_NAME", "opal-example-policy-repo") self.webhook_secret: str = os.getenv("OPAL_WEBHOOK_SECRET", "xxxxx") @@ -137,7 +137,7 @@ def load_from_env(self): self.local_clone_path = os.getenv("OPAL_GITEA_LOCAL_CLONE_PATH", None) - self.source_repo_owner = os.getenv("OPAL_SOURCE_ACCOUNT", "permitio") + self.source_repo_owner = os.getenv("OPAL_SOURCE_ACCOUNT", "ariWeinberg") self.source_repo_name = os.getenv("OPAL_SOURCE_REPO_NAME", "opal-example-policy-repo") def validate_dependencies(self): diff --git a/tests/policy_repos/github_policy_settings.py b/tests/policy_repos/github_policy_settings.py index 90ea102cb..70e9f384d 100644 --- 
a/tests/policy_repos/github_policy_settings.py +++ b/tests/policy_repos/github_policy_settings.py @@ -118,7 +118,7 @@ def load_from_env(self): self.github_pat = os.getenv("OPAL_GITHUB_PAT", None) self.ssh_key_path = os.getenv("OPAL_PYTEST_POLICY_REPO_SSH_KEY_PATH", "~/.ssh/id_rsa") self.repo = os.getenv("OPAL_TARGET_REPO_NAME", "opal-example-policy-repo") - self.source_repo_owner = os.getenv("OPAL_SOURCE_ACCOUNT", "permitio") + self.source_repo_owner = os.getenv("OPAL_SOURCE_ACCOUNT", "ariWeinberg") self.source_repo_name = os.getenv("OPAL_SOURCE_REPO_NAME", "opal-example-policy-repo") self.webhook_secret: str = os.getenv("OPAL_WEBHOOK_SECRET", "xxxxx") diff --git a/tests/settings.py b/tests/settings.py index d935fa59a..7023311d0 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -67,7 +67,7 @@ def load_from_env(self): self.repo_password = os.getenv("OPAL_PYTEST_REPO_PASSWORD") self.github_pat = os.getenv("OPAL_PYTEST_GITHUB_PAT") self.ssh_key_path = os.getenv("OPAL_PYTEST_SSH_KEY_PATH") - self.source_repo_owner = os.getenv("OPAL_PYTEST_SOURCE_ACCOUNT", "permitio") + self.source_repo_owner = os.getenv("OPAL_PYTEST_SOURCE_ACCOUNT", "ariWeinberg") self.source_repo_name = os.getenv("OPAL_PYTEST_SOURCE_REPO", "opal-example-policy-repo") self.webhook_secret = os.getenv("OPAL_PYTEST_WEBHOOK_SECRET", "xxxxx") self.should_fork = bool(os.getenv("OPAL_PYTEST_SHOULD_FORK", True)) From 642a599a81cb96ddd8b2834421b96f74519356cc Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Sat, 29 Mar 2025 22:10:58 -0400 Subject: [PATCH 30/41] feat: update setup fixture to load environment variables and add a dummy test for pytest execution --- tests/conftest.py | 6 +++++- tests/test_app.py | 7 +++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index b3ad36360..77ea1ee76 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -439,7 +439,7 @@ def wait_sometime(): @pytest.fixture(scope="session", autouse=True) -def setup(opal_clients, policy_repo, session_matrix): +def setup(session_matrix, policy_repo): """A setup fixture that is run once per test session. This fixture is automatically used by all tests, and is used to set up the @@ -468,6 +468,10 @@ def setup(opal_clients, policy_repo, session_matrix): logger.info(f"{key}: {val}") logger.info("\n\nLoading environment variables...\n\n") + import dotenv + + dotenv.load_dotenv() + for key, val in os.environ.items(): logger.debug(f"{key}: {val}") diff --git a/tests/test_app.py b/tests/test_app.py index 5614aa39f..afdbc7226 100644 --- a/tests/test_app.py +++ b/tests/test_app.py @@ -311,3 +311,10 @@ async def test_policy_update( assert ( log_found ), f"Expected log entry not found in client '{client.settings.container_name}' after the reference timestamp." 
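Stepping back to the SSH key plumbing from PATCH 25-27 above: generate_ssh_key_pair now returns a {"private": ..., "public": ...} dict, and a ready-made key pair can instead be injected via OPAL_PYTEST_POLICY_REPO_SSH_PRIVATE_KEY / _PUBLIC_KEY. The utility's body is not reproduced in this series, so the following is only a plausible sketch consistent with that return shape (key size and serialization formats are assumptions):

    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric import rsa

    def generate_ssh_key_pair() -> dict:
        key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
        private_key_pem = key.private_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PrivateFormat.TraditionalOpenSSL,
            encryption_algorithm=serialization.NoEncryption(),
        )
        public_key_openssh = key.public_key().public_bytes(
            encoding=serialization.Encoding.OpenSSH,
            format=serialization.PublicFormat.OpenSSH,
        )
        # Keys returned as strings, keyed the way PATCH 25's callers consume them.
        return {
            "private": private_key_pem.decode("utf-8"),
            "public": public_key_openssh.decode("utf-8"),
        }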
+ + + +def test_dummy_test(setup): + """Dummy test to ensure pytest runs.""" + print("Dummy test running...") + return \ No newline at end of file From d0ca91faf80fdeea22157a6a3fe0bc96ffe8dc7a Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Sat, 29 Mar 2025 22:14:54 -0400 Subject: [PATCH 31/41] feat: update setup fixture to include opal_clients and remove dummy test --- tests/conftest.py | 12 ++---------- tests/test_app.py | 7 ------- 2 files changed, 2 insertions(+), 17 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 77ea1ee76..feebdc521 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -439,7 +439,7 @@ def wait_sometime(): @pytest.fixture(scope="session", autouse=True) -def setup(session_matrix, policy_repo): +def setup(opal_clients, policy_repo, session_matrix): """A setup fixture that is run once per test session. This fixture is automatically used by all tests, and is used to set up the @@ -462,19 +462,11 @@ def setup(session_matrix, policy_repo): """ - logger.info("\n\nInitializing test session...\n\n") + logger.info("Initializing test session...") for key, val in session_matrix.items(): logger.info(f"{key}: {val}") - logger.info("\n\nLoading environment variables...\n\n") - import dotenv - - dotenv.load_dotenv() - - for key, val in os.environ.items(): - logger.debug(f"{key}: {val}") - yield policy_repo.cleanup(delete_ssh_key=False) if session_matrix["is_final"]: diff --git a/tests/test_app.py b/tests/test_app.py index afdbc7226..5614aa39f 100644 --- a/tests/test_app.py +++ b/tests/test_app.py @@ -311,10 +311,3 @@ async def test_policy_update( assert ( log_found ), f"Expected log entry not found in client '{client.settings.container_name}' after the reference timestamp." 
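One pytest detail worth spelling out for the setup fixture that PATCH 30/31 keep reshaping: it is session-scoped and autouse, so it wraps the whole test session, and everything after its yield runs as teardown. A generic minimal sketch of that pattern (names here are illustrative only):

    import pytest

    @pytest.fixture(scope="session", autouse=True)
    def session_setup():
        print("runs once, before the first test of the session")
        yield
        print("runs once, after the last test of the session")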
- - - -def test_dummy_test(setup): - """Dummy test to ensure pytest runs.""" - print("Dummy test running...") - return \ No newline at end of file From 013f00ba4c3ad363eba0500c63dbe01c640dfd6d Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Sun, 30 Mar 2025 11:04:11 -0400 Subject: [PATCH 32/41] feat: add str2bool utility function for string to boolean conversion --- tests/utils.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/utils.py b/tests/utils.py index a7b782f2a..aff7b25a7 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -504,3 +504,6 @@ def global_exception_handler(exc_type, exc_value, exc_traceback): # Set the global exception handler sys.excepthook = global_exception_handler + +def str2bool(val: str) -> bool: + return str(val).lower() in ("true", "1", "yes") \ No newline at end of file From 66f2a4a5d20f78d6bcf42d4b81e1c4ff140d965f Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Sun, 30 Mar 2025 11:12:51 -0400 Subject: [PATCH 33/41] feat: remove GITEA support from repo_providers and redis from broadcasters in PyTestSessionSettings --- tests/settings.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/settings.py b/tests/settings.py index 7023311d0..436fa89b4 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -102,9 +102,9 @@ def dump_settings(self): class PyTestSessionSettings(List): - repo_providers = [SupportedPolicyRepo.GITHUB, SupportedPolicyRepo.GITEA] + repo_providers = [SupportedPolicyRepo.GITHUB]#, SupportedPolicyRepo.GITEA] modes = ["without_webhook"] - broadcasters = ["postgres", "redis"] + broadcasters = ["postgres"]#, "redis"] broadcaster = "fgsfdg" repo_provider = "fdgdfg" mode = "rgrtre" From ab568bf44ef66d513c0c7a382dd9778ecb4016cc Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Sun, 30 Mar 2025 11:15:26 -0400 Subject: [PATCH 34/41] feat: add logging for policy repo creation and session matrix details --- tests/fixtures/policy_repos.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/fixtures/policy_repos.py b/tests/fixtures/policy_repos.py index eb27f6278..2539f488f 100644 --- a/tests/fixtures/policy_repos.py +++ b/tests/fixtures/policy_repos.py @@ -180,6 +180,13 @@ def policy_repo(temp_dir: str, session_matrix, opal_network, request) -> GithubP :return: The PolicyRepoBase object. 
""" + logger.info("Creating policy repo...") + logger.info("\n\nusing session matrix:") + for key in session_matrix: + logger.info(f"{key}: {session_matrix[key]}") + logger.info("\n\n") + + settings, server = next(policy_repo_settings(temp_dir, session_matrix, opal_network, session_matrix["repo_provider"])) policy_repo = PolicyRepoFactory( From a263d087c9b1080d53d91d93328863260405a15d Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Sun, 30 Mar 2025 11:18:33 -0400 Subject: [PATCH 35/41] feat: update logger name in policy_repos fixture for clarity --- tests/fixtures/policy_repos.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/fixtures/policy_repos.py b/tests/fixtures/policy_repos.py index 2539f488f..5ba1eb7ca 100644 --- a/tests/fixtures/policy_repos.py +++ b/tests/fixtures/policy_repos.py @@ -18,7 +18,7 @@ from tests.policy_repos.policy_repo_settings import PolicyRepoSettings from tests.settings import pytest_settings -logger = setup_logger(__name__) +logger = setup_logger("policy_repos") # @pytest.fixture(scope="session") From 7427d3b4e3ef1a501b9ec5b4c12adb09c9bf289a Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Sun, 30 Mar 2025 11:25:37 -0400 Subject: [PATCH 36/41] feat: add SSH keys to environment variables in pytest workflow --- .github/workflows/pytest-tests.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/pytest-tests.yml b/.github/workflows/pytest-tests.yml index f5fee6f5c..756fdf043 100644 --- a/.github/workflows/pytest-tests.yml +++ b/.github/workflows/pytest-tests.yml @@ -1,5 +1,9 @@ name: Pytest Tests +env: + OPAL_PYTEST_POLICY_REPO_SSH_PRIVATE_KEY: ${{ vars.OPAL_PYTEST_POLICY_REPO_SSH_PRIVATE_KEY }} + OPAL_PYTEST_POLICY_REPO_SSH_PUBLIC_KEY: ${{ vars.OPAL_PYTEST_POLICY_REPO_SSH_PUBLIC_KEY }} + on: push: branches: From 5717770278d93963ad8885f9309c54bdee75af37 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Sun, 30 Mar 2025 11:30:52 -0400 Subject: [PATCH 37/41] feat: add additional environment variables for pytest workflow and update .env.example --- .github/workflows/pytest-tests.yml | 20 ++++++++++++++++-- tests/.env.example | 34 +++++++++++++++++------------- 2 files changed, 37 insertions(+), 17 deletions(-) diff --git a/.github/workflows/pytest-tests.yml b/.github/workflows/pytest-tests.yml index 756fdf043..e25a4c21f 100644 --- a/.github/workflows/pytest-tests.yml +++ b/.github/workflows/pytest-tests.yml @@ -1,8 +1,24 @@ name: Pytest Tests env: - OPAL_PYTEST_POLICY_REPO_SSH_PRIVATE_KEY: ${{ vars.OPAL_PYTEST_POLICY_REPO_SSH_PRIVATE_KEY }} - OPAL_PYTEST_POLICY_REPO_SSH_PUBLIC_KEY: ${{ vars.OPAL_PYTEST_POLICY_REPO_SSH_PUBLIC_KEY }} + OPAL_PYTEST_REPO_OWNER: ${{vars.OPAL_PYTEST_REPO_OWNER}} + OPAL_PYTEST_REPO_NAME: ${{vars.OPAL_PYTEST_REPO_NAME}} + OPAL_PYTEST_REPO_PASSWORD: ${{vars.OPAL_PYTEST_REPO_PASSWORD}} + OPAL_PYTEST_SSH_KEY_PATH: ${{vars.OPAL_PYTEST_SSH_KEY_PATH}} + OPAL_PYTEST_SOURCE_ACCOUNT: ${{vars.OPAL_PYTEST_SOURCE_ACCOUNT}} + OPAL_PYTEST_SOURCE_REPO: ${{vars.OPAL_PYTEST_SOURCE_REPO}} + OPAL_PYTEST_WEBHOOK_SECRET: ${{vars.OPAL_PYTEST_WEBHOOK_SECRET}} + OPAL_PYTEST_SHOULD_FORK: ${{vars.OPAL_PYTEST_SHOULD_FORK}} + OPAL_PYTEST_USE_WEBHOOK: ${{vars.OPAL_PYTEST_USE_WEBHOOK}} + OPAL_PYTEST_WAIT_FOR_DEBUGGER: ${{vars.OPAL_PYTEST_WAIT_FOR_DEBUGGER}} + OPAL_PYTEST_DO_NOT_BUILD_IMAGES: ${{vars.OPAL_PYTEST_DO_NOT_BUILD_IMAGES}} + OPAL_PYTEST_SKIP_REBUILD_IMAGES: 
${{vars.OPAL_PYTEST_SKIP_REBUILD_IMAGES}} + OPAL_PYTEST_KEEP_IMAGES: ${{vars.OPAL_PYTEST_KEEP_IMAGES}} + OPAL_PYTEST_GITHUB_PAT: ${{vars.OPAL_PYTEST_GITHUB_PAT}} + OPAL_PYTEST_POLICY_REPO_SSH_PRIVATE_KEY: ${{vars.OPAL_PYTEST_POLICY_REPO_SSH_PRIVATE_KEY}} + OPAL_PYTEST_POLICY_REPO_SSH_PUBLIC_KEY: ${{vars.OPAL_PYTEST_POLICY_REPO_SSH_PUBLIC_KEY}} + OPAL_AUTH_PRIVATE_KEY: ${{vars.OPAL_AUTH_PRIVATE_KEY}} + OPAL_AUTH_PUBLIC_KEY: ${{vars.OPAL_AUTH_PUBLIC_KEY}} on: push: diff --git a/tests/.env.example b/tests/.env.example index 1a31d8086..dc2d48795 100644 --- a/tests/.env.example +++ b/tests/.env.example @@ -1,17 +1,21 @@ #!/usr/bin/env bash -OPAL_PYTEST_REPO_OWNER="" -OPAL_PYTEST_REPO_NAME="" -OPAL_PYTEST_REPO_PASSWORD="" -OPAL_PYTEST_SSH_KEY_PATH="" -OPAL_PYTEST_SOURCE_ACCOUNT="" -OPAL_PYTEST_SOURCE_REPO="" -OPAL_PYTEST_WEBHOOK_SECRET="" -OPAL_PYTEST_SHOULD_FORK="" -OPAL_PYTEST_USE_WEBHOOK="" -OPAL_PYTEST_WAIT_FOR_DEBUGGER="" -OPAL_PYTEST_DO_NOT_BUILD_IMAGES="" -OPAL_PYTEST_SKIP_REBUILD_IMAGES="" -OPAL_PYTEST_KEEP_IMAGES="" -OPAL_PYTEST_GITHUB_PAT="" -OPAL_POLICY_REPO_SSH_KEY="" \ No newline at end of file +OPAL_PYTEST_REPO_OWNER= +OPAL_PYTEST_REPO_NAME= +OPAL_PYTEST_REPO_PASSWORD= +OPAL_PYTEST_SSH_KEY_PATH= +OPAL_PYTEST_SOURCE_ACCOUNT= +OPAL_PYTEST_SOURCE_REPO= +OPAL_PYTEST_WEBHOOK_SECRET= +OPAL_PYTEST_SHOULD_FORK= +OPAL_PYTEST_USE_WEBHOOK= +OPAL_PYTEST_WAIT_FOR_DEBUGGER= +OPAL_PYTEST_DO_NOT_BUILD_IMAGES= +OPAL_PYTEST_SKIP_REBUILD_IMAGES= +OPAL_PYTEST_KEEP_IMAGES= +OPAL_PYTEST_GITHUB_PAT= +OPAL_PYTEST_POLICY_REPO_SSH_PRIVATE_KEY= +OPAL_PYTEST_POLICY_REPO_SSH_PUBLIC_KEY= + +OPAL_AUTH_PRIVATE_KEY= +OPAL_AUTH_PUBLIC_KEY= \ No newline at end of file From 15984ae928265f77c907076899db4e954f6f7fed Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Sun, 30 Mar 2025 11:57:44 -0400 Subject: [PATCH 38/41] feat: refactor GithubPolicyRepo by removing SSH key handling and cleanup methods --- tests/policy_repos/github_policy_repo copy.py | 372 ++++++++++++++++++ tests/policy_repos/github_policy_repo.py | 213 ++-------- 2 files changed, 399 insertions(+), 186 deletions(-) create mode 100644 tests/policy_repos/github_policy_repo copy.py diff --git a/tests/policy_repos/github_policy_repo copy.py b/tests/policy_repos/github_policy_repo copy.py new file mode 100644 index 000000000..6cc5f0258 --- /dev/null +++ b/tests/policy_repos/github_policy_repo copy.py @@ -0,0 +1,372 @@ +# import codecs +# import logging +# import os +# import random +# import shutil +# import subprocess + +# import requests +# from git import GitCommandError, Repo +# from github import Auth, Github +# from testcontainers.core.utils import setup_logger + +# from tests import utils +# from tests.policy_repos.github_policy_settings import GithubPolicyRepoSettings +# from tests.policy_repos.policy_repo_base import PolicyRepoBase +# from tests.policy_repos.policy_repo_settings import PolicyRepoSettings + + +# class GithubPolicyRepo(PolicyRepoBase): +# def __init__( +# self, +# settings: GithubPolicyRepoSettings, +# logger: logging.Logger = setup_logger(__name__), +# ): +# self.logger = logger +# self.settings = settings + +# self.load_ssh_key() + +# def load_ssh_key(self): + +# if self.settings.opal_policy_repo_ssh_key_public and self.settings.opal_policy_repo_ssh_key_private: +# self.ssh_key = self.settings.opal_policy_repo_ssh_key_public +# self.private_key = self.settings.opal_policy_repo_ssh_key_private +# return + +# if self.settings.ssh_key_path.startswith("~"): +# self.settings.ssh_key_path = 
os.path.expanduser(self.settings.ssh_key_path) + +# if not os.path.exists(self.settings.ssh_key_path): +# self.logger.debug(f"SSH key file not found at {self.settings.ssh_key_path}") + +# self.logger.debug("Generating new SSH key...") +# ssh_keys = utils.generate_ssh_key_pair() +# self.ssh_key = ssh_keys["public"] +# self.private_key = ssh_keys["private"] +# return + +# try: +# with open(self.settings.ssh_key_path, "r") as ssh_key_file: +# self.ssh_key = ssh_key_file.read().strip() + +# os.environ["OPAL_POLICY_REPO_SSH_KEY"] = self.ssh_key +# except Exception as e: +# self.logger.error(f"Error loading SSH key: {e}") + +# def setup_webhook(self, host, port): +# self.webhook_host = host +# self.webhook_port = port + +# def set_envvars(self): +# # Update .env file +# with open(".env", "a") as env_file: +# env_file.write(f'OPAL_POLICY_REPO_URL="{self.get_repo_url()}"\n') +# env_file.write(f'OPAL_POLICY_REPO_BRANCH="{self.test_branch}"\n') + +# with open(".env", "a") as env_file: +# env_file.write(f'OPAL_POLICY_REPO_SSH_KEY="{self.ssh_key}"\n') + +# def get_repo_url(self): +# return self.build_repo_url(self.settings.owner, self.settings.repo) + +# def build_repo_url(self, owner, repo) -> str: +# if owner is None: +# raise Exception("Owner not set") + +# if self.settings.protocol == "ssh" or self.settings.protocol == "git": +# return f"git@{self.settings.host}:{owner}/{repo}.git" + +# if self.settings.protocol == "http" or self.settings.protocol == "https": +# if self.settings.github_pat: +# return f"{self.settings.protocol}://{self.settings.host}/{owner}/{repo}.git" + +# if self.settings.password is None and self.settings.github_pat is None and self.ssh_key is None: +# raise Exception("No authentication method set") + +# return f"{self.settings.protocol}://{self.settings.owner}:{self.settings.password}@{self.settings.host}:{self.settings.port}/{owner}/{repo}" + +# def get_source_repo_url(self): +# return self.build_repo_url(self.settings.source_repo_owner, self.settings.source_repo_name) + +# def clone_initial_repo(self): +# Repo.clone_from(self.get_source_repo_url(), self.settings.local_repo_path) + +# def check_repo_exists(self): +# try: +# gh = Github(auth=Auth.Token(self.settings.github_pat)) +# repo_list = gh.get_user().get_repos() +# for repo in repo_list: +# if repo.full_name == self.settings.repo: +# self.logger.debug(f"Repository {self.settings.repo} already exists.") +# return True + +# except Exception as e: +# self.logger.error(f"Error checking repository existence: {e}") + +# return False + +# def create_target_repo(self): +# if self.check_repo_exists(): +# return + +# try: +# gh = Github(auth=Auth.Token(self.settings.github_pat)) +# gh.get_user().create_repo(self.settings.repo) +# self.logger.info(f"Repository {self.settings.repo} created successfully.") +# except Exception as e: +# self.logger.error(f"Error creating repository: {e}") + +# def fork_target_repo(self): +# if self.check_repo_exists(): +# return + +# self.logger.debug(f"Forking repository {self.settings.source_repo_name}...") + +# if self.settings.github_pat is None: +# try: +# gh = Github(auth=Auth.Token(self.settings.github_pat)) +# gh.get_user().create_fork(self.settings.source_repo_owner, self.settings.source_repo_name) +# self.logger.info( +# f"Repository {self.settings.source_repo_name} forked successfully." 
+# ) +# except Exception as e: +# self.logger.error(f"Error forking repository: {e}") +# return + +# # Try with PAT +# try: +# headers = {"Authorization": f"token {self.settings.github_pat}"} +# response = requests.post( +# f"https://api.github.com/repos/{self.settings.source_repo_owner}/{self.settings.source_repo_name}/forks", +# headers=headers, +# ) +# if response.status_code == 202: +# self.logger.info("Fork created successfully!") +# else: +# self.logger.error(f"Error creating fork: {response.status_code}") +# self.logger.debug(response.json()) + +# except Exception as e: +# self.logger.error(f"Error forking repository: {str(e)}") + +# def cleanup(self): +# self.delete_test_branches() + +# def delete_test_branches(self): +# """Deletes all branches starting with 'test-' from the specified +# repository.""" + +# try: +# self.logger.info(f"Deleting test branches from {self.settings.repo}...") + +# # Initialize Github API +# gh = Github(auth=Auth.Token(self.settings.github_pat)) + +# # Get the repository +# repo = gh.get_user().get_repo(self.settings.repo) + +# # Enumerate branches and delete pytest- branches +# branches = repo.get_branches() +# for branch in branches: +# if branch.name.startswith("test-"): +# ref = f"heads/{branch.name}" +# repo.get_git_ref(ref).delete() +# self.logger.info(f"Deleted branch: {branch.name}") +# else: +# self.logger.info(f"Skipping branch: {branch.name}") + +# self.logger.info("All test branches have been deleted successfully.") +# except Exception as e: +# self.logger.error(f"An error occurred: {e}") + +# return + +# def generate_test_branch(self): +# self.test_branch = ( +# f"test-{random.randint(1000, 9999)}{random.randint(1000, 9999)}" +# ) +# os.environ["OPAL_POLICY_REPO_BRANCH"] = self.test_branch + +# def create_test_branch(self): +# try: +# # Initialize the repository +# repo = Repo(self.settings.local_repo_path) + +# # Ensure the repository is clean +# if repo.is_dirty(untracked_files=True): +# raise RuntimeError( +# "The repository has uncommitted changes. Commit or stash them before proceeding." +# ) + +# # Set the origin remote URL +# remote_url = f"https://github.com/{self.settings.owner}/{self.settings.repo}.git" +# if "origin" in repo.remotes: +# origin = repo.remote(name="origin") +# origin.set_url(remote_url) # Update origin URL if it exists +# else: +# origin = repo.create_remote( +# "origin", remote_url +# ) # Create origin remote if it doesn't exist + +# self.logger.debug(f"Origin set to: {remote_url}") + +# # Create and checkout the new branch +# new_branch = repo.create_head(self.test_branch) # Create branch +# new_branch.checkout() # Switch to the new branch + +# # Push the new branch to the remote +# origin.push(refspec=f"{self.test_branch}:{self.test_branch}") + +# self.logger.info( +# f"Branch '{self.test_branch}' successfully created and pushed." 
+# ) +# except GitCommandError as e: +# self.logger.error(f"Git command failed: {e}") +# except Exception as e: +# self.logger.error(f"An error occurred: {e}") + +# def cleanup(self, delete_repo=True, delete_ssh_key=True): +# subprocess.run(["rm", "-rf", self.settings.local_repo_path], check=True) + +# self.delete_test_branches() + +# if delete_repo: +# self.delete_repo() + +# if delete_ssh_key: +# self.delete_ssh_key() + +# def delete_ssh_key(self): +# gh = Github(auth=Auth.Token(self.settings.github_pat)) +# user = gh.get_user() +# keys = user.get_keys() +# for key in keys: +# if key.title == self.settings.ssh_key_name: +# key.delete() +# self.logger.debug(f"SSH key deleted: {key.title}") +# break + +# self.logger.debug("All OPAL SSH keys have been deleted successfully.") + +# return + +# def delete_repo(self): +# try: +# gh = Github(auth=Auth.Token(self.settings.github_pat)) +# repo = gh.get_user().get_repo(self.settings.repo) +# repo.delete() +# self.logger.debug(f"Repository {self.settings.repo} deleted successfully.") +# except Exception as e: +# self.logger.error(f"Error deleting repository: {e}") + +# def setup(self): +# self.clone_initial_repo() + +# if self.settings.should_fork: +# self.fork_target_repo() +# else: +# self.create_target_repo() + +# self.generate_test_branch() +# self.create_test_branch() + +# def add_ssh_key(self): +# gh = Github(auth=Auth.Token(self.settings.github_pat)) +# user = gh.get_user() +# keys = user.get_keys() +# for key in keys: +# if key.title == self.settings.ssh_key_name: +# return + +# key = user.create_key(self.settings.ssh_key_name, self.ssh_key) +# self.logger.info(f"SSH key added: {key.title}") + +# def create_webhook(self): +# try: +# gh = Github(auth=Auth.Token(self.settings.github_pat)) +# self.logger.info( +# f"Creating webhook for repository {self.settings.owner}/{self.settings.repo}" +# ) +# repo = gh.get_user().get_repo(f"{self.settings.repo}") +# url = utils.create_localtunnel(self.webhook_port) +# self.logger.info(f"Webhook URL: {url}") +# self.github_webhook = repo.create_hook( +# "web", +# { +# "url": f"{url}/webhook", +# "content_type": "json", +# f"secret": "abc123", +# "insecure_ssl": "1", +# }, +# events=["push"], +# active=True, +# ) +# self.logger.info("Webhook created successfully.") +# except Exception as e: +# self.logger.error(f"Error creating webhook: {e}") + +# def delete_webhook(self): +# try: +# gh = Github(auth=Auth.Token(self.settings.github_pat)) +# repo = gh.get_user().get_repo(f"{self.settings.repo}") +# repo.delete_hook(self.github_webhook.id) +# self.logger.info("Webhook deleted successfully.") +# except Exception as e: +# self.logger.error(f"Error deleting webhook: {e}") + +# def update_branch(self, file_name, file_content): +# self.logger.info( +# f"Updating branch '{self.test_branch}' with file '{file_name}' content..." 
+# ) + +# # Decode escape sequences in the file content +# if file_content is not None: +# file_content = codecs.decode(file_content, "unicode_escape") + +# # Create or update the specified file with the provided content +# file_path = os.path.join(self.settings.local_repo_path, file_name) +# with open(file_path, "w") as f: +# f.write(file_content) + +# # if file_content is None: +# # with open(file_path, "r") as f: +# # file_content = f.read() + +# try: +# # Stage the changes +# self.logger.debug(f"Staging changes for branch {self.test_branch}...") +# gh = Github(auth=Auth.Token(self.settings.github_pat)) +# repo = gh.get_user().get_repo(self.settings.repo) +# branch_ref = f"heads/{self.test_branch}" +# ref = repo.get_git_ref(branch_ref) +# latest_commit = repo.get_git_commit(ref.object.sha) +# base_tree = latest_commit.tree +# from github.InputGitTreeElement import InputGitTreeElement +# new_tree = repo.create_git_tree( +# [ +# InputGitTreeElement( +# path=file_name, +# mode="100644", +# type="blob", +# content=file_content, +# ) +# ], +# base_tree, +# ) +# new_commit = repo.create_git_commit( +# f"Commit changes for branch {self.test_branch}", +# new_tree, +# [latest_commit], +# ) +# ref.edit(new_commit.sha) +# self.logger.debug(f"Changes pushed for branch {self.test_branch}.") + +# except Exception as e: +# self.logger.error(f"Error updating branch: {e}") +# return False + +# return True + +# def remove_webhook(self): +# self.github_webhook.delete() diff --git a/tests/policy_repos/github_policy_repo.py b/tests/policy_repos/github_policy_repo.py index 95c041d62..6f0301d18 100644 --- a/tests/policy_repos/github_policy_repo.py +++ b/tests/policy_repos/github_policy_repo.py @@ -25,48 +25,6 @@ def __init__( self.logger = logger self.settings = settings - self.load_ssh_key() - - def load_ssh_key(self): - - if self.settings.opal_policy_repo_ssh_key_public and self.settings.opal_policy_repo_ssh_key_private: - self.ssh_key = self.settings.opal_policy_repo_ssh_key_public - self.private_key = self.settings.opal_policy_repo_ssh_key_private - return - - if self.settings.ssh_key_path.startswith("~"): - self.settings.ssh_key_path = os.path.expanduser(self.settings.ssh_key_path) - - if not os.path.exists(self.settings.ssh_key_path): - self.logger.debug(f"SSH key file not found at {self.settings.ssh_key_path}") - - self.logger.debug("Generating new SSH key...") - ssh_keys = utils.generate_ssh_key_pair() - self.ssh_key = ssh_keys["public"] - self.private_key = ssh_keys["private"] - return - - try: - with open(self.settings.ssh_key_path, "r") as ssh_key_file: - self.ssh_key = ssh_key_file.read().strip() - - os.environ["OPAL_POLICY_REPO_SSH_KEY"] = self.ssh_key - except Exception as e: - self.logger.error(f"Error loading SSH key: {e}") - - def setup_webhook(self, host, port): - self.webhook_host = host - self.webhook_port = port - - def set_envvars(self): - # Update .env file - with open(".env", "a") as env_file: - env_file.write(f'OPAL_POLICY_REPO_URL="{self.get_repo_url()}"\n') - env_file.write(f'OPAL_POLICY_REPO_BRANCH="{self.test_branch}"\n') - - with open(".env", "a") as env_file: - env_file.write(f'OPAL_POLICY_REPO_SSH_KEY="{self.ssh_key}"\n') - def get_repo_url(self): return self.build_repo_url(self.settings.owner, self.settings.repo) @@ -74,17 +32,11 @@ def build_repo_url(self, owner, repo) -> str: if owner is None: raise Exception("Owner not set") - if self.settings.protocol == "ssh" or self.settings.protocol == "git": - return f"git@{self.settings.host}:{owner}/{repo}.git" - - if 
self.settings.protocol == "http" or self.settings.protocol == "https": + if self.settings.protocol in ("http", "https"): if self.settings.github_pat: - return f"{self.settings.protocol}://{self.settings.host}/{owner}/{repo}.git" - - if self.settings.password is None and self.settings.github_pat is None and self.ssh_key is None: - raise Exception("No authentication method set") + return f"{self.settings.protocol}://{self.settings.owner}:{self.settings.github_pat}@{self.settings.host}/{owner}/{repo}.git" - return f"{self.settings.protocol}://{self.settings.owner}:{self.settings.password}@{self.settings.host}:{self.settings.port}/{owner}/{repo}" + raise Exception("No valid authentication method set") def get_source_repo_url(self): return self.build_repo_url(self.settings.source_repo_owner, self.settings.source_repo_name) @@ -117,56 +69,37 @@ def create_target_repo(self): except Exception as e: self.logger.error(f"Error creating repository: {e}") - def fork_target_repo(self): - if self.check_repo_exists(): - return + def cleanup(self, delete_repo=True): + try: + if os.path.exists(self.settings.local_repo_path): + shutil.rmtree(self.settings.local_repo_path) + self.logger.info(f"Local repository at {self.settings.local_repo_path} deleted.") + except Exception as e: + self.logger.error(f"Failed to delete local repo path: {e}") - self.logger.debug(f"Forking repository {self.settings.source_repo_name}...") + try: + self.delete_test_branches() + except Exception as e: + self.logger.error(f"Failed to delete test branches: {e}") - if self.settings.github_pat is None: + if delete_repo: try: - gh = Github(auth=Auth.Token(self.settings.github_pat)) - gh.get_user().create_fork(self.settings.source_repo_owner, self.settings.source_repo_name) - self.logger.info( - f"Repository {self.settings.source_repo_name} forked successfully." - ) + self.delete_repo() except Exception as e: - self.logger.error(f"Error forking repository: {e}") - return - - # Try with PAT - try: - headers = {"Authorization": f"token {self.settings.github_pat}"} - response = requests.post( - f"https://api.github.com/repos/{self.settings.source_repo_owner}/{self.settings.source_repo_name}/forks", - headers=headers, - ) - if response.status_code == 202: - self.logger.info("Fork created successfully!") - else: - self.logger.error(f"Error creating fork: {response.status_code}") - self.logger.debug(response.json()) - - except Exception as e: - self.logger.error(f"Error forking repository: {str(e)}") + self.logger.error(f"Failed to delete remote repo: {e}") - def cleanup(self): self.delete_test_branches() - def delete_test_branches(self): - """Deletes all branches starting with 'test-' from the specified - repository.""" + if delete_repo: + self.delete_repo() + def delete_test_branches(self): try: self.logger.info(f"Deleting test branches from {self.settings.repo}...") - # Initialize Github API gh = Github(auth=Auth.Token(self.settings.github_pat)) - - # Get the repository repo = gh.get_user().get_repo(self.settings.repo) - # Enumerate branches and delete pytest- branches branches = repo.get_branches() for branch in branches: if branch.name.startswith("test-"): @@ -190,32 +123,25 @@ def generate_test_branch(self): def create_test_branch(self): try: - # Initialize the repository repo = Repo(self.settings.local_repo_path) - # Ensure the repository is clean if repo.is_dirty(untracked_files=True): raise RuntimeError( "The repository has uncommitted changes. Commit or stash them before proceeding." 
) - # Set the origin remote URL - remote_url = f"https://github.com/{self.settings.owner}/{self.settings.repo}.git" + remote_url = self.get_repo_url() if "origin" in repo.remotes: origin = repo.remote(name="origin") - origin.set_url(remote_url) # Update origin URL if it exists + origin.set_url(remote_url) else: - origin = repo.create_remote( - "origin", remote_url - ) # Create origin remote if it doesn't exist + origin = repo.create_remote("origin", remote_url) self.logger.debug(f"Origin set to: {remote_url}") - # Create and checkout the new branch - new_branch = repo.create_head(self.test_branch) # Create branch - new_branch.checkout() # Switch to the new branch + new_branch = repo.create_head(self.test_branch) + new_branch.checkout() - # Push the new branch to the remote origin.push(refspec=f"{self.test_branch}:{self.test_branch}") self.logger.info( @@ -226,31 +152,6 @@ def create_test_branch(self): except Exception as e: self.logger.error(f"An error occurred: {e}") - def cleanup(self, delete_repo=True, delete_ssh_key=True): - subprocess.run(["rm", "-rf", self.settings.local_repo_path], check=True) - - self.delete_test_branches() - - if delete_repo: - self.delete_repo() - - if delete_ssh_key: - self.delete_ssh_key() - - def delete_ssh_key(self): - gh = Github(auth=Auth.Token(self.settings.github_pat)) - user = gh.get_user() - keys = user.get_keys() - for key in keys: - if key.title == self.settings.ssh_key_name: - key.delete() - self.logger.debug(f"SSH key deleted: {key.title}") - break - - self.logger.debug("All OPAL SSH keys have been deleted successfully.") - - return - def delete_repo(self): try: gh = Github(auth=Auth.Token(self.settings.github_pat)) @@ -262,86 +163,29 @@ def delete_repo(self): def setup(self): self.clone_initial_repo() - - if self.settings.should_fork: - self.fork_target_repo() - else: - self.create_target_repo() - + self.create_target_repo() self.generate_test_branch() self.create_test_branch() - def add_ssh_key(self): - gh = Github(auth=Auth.Token(self.settings.github_pat)) - user = gh.get_user() - keys = user.get_keys() - for key in keys: - if key.title == self.settings.ssh_key_name: - return - - key = user.create_key(self.settings.ssh_key_name, self.ssh_key) - self.logger.info(f"SSH key added: {key.title}") - - def create_webhook(self): - try: - gh = Github(auth=Auth.Token(self.settings.github_pat)) - self.logger.info( - f"Creating webhook for repository {self.settings.owner}/{self.settings.repo}" - ) - repo = gh.get_user().get_repo(f"{self.settings.repo}") - url = utils.create_localtunnel(self.webhook_port) - self.logger.info(f"Webhook URL: {url}") - self.github_webhook = repo.create_hook( - "web", - { - "url": f"{url}/webhook", - "content_type": "json", - f"secret": "abc123", - "insecure_ssl": "1", - }, - events=["push"], - active=True, - ) - self.logger.info("Webhook created successfully.") - except Exception as e: - self.logger.error(f"Error creating webhook: {e}") - - def delete_webhook(self): - try: - gh = Github(auth=Auth.Token(self.settings.github_pat)) - repo = gh.get_user().get_repo(f"{self.settings.repo}") - repo.delete_hook(self.github_webhook.id) - self.logger.info("Webhook deleted successfully.") - except Exception as e: - self.logger.error(f"Error deleting webhook: {e}") - def update_branch(self, file_name, file_content): self.logger.info( f"Updating branch '{self.test_branch}' with file '{file_name}' content..." 
) - # Decode escape sequences in the file content if file_content is not None: file_content = codecs.decode(file_content, "unicode_escape") - # Create or update the specified file with the provided content file_path = os.path.join(self.settings.local_repo_path, file_name) with open(file_path, "w") as f: f.write(file_content) - # if file_content is None: - # with open(file_path, "r") as f: - # file_content = f.read() - try: - # Stage the changes self.logger.debug(f"Staging changes for branch {self.test_branch}...") gh = Github(auth=Auth.Token(self.settings.github_pat)) repo = gh.get_user().get_repo(self.settings.repo) branch_ref = f"heads/{self.test_branch}" ref = repo.get_git_ref(branch_ref) latest_commit = repo.get_git_commit(ref.object.sha) - base_tree = latest_commit.tree from github.InputGitTreeElement import InputGitTreeElement new_tree = repo.create_git_tree( [ @@ -352,7 +196,7 @@ def update_branch(self, file_name, file_content): content=file_content, ) ], - base_tree, + latest_commit.tree, ) new_commit = repo.create_git_commit( f"Commit changes for branch {self.test_branch}", @@ -367,6 +211,3 @@ def update_branch(self, file_name, file_content): return False return True - - def remove_webhook(self): - self.github_webhook.delete() From d0bfbb9fe583aced1e8ebdc96f9a6bb095554349 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Sun, 30 Mar 2025 12:25:56 -0400 Subject: [PATCH 39/41] feat: refactor GithubPolicyRepo cleanup method and improve webhook setup structure --- tests/conftest.py | 2 +- tests/policy_repos/github_policy_repo.py | 19 ++++++++++--------- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index feebdc521..a2fcbccac 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -468,7 +468,7 @@ def setup(opal_clients, policy_repo, session_matrix): logger.info(f"{key}: {val}") yield - policy_repo.cleanup(delete_ssh_key=False) + policy_repo.cleanup() if session_matrix["is_final"]: logger.info("Finalizing test session...") utils.remove_env("OPAL_TESTS_DEBUG") diff --git a/tests/policy_repos/github_policy_repo.py b/tests/policy_repos/github_policy_repo.py index 6f0301d18..773bd9635 100644 --- a/tests/policy_repos/github_policy_repo.py +++ b/tests/policy_repos/github_policy_repo.py @@ -17,6 +17,11 @@ class GithubPolicyRepo(PolicyRepoBase): + def setup_webhook(self, host, port): + pass + + def create_webhook(self): + pass def __init__( self, settings: GithubPolicyRepoSettings, @@ -32,9 +37,10 @@ def build_repo_url(self, owner, repo) -> str: if owner is None: raise Exception("Owner not set") - if self.settings.protocol in ("http", "https"): - if self.settings.github_pat: - return f"{self.settings.protocol}://{self.settings.owner}:{self.settings.github_pat}@{self.settings.host}/{owner}/{repo}.git" + protocol = "https" + pat = self.settings.github_pat + if pat: + return f"{protocol}://{pat}@{self.settings.host}/{owner}/{repo}.git" raise Exception("No valid authentication method set") @@ -88,11 +94,6 @@ def cleanup(self, delete_repo=True): except Exception as e: self.logger.error(f"Failed to delete remote repo: {e}") - self.delete_test_branches() - - if delete_repo: - self.delete_repo() - def delete_test_branches(self): try: self.logger.info(f"Deleting test branches from {self.settings.repo}...") @@ -162,8 +163,8 @@ def delete_repo(self): self.logger.error(f"Error deleting repository: {e}") def setup(self): - self.clone_initial_repo() self.create_target_repo() + self.clone_initial_repo() 
self.generate_test_branch() self.create_test_branch() From c702a908eb55deeda7d5a65e843a1786b44f64e6 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Sun, 30 Mar 2025 13:40:50 -0400 Subject: [PATCH 40/41] feat: enhance policy repo fixture with debugger wait and execution pause logic --- tests/conftest.py | 420 ++------------------------------- tests/fixtures/opal.py | 358 ++++++++++++++++++++++++++++ tests/fixtures/policy_repos.py | 9 +- tests/utils.py | 51 +++- 4 files changed, 423 insertions(+), 415 deletions(-) create mode 100644 tests/fixtures/opal.py diff --git a/tests/conftest.py b/tests/conftest.py index a2fcbccac..5557bfa70 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,51 +1,20 @@ -import json import os -import shutil -import tempfile -import threading import time -from typing import List - -import debugpy +import shutil import pytest -from testcontainers.core.network import Network -from testcontainers.core.utils import setup_logger - +import tempfile from tests import utils -from tests.containers.broadcast_container_base import BroadcastContainerBase -from tests.containers.opal_client_container import OpalClientContainer -from tests.containers.opal_server_container import OpalServerContainer -from tests.containers.settings.opal_client_settings import OpalClientSettings -from tests.containers.settings.opal_server_settings import OpalServerSettings -from tests.policy_repos.policy_repo_base import PolicyRepoBase -from tests.settings import pytest_settings - -logger = setup_logger(__name__) - -# wait some seconds for the debugger to attach -debugger_wait_time = 5 # seconds - - -def cancel_wait_for_client_after_timeout(): - try: - time.sleep(debugger_wait_time) - debugpy.wait_for_client.cancel() - except Exception as e: - logger.debug(f"Failed to cancel wait for client: {e}") +from testcontainers.core.utils import setup_logger +from tests.fixtures.opal import opal_clients, opal_servers, opal_network, connected_clients, topiced_clients +from tests.fixtures.policy_repos import policy_repo +from tests.fixtures.images import opa_image, cedar_image, opal_client_image, opal_client_with_opa_image, opal_server_image +from tests.fixtures.broadcasters import broadcast_channel, postgres_broadcast_channel, redis_broadcast_channel, kafka_broadcast_channel -try: - if pytest_settings.wait_for_debugger: - t = threading.Thread(target=cancel_wait_for_client_after_timeout) - t.start() - logger.debug(f"Waiting for debugger to attach... {debugger_wait_time} seconds timeout") - debugpy.wait_for_client() -except Exception as e: - logger.debug(f"Failed to attach debugger: {e}") +logger = setup_logger("conftest") -utils.export_env("OPAL_TESTS_DEBUG", "true") -# utils.install_opal_server_and_client() +utils.pre_set() @pytest.fixture(scope="session") def temp_dir(): @@ -76,368 +45,6 @@ def temp_dir(): logger.debug(f"Temporary directory removed: {dir_path}") -@pytest.fixture(scope="session", autouse=True) -def opal_network(): - """Creates a Docker network and yields it. - - The network is cleaned up after all tests have finished running. 
- """ - network = Network().create() - - yield network - - logger.info("Removing network...") - time.sleep(3) # wait for the containers to stop - try: - network.remove() - logger.info("Network removed.") - except Exception as e: - if logger.level == "DEBUG": - logger.error(f"Failed to remove network: {e}") - else: - logger.error(f"Failed to remove network got exception\n{e}") - -from tests.fixtures.broadcasters import ( - broadcast_channel, - kafka_broadcast_channel, - postgres_broadcast_channel, - redis_broadcast_channel, -) -from tests.fixtures.images import opal_server_image -from tests.fixtures.policy_repos import policy_repo - - -@pytest.fixture(scope="session") -def opal_servers( - opal_network: Network, - policy_repo, - opal_server_image: str, - broadcast_channel: BroadcastContainerBase, - # kafka_broadcast_channel: KafkaContainer, - # redis_broadcast_channel: RedisBroadcastContainer, - session_matrix, -): - """Fixture that initializes and manages OPAL server containers for testing. - - This fixture sets up a specified number of OPAL server containers, each - connected to the provided Docker network and using the specified broadcast - channel. The first server container sets up and creates a webhook for the - policy repository. All containers are started and their logs are monitored - for successful cloning of the policy repository. The containers are stopped - after the test session is complete. - - Args: - opal_network (Network): The Docker network to which the containers are connected. - broadcast_channel (BroadcastContainerBase): The broadcast channel container. - policy_repo (PolicyRepoBase): The policy repository to be used. - number_of_opal_servers (int): The number of OPAL server containers to start. - opal_server_image (str): The Docker image used for the OPAL servers. - topics (dict[str, int]): The topics for OPAL data configuration. - kafka_broadcast_channel (KafkaBroadcastContainer): The Kafka broadcast channel container. - redis_broadcast_channel (RedisBroadcastContainer): The Redis broadcast channel container. - session_matrix: The session matrix used for the test configuration. - - Yields: - List[OpalServerContainer]: A list of running OPAL server containers. 
- """ - - # broadcast_channel = redis_broadcast_channel - # broadcast_channel = kafka_broadcast_channel - broadcast_channel = broadcast_channel[0] - - if not broadcast_channel: - raise ValueError("Missing 'broadcast_channel' container.") - - containers = [] # List to store container instances - - - for i in range(session_matrix["number_of_opal_servers"]): - container_name = f"opal_server_{i+1}" - - repo_url = policy_repo.get_repo_url() - container = OpalServerContainer( - OpalServerSettings( - broadcast_uri=broadcast_channel.get_url(), - container_name=container_name, - container_index=i + 1, - uvicorn_workers="4", - policy_repo_url=repo_url, - image=opal_server_image, - log_level="DEBUG", - data_topics=" ".join(session_matrix["topics"].keys()), - polling_interval=3, - policy_repo_main_branch=policy_repo.test_branch, - POLICY_REPO_SSH_KEY=session_matrix["opal_policy_repo_ssh_key_private"], - ), - network=opal_network, - ) - - container.start() - container.get_wrapped_container().reload() - - if i == 0: - # Only the first server should setup the webhook - # policy_repo.setup_webhook( - # container.get_container_host_ip(), container.settings.port - # ) - # policy_repo.create_webhook() - pass - - logger.info( - f"Started container: {container_name}, ID: {container.get_wrapped_container().id}" - ) - container.wait_for_log("Clone succeeded", timeout=30) - containers.append(container) - - yield containers - - for container in containers: - container.stop() - -@pytest.fixture(scope="session") -def connected_clients(opal_clients: List[OpalClientContainer]): - """A fixture that waits for all OPAL clients to connect to the PubSub - server before yielding them. - - This fixture takes a list of OPAL clients as input and waits for each of them - to connect to the PubSub server before yielding them. The fixture is used to - ensure that all OPAL clients are connected and ready to receive messages - before the tests are executed. - - Parameters - ---------- - opal_clients : List[OpalClientContainer] - A list of OPAL client containers. - - Yields - ------ - List[OpalClientContainer] - A list of connected OPAL client containers. - """ - for client in opal_clients: - assert client.wait_for_log( - log_str="Connected to PubSub server", timeout=30 - ), f"Client {client.settings.container_name} did not connect to PubSub server." - yield opal_clients - - -from tests.fixtures.images import ( - cedar_image, - opa_image, - opal_client_image, - opal_client_with_opa_image, -) -from tests.fixtures.policy_stores import cedar_server, opa_server - - -@pytest.fixture(scope="session") -def opal_clients( - opal_network: Network, - opal_servers: List[OpalServerContainer], - # opa_server: OpaContainer, - # cedar_server: CedarContainer, - request, - session_matrix, - opal_client_with_opa_image, -): - """A fixture that starts and manages multiple OPAL client containers. - - This fixture takes a list of OPAL server containers as input and starts a - specified number of OPAL client containers, each connected to the first - OPAL server container. The fixture yields the list of started OPAL client - containers. - - Parameters - ---------- - opal_network : Network - The Docker network to which the containers are connected. - opal_servers : List[OpalServerContainer] - A list of OPAL server containers. - #opa_server : OpaContainer - # The OPA server container. - cedar_server : CedarContainer - The Cedar server container. - request - The pytest request object. - number_of_opal_clients : int - The number of OPAL clients to start. 
- opal_client_image - The Docker image used for the OPAL clients. - - Yields - ------ - List[OpalClientContainer] - A list of started OPAL client containers. - """ - if not opal_servers or len(opal_servers) == 0: - raise ValueError("Missing 'opal_server' container.") - - opal_server_url = f"http://{opal_servers[0].settings.container_name}:7002"#{opal_servers[0].settings.port}" - - containers = [] # List to store OpalClientContainer instances - - for i in range(session_matrix["number_of_opal_clients"]): - container_name = f"opal_client_{i+1}" # Unique name for each client - - client_token = opal_servers[0].obtain_OPAL_tokens(container_name)["client"] - callbacks = json.dumps( - { - "callbacks": [ - [ - f"{opal_server_url}/data/callback_report", - { - "method": "post", - "process_data": False, - "headers": { - "Authorization": f"Bearer {client_token}", - "content-type": "application/json", - }, - }, - ] - ] - } - ) - - container = OpalClientContainer( - OpalClientSettings( - image=opal_client_with_opa_image, - container_name=container_name, - container_index=i + 1, - opal_server_url=opal_server_url, - client_token=client_token, - default_update_callbacks=callbacks, - # inline_opa_enabled=False, - # policy_store_url=f"http://localhost:8181",#{opa_server.settings.port}", - # opa_port=opa_server.settings.port, - ), - network=opal_network, - ) - - container.start() - logger.info( - f"Started OpalClientContainer: {container_name}, ID: {container.get_wrapped_container().id}" - ) - containers.append(container) - - yield containers - - try: - for container in containers: - container.stop() - except Exception: - logger.error(f"Failed to stop containers: {container}") - pass - -@pytest.fixture(scope="session") -def topiced_clients(opal_network: Network, opal_servers: list[OpalServerContainer], session_matrix): - """Fixture that starts and manages multiple OPAL client containers, each - subscribing to a different topic. - - The fixture takes a dictionary of topics and the number of clients to - subscribe to each topic. It starts the specified number of OPAL client - containers, each connected to the first OPAL server container, and each - subscribing to the specified topic. The fixture yields the list of started - OPAL client containers, organized by topic. - - Parameters - ---------- - topics : dict - A dictionary mapping topic names to the number of OpalClientContainer - instances that should subscribe to each topic. - opal_network : Network - The Docker network to which the containers are connected. - opal_servers : list[OpalServerContainer] - A list of OPAL server containers. - - Yields - ------ - dict - A dictionary mapping topic names to a list of OpalClientContainer - instances that are subscribed to the topic. 
- """ - if not opal_servers or len(opal_servers) == 0: - raise ValueError("Missing 'opal_server' container.") - - opal_server_url = f"http://{opal_servers[0].settings.container_name}:7002"#{opal_servers[0].settings.port}" - containers = {} # List to store OpalClientContainer instances - - client_token = opal_servers[0].obtain_OPAL_tokens("topiced_opal_client_?x?")[ - "client" - ] - callbacks = json.dumps( - { - "callbacks": [ - [ - f"{opal_server_url}/data/callback_report", - { - "method": "post", - "process_data": False, - "headers": { - "Authorization": f"Bearer {client_token}", - "content-type": "application/json", - }, - }, - ] - ] - } - ) - - for topic, number_of_clients in session_matrix["topics"].items(): - for i in range(number_of_clients): - container_name = f"opal_client_{topic}_{i+1}" # Unique name for each client - - container = OpalClientContainer( - OpalClientSettings( - image="permitio/opal-client:latest", - container_name=container_name, - container_index=i + 1, - opal_server_url=opal_server_url, - client_token=client_token, - default_update_callbacks=callbacks, - topics=topic, - ), - network=opal_network, - ) - - container.start() - logger.info( - f"Started OpalClientContainer: {container_name}, ID: {container.get_wrapped_container().id} - on topic: {topic}" - ) - containers[topic] = containers.get(topic, []) - - assert container.wait_for_log( - log_str="Connected to PubSub server", timeout=30 - ), f"Client {client.settings.container_name} did not connect to PubSub server." - - containers[topic].append(container) - - yield containers - - for _, clients in containers.items(): - for client in clients: - client.stop() - - -def wait_sometime(): - """Pauses execution based on the environment. - - If the code is running inside GitHub Actions, it pauses execution - for 30 seconds. Otherwise, it waits for user input to continue. - - This can be used to control the flow of execution depending on the - environment in which the code is being executed. - """ - - if os.getenv("GITHUB_ACTIONS") == "true": - logger.info("Running inside GitHub Actions. Sleeping for 30 seconds...") - for secconds_ellapsed in range(30): - time.sleep(1) - print(f"Sleeping for \033[91m{29 - secconds_ellapsed}\033[0m seconds... \r",end="\r" if secconds_ellapsed < 29 else "\n") - else: - logger.info("Running on the local machine. Press Enter to continue...") - input() # Wait for key press - - @pytest.fixture(scope="session", autouse=True) def setup(opal_clients, policy_repo, session_matrix): """A setup fixture that is run once per test session. 
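
For orientation: the fixtures removed from tests/conftest.py above reappear with the same names and session scope in tests/fixtures/opal.py further down in this patch, and conftest.py simply imports them. The following is a minimal, hypothetical sketch (not part of this patch series) of how a test module would consume those fixtures; the fixture names, the settings.container_name attribute, and the "client" key returned by obtain_OPAL_tokens() are taken from the hunks in this series, while the test and file names are invented for illustration.

# tests/test_fixture_usage_example.py -- illustrative sketch only, not part of this patch
def test_first_server_issues_client_token(opal_servers):
    # opal_servers is the session-scoped list of OpalServerContainer instances;
    # obtain_OPAL_tokens() is shown in this series returning a dict with a "client" entry.
    tokens = opal_servers[0].obtain_OPAL_tokens("fixture_usage_example")
    assert tokens.get("client"), "expected the OPAL server to issue a client token"

def test_topiced_clients_follow_naming_convention(topiced_clients):
    # topiced_clients yields {topic: [OpalClientContainer, ...]}; the fixture names
    # each container opal_client_<topic>_<n>, so the topic appears in the container name.
    for topic, clients in topiced_clients.items():
        assert clients, f"no OPAL clients were started for topic '{topic}'"
        for client in clients:
            assert topic in client.settings.container_name

Because these fixtures are session-scoped, the containers are started once per pytest session and shared by every test, which is the motivation for moving them into a dedicated fixtures module rather than keeping them inline in conftest.py.
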
@@ -463,13 +70,14 @@ def setup(opal_clients, policy_repo, session_matrix): logger.info("Initializing test session...") - - for key, val in session_matrix.items(): - logger.info(f"{key}: {val}") + logger.debug("\n\nusing session matrix:") + for key in session_matrix: + logger.debug(f"{key}: {session_matrix[key]}") + logger.debug("\n\n") yield policy_repo.cleanup() if session_matrix["is_final"]: logger.info("Finalizing test session...") utils.remove_env("OPAL_TESTS_DEBUG") - wait_sometime() + utils.wait_sometime() diff --git a/tests/fixtures/opal.py b/tests/fixtures/opal.py new file mode 100644 index 000000000..d433c7e3a --- /dev/null +++ b/tests/fixtures/opal.py @@ -0,0 +1,358 @@ +from testcontainers.core.network import Network +import json +from tests.containers.broadcast_container_base import BroadcastContainerBase +from tests.containers.opal_client_container import OpalClientContainer +from typing import List +from tests.containers.opal_server_container import OpalServerContainer +from tests.containers.settings.opal_client_settings import OpalClientSettings +from tests.containers.settings.opal_server_settings import OpalServerSettings +from tests.policy_repos.policy_repo_base import PolicyRepoBase +import time +from testcontainers.core.utils import setup_logger +import pytest + + + +logger = setup_logger("opal_fixtures") + +@pytest.fixture(scope="session", autouse=True) +def opal_network(): + """Creates a Docker network and yields it. + + The network is cleaned up after all tests have finished running. + """ + network = Network().create() + + yield network + + logger.info("Removing network...") + time.sleep(3) # wait for the containers to stop + try: + network.remove() + logger.info("Network removed.") + except Exception as e: + if logger.level == "DEBUG": + logger.error(f"Failed to remove network: {e}") + else: + logger.error(f"Failed to remove network got exception\n{e}") + +from tests.fixtures.broadcasters import ( + broadcast_channel, + kafka_broadcast_channel, + postgres_broadcast_channel, + redis_broadcast_channel, +) +from tests.fixtures.images import opal_server_image +from tests.fixtures.policy_repos import policy_repo + + +@pytest.fixture(scope="session") +def opal_servers( + opal_network: Network, + policy_repo, + opal_server_image: str, + broadcast_channel: BroadcastContainerBase, + # kafka_broadcast_channel: KafkaContainer, + # redis_broadcast_channel: RedisBroadcastContainer, + session_matrix, +): + """Fixture that initializes and manages OPAL server containers for testing. + + This fixture sets up a specified number of OPAL server containers, each + connected to the provided Docker network and using the specified broadcast + channel. The first server container sets up and creates a webhook for the + policy repository. All containers are started and their logs are monitored + for successful cloning of the policy repository. The containers are stopped + after the test session is complete. + + Args: + opal_network (Network): The Docker network to which the containers are connected. + broadcast_channel (BroadcastContainerBase): The broadcast channel container. + policy_repo (PolicyRepoBase): The policy repository to be used. + number_of_opal_servers (int): The number of OPAL server containers to start. + opal_server_image (str): The Docker image used for the OPAL servers. + topics (dict[str, int]): The topics for OPAL data configuration. + kafka_broadcast_channel (KafkaBroadcastContainer): The Kafka broadcast channel container. 
+ redis_broadcast_channel (RedisBroadcastContainer): The Redis broadcast channel container. + session_matrix: The session matrix used for the test configuration. + + Yields: + List[OpalServerContainer]: A list of running OPAL server containers. + """ + + # broadcast_channel = redis_broadcast_channel + # broadcast_channel = kafka_broadcast_channel + broadcast_channel = broadcast_channel[0] + + if not broadcast_channel: + raise ValueError("Missing 'broadcast_channel' container.") + + containers = [] # List to store container instances + + + for i in range(session_matrix["number_of_opal_servers"]): + container_name = f"opal_server_{i+1}" + + repo_url = policy_repo.get_repo_url() + container = OpalServerContainer( + OpalServerSettings( + broadcast_uri=broadcast_channel.get_url(), + container_name=container_name, + container_index=i + 1, + uvicorn_workers="4", + policy_repo_url=repo_url, + image=opal_server_image, + log_level="DEBUG", + data_topics=" ".join(session_matrix["topics"].keys()), + polling_interval=3, + policy_repo_main_branch=policy_repo.test_branch, + POLICY_REPO_SSH_KEY=session_matrix["opal_policy_repo_ssh_key_private"], + ), + network=opal_network, + ) + + container.start() + container.get_wrapped_container().reload() + + if i == 0: + # Only the first server should setup the webhook + # policy_repo.setup_webhook( + # container.get_container_host_ip(), container.settings.port + # ) + # policy_repo.create_webhook() + pass + + logger.info( + f"Started container: {container_name}, ID: {container.get_wrapped_container().id}" + ) + container.wait_for_log("Clone succeeded", timeout=30) + containers.append(container) + + yield containers + + for container in containers: + container.stop() + +@pytest.fixture(scope="session") +def connected_clients(opal_clients: List[OpalClientContainer]): + """A fixture that waits for all OPAL clients to connect to the PubSub + server before yielding them. + + This fixture takes a list of OPAL clients as input and waits for each of them + to connect to the PubSub server before yielding them. The fixture is used to + ensure that all OPAL clients are connected and ready to receive messages + before the tests are executed. + + Parameters + ---------- + opal_clients : List[OpalClientContainer] + A list of OPAL client containers. + + Yields + ------ + List[OpalClientContainer] + A list of connected OPAL client containers. + """ + for client in opal_clients: + assert client.wait_for_log( + log_str="Connected to PubSub server", timeout=30 + ), f"Client {client.settings.container_name} did not connect to PubSub server." + yield opal_clients + + +from tests.fixtures.images import ( + cedar_image, + opa_image, + opal_client_image, + opal_client_with_opa_image, +) +from tests.fixtures.policy_stores import cedar_server, opa_server + + +@pytest.fixture(scope="session") +def opal_clients( + opal_network: Network, + opal_servers: List[OpalServerContainer], + # opa_server: OpaContainer, + # cedar_server: CedarContainer, + request, + session_matrix, + opal_client_with_opa_image, +): + """A fixture that starts and manages multiple OPAL client containers. + + This fixture takes a list of OPAL server containers as input and starts a + specified number of OPAL client containers, each connected to the first + OPAL server container. The fixture yields the list of started OPAL client + containers. + + Parameters + ---------- + opal_network : Network + The Docker network to which the containers are connected. 
+ opal_servers : List[OpalServerContainer] + A list of OPAL server containers. + #opa_server : OpaContainer + # The OPA server container. + cedar_server : CedarContainer + The Cedar server container. + request + The pytest request object. + number_of_opal_clients : int + The number of OPAL clients to start. + opal_client_image + The Docker image used for the OPAL clients. + + Yields + ------ + List[OpalClientContainer] + A list of started OPAL client containers. + """ + if not opal_servers or len(opal_servers) == 0: + raise ValueError("Missing 'opal_server' container.") + + opal_server_url = f"http://{opal_servers[0].settings.container_name}:7002"#{opal_servers[0].settings.port}" + + containers = [] # List to store OpalClientContainer instances + + for i in range(session_matrix["number_of_opal_clients"]): + container_name = f"opal_client_{i+1}" # Unique name for each client + + client_token = opal_servers[0].obtain_OPAL_tokens(container_name)["client"] + callbacks = json.dumps( + { + "callbacks": [ + [ + f"{opal_server_url}/data/callback_report", + { + "method": "post", + "process_data": False, + "headers": { + "Authorization": f"Bearer {client_token}", + "content-type": "application/json", + }, + }, + ] + ] + } + ) + + container = OpalClientContainer( + OpalClientSettings( + image=opal_client_with_opa_image, + container_name=container_name, + container_index=i + 1, + opal_server_url=opal_server_url, + client_token=client_token, + default_update_callbacks=callbacks, + # inline_opa_enabled=False, + # policy_store_url=f"http://localhost:8181",#{opa_server.settings.port}", + # opa_port=opa_server.settings.port, + ), + network=opal_network, + ) + + container.start() + logger.info( + f"Started OpalClientContainer: {container_name}, ID: {container.get_wrapped_container().id}" + ) + containers.append(container) + + yield containers + + try: + for container in containers: + container.stop() + except Exception: + logger.error(f"Failed to stop containers: {container}") + pass + +@pytest.fixture(scope="session") +def topiced_clients(opal_network: Network, opal_servers: list[OpalServerContainer], session_matrix): + """Fixture that starts and manages multiple OPAL client containers, each + subscribing to a different topic. + + The fixture takes a dictionary of topics and the number of clients to + subscribe to each topic. It starts the specified number of OPAL client + containers, each connected to the first OPAL server container, and each + subscribing to the specified topic. The fixture yields the list of started + OPAL client containers, organized by topic. + + Parameters + ---------- + topics : dict + A dictionary mapping topic names to the number of OpalClientContainer + instances that should subscribe to each topic. + opal_network : Network + The Docker network to which the containers are connected. + opal_servers : list[OpalServerContainer] + A list of OPAL server containers. + + Yields + ------ + dict + A dictionary mapping topic names to a list of OpalClientContainer + instances that are subscribed to the topic. 
+ """ + if not opal_servers or len(opal_servers) == 0: + raise ValueError("Missing 'opal_server' container.") + + opal_server_url = f"http://{opal_servers[0].settings.container_name}:7002"#{opal_servers[0].settings.port}" + containers = {} # List to store OpalClientContainer instances + + client_token = opal_servers[0].obtain_OPAL_tokens("topiced_opal_client_?x?")[ + "client" + ] + callbacks = json.dumps( + { + "callbacks": [ + [ + f"{opal_server_url}/data/callback_report", + { + "method": "post", + "process_data": False, + "headers": { + "Authorization": f"Bearer {client_token}", + "content-type": "application/json", + }, + }, + ] + ] + } + ) + + for topic, number_of_clients in session_matrix["topics"].items(): + for i in range(number_of_clients): + container_name = f"opal_client_{topic}_{i+1}" # Unique name for each client + + container = OpalClientContainer( + OpalClientSettings( + image="permitio/opal-client:latest", + container_name=container_name, + container_index=i + 1, + opal_server_url=opal_server_url, + client_token=client_token, + default_update_callbacks=callbacks, + topics=topic, + ), + network=opal_network, + ) + + container.start() + logger.info( + f"Started OpalClientContainer: {container_name}, ID: {container.get_wrapped_container().id} - on topic: {topic}" + ) + containers[topic] = containers.get(topic, []) + + assert container.wait_for_log( + log_str="Connected to PubSub server", timeout=30 + ), f"Client {client.settings.container_name} did not connect to PubSub server." + + containers[topic].append(container) + + yield containers + + for _, clients in containers.items(): + for client in clients: + client.stop() + diff --git a/tests/fixtures/policy_repos.py b/tests/fixtures/policy_repos.py index 5ba1eb7ca..3dd953cfd 100644 --- a/tests/fixtures/policy_repos.py +++ b/tests/fixtures/policy_repos.py @@ -162,7 +162,7 @@ def policy_repo_settings(temp_dir: str, session_matrix, opal_network, policy_rep yield (map[policy_repo_type](temp_dir, session_matrix), gitea_server_settings) @pytest.fixture(scope="session") -def policy_repo(temp_dir: str, session_matrix, opal_network, request) -> GithubPolicyRepo | GiteaPolicyRepo: +def policy_repo(temp_dir: str, session_matrix, opal_network, request): """Creates a policy repository for testing. This fixture creates a policy repository based on the configuration @@ -180,13 +180,6 @@ def policy_repo(temp_dir: str, session_matrix, opal_network, request) -> GithubP :return: The PolicyRepoBase object. 
""" - logger.info("Creating policy repo...") - logger.info("\n\nusing session matrix:") - for key in session_matrix: - logger.info(f"{key}: {session_matrix[key]}") - logger.info("\n\n") - - settings, server = next(policy_repo_settings(temp_dir, session_matrix, opal_network, session_matrix["repo_provider"])) policy_repo = PolicyRepoFactory( diff --git a/tests/utils.py b/tests/utils.py index aff7b25a7..db38bd4e4 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -5,9 +5,11 @@ import re import subprocess import sys +import threading import time import aiohttp +import debugpy import requests from cryptography.hazmat.primitives import serialization from cryptography.hazmat.primitives.asymmetric import rsa @@ -506,4 +508,51 @@ def global_exception_handler(exc_type, exc_value, exc_traceback): sys.excepthook = global_exception_handler def str2bool(val: str) -> bool: - return str(val).lower() in ("true", "1", "yes") \ No newline at end of file + return str(val).lower() in ("true", "1", "yes") + + +def pre_set(): + # wait some seconds for the debugger to attach + debugger_wait_time = 5 # seconds + + + def cancel_wait_for_client_after_timeout(): + try: + time.sleep(debugger_wait_time) + debugpy.wait_for_client.cancel() + except Exception as e: + logger.debug(f"Failed to cancel wait for client: {e}") + + + try: + if pytest_settings.wait_for_debugger: + t = threading.Thread(target=cancel_wait_for_client_after_timeout) + t.start() + logger.debug(f"Waiting for debugger to attach... {debugger_wait_time} seconds timeout") + debugpy.wait_for_client() + except Exception as e: + logger.debug(f"Failed to attach debugger: {e}") + + export_env("OPAL_TESTS_DEBUG", "true") + # utils.install_opal_server_and_client() + + + +def wait_sometime(): + """Pauses execution based on the environment. + + If the code is running inside GitHub Actions, it pauses execution + for 30 seconds. Otherwise, it waits for user input to continue. + + This can be used to control the flow of execution depending on the + environment in which the code is being executed. + """ + + if os.getenv("GITHUB_ACTIONS") == "true": + logger.info("Running inside GitHub Actions. Sleeping for 30 seconds...") + for secconds_ellapsed in range(30): + time.sleep(1) + print(f"Sleeping for \033[91m{29 - secconds_ellapsed}\033[0m seconds... \r",end="\r" if secconds_ellapsed < 29 else "\n") + else: + logger.info("Running on the local machine. 
Press Enter to continue...") + input() # Wait for key press From c180f44929731dd36e2e8f16947559c2b01eb149 Mon Sep 17 00:00:00 2001 From: ariWeinberg <66802642+ariWeinberg@users.noreply.github.com> Date: Sun, 30 Mar 2025 14:48:17 -0400 Subject: [PATCH 41/41] feat: clean up code by removing unused files and fixing formatting issues --- .github/workflows/pytest-tests.yml | 1 - a.txt | 10 -- app-tests/minrun.sh | 2 +- app-tests/sample_service/Dockerfile | 2 +- app-tests/sample_service/app.py | 53 ++++---- app-tests/sample_service/nginx.conf | 6 +- app-tests/sample_service/openapi.yaml | 4 +- app-tests/sample_service/policy.rego | 2 +- app-tests/sample_service/supervisord.conf | 2 +- docker/Dockerfile.client | 30 ++-- docker/docker-compose-local.yml | 2 +- packages/opal-client/opal_client/main.py | 5 +- packages/opal-server/opal_server/main.py | 7 +- tests/.env.example | 2 +- tests/README.md | 2 +- tests/conftest.py | 40 ++++-- tests/containers/OPA/opa_container.py | 2 +- tests/containers/broadcast_container_base.py | 4 +- .../kafka/kafka_broadcast_container.py | 6 +- tests/containers/kafka/kafka_ui_container.py | 1 - .../settings/kafka_broadcast_settings.py | 24 ++-- tests/containers/kafka/zookeeper_container.py | 4 +- tests/containers/opal_server_container.py | 7 +- tests/containers/permitContainer.py | 10 +- tests/containers/redis_broadcast_container.py | 5 +- tests/containers/redis_ui_container.py | 4 +- tests/containers/settings/gitea_settings.py | 8 +- .../settings/opal_client_settings.py | 1 - .../settings/opal_server_settings.py | 2 +- .../settings/postgres_broadcast_settings.py | 7 +- .../settings/redis_broadcast_settings.py | 13 +- tests/fixtures/broadcasters.py | 30 ++-- tests/fixtures/opal.py | 28 ++-- tests/fixtures/policy_repos.py | 128 ++++++++++-------- tests/fixtures/policy_stores.py | 13 +- tests/policies/rbac.rego | 2 +- tests/policy_repos/gitea_policy_repo.py | 3 +- .../gitea_policy_repo_settings.py | 55 +++++--- tests/policy_repos/github_policy_repo copy.py | 2 +- tests/policy_repos/github_policy_repo.py | 14 +- tests/policy_repos/github_policy_settings.py | 40 ++++-- tests/policy_repos/policy_repo_factory.py | 3 +- tests/settings.py | 37 ++--- tests/test_app.py | 13 +- tests/utils.py | 18 ++- 45 files changed, 376 insertions(+), 278 deletions(-) delete mode 100644 a.txt diff --git a/.github/workflows/pytest-tests.yml b/.github/workflows/pytest-tests.yml index e25a4c21f..892b39607 100644 --- a/.github/workflows/pytest-tests.yml +++ b/.github/workflows/pytest-tests.yml @@ -40,4 +40,3 @@ jobs: - name: Run tests working-directory: ./tests run: ./run.sh - diff --git a/a.txt b/a.txt deleted file mode 100644 index cb49702a9..000000000 --- a/a.txt +++ /dev/null @@ -1,10 +0,0 @@ -source_repo_owner = "", -local_clone_path = "", -source_repo_name = "", -webhook_secret = "", -ssh_key_path = "", -should_fork = "", -repo_name = "", -password = "", -owner = "", -pat = "", \ No newline at end of file diff --git a/app-tests/minrun.sh b/app-tests/minrun.sh index 1905be2c6..86222dc8b 100755 --- a/app-tests/minrun.sh +++ b/app-tests/minrun.sh @@ -64,7 +64,7 @@ else else echo "Error creating fork: $?" 
fi - + # Update OPAL_POLICY_REPO_URL to point to the forked repo OPAL_POLICY_REPO_URL="$FORKED_REPO_URL" echo "Updated OPAL_POLICY_REPO_URL to $OPAL_POLICY_REPO_URL" diff --git a/app-tests/sample_service/Dockerfile b/app-tests/sample_service/Dockerfile index f777de051..8590359f3 100644 --- a/app-tests/sample_service/Dockerfile +++ b/app-tests/sample_service/Dockerfile @@ -41,4 +41,4 @@ RUN mkdir -p /var/log/nginx && \ # Run both OpenResty and Flask COPY start.sh /start.sh RUN chmod +x /start.sh -CMD /start.sh \ No newline at end of file +CMD /start.sh diff --git a/app-tests/sample_service/app.py b/app-tests/sample_service/app.py index 8e345f348..4661047de 100644 --- a/app-tests/sample_service/app.py +++ b/app-tests/sample_service/app.py @@ -1,29 +1,34 @@ -from flask import Flask, request, jsonify -import requests import debugpy +import requests +from flask import Flask, jsonify, request app = Flask(__name__) debugpy.listen(("0.0.0.0", 5682)) # Optional, listen for debug requests on port 5678 # OPAL Authorization endpoint -OPAL_AUTH_URL = "http://opal_client:8181/v1/data/authorize" # Adjust with actual OPAL endpoint +OPAL_AUTH_URL = ( + "http://opal_client:8181/v1/data/authorize" # Adjust with actual OPAL endpoint +) -@app.route('/a') + +@app.route("/a") def a(): - return 'Endpoint A' + return "Endpoint A" + -@app.route('/b') +@app.route("/b") def b(): - return 'Endpoint B' + return "Endpoint B" -@app.route('/c') + +@app.route("/c") def c(): # Assuming the JWT token is passed in the Authorization header - auth_header = request.headers.get('Authorization') + auth_header = request.headers.get("Authorization") debugpy.wait_for_client() - + if not auth_header: return jsonify({"error": "Unauthorized, missing Authorization header"}), 401 @@ -46,13 +51,7 @@ def c(): return jsonify({"error": "Unauthorized, 'sub' field not found in token"}), 401 # Prepare the payload for the OPAL authorization request with the extracted user - payload = { - "input": { - "user": user, - "method": request.method, - "path": request.path - } - } + payload = {"input": {"user": user, "method": request.method, "path": request.path}} # Send the request to OPAL authorization endpoint try: @@ -62,13 +61,20 @@ def c(): if response.status_code == 200: opal_response = response.json() if opal_response.get("result") is True: - return 'Endpoint C - Authorized' # Authorized access + return "Endpoint C - Authorized" # Authorized access # If the result is not `true`, deny access - + # Assuming `response` is your variable containing the response object from OPAL - response_data = response.get_data(as_text=True) - return jsonify({"error": f"Forbidden, authorization denied! \n Response Body: {response_data}"}), 403 + response_data = response.get_data(as_text=True) + return ( + jsonify( + { + "error": f"Forbidden, authorization denied! 
\n Response Body: {response_data}" + } + ), + 403, + ) # OPAL responded but with a non-200 status, treat as denied return jsonify({"error": "Forbidden, OPAL authorization failed"}), 403 @@ -76,5 +82,6 @@ def c(): # Handle connection or other request errors return jsonify({"error": f"Error contacting OPAL client: {str(e)}"}), 500 -if __name__ == '__main__': - app.run() \ No newline at end of file + +if __name__ == "__main__": + app.run() diff --git a/app-tests/sample_service/nginx.conf b/app-tests/sample_service/nginx.conf index 8662f7fe6..9ec84a039 100644 --- a/app-tests/sample_service/nginx.conf +++ b/app-tests/sample_service/nginx.conf @@ -17,13 +17,13 @@ http { access_log /var/log/nginx/proxy_access.log; # Directly proxy to Flask without authorization - proxy_pass http://127.0.0.1:5000; + proxy_pass http://127.0.0.1:5000; } # This will be enforced in the endpoint location /c { access_log /var/log/nginx/proxy_access.log; - + proxy_pass http://127.0.0.1:5000; } @@ -106,4 +106,4 @@ http { return 401 "Unauthorized"; } } -} \ No newline at end of file +} diff --git a/app-tests/sample_service/openapi.yaml b/app-tests/sample_service/openapi.yaml index f45907a6a..7ce68d870 100644 --- a/app-tests/sample_service/openapi.yaml +++ b/app-tests/sample_service/openapi.yaml @@ -37,7 +37,7 @@ paths: get: summary: Endpoint C with Authorization description: | - This endpoint requires authorization. The client must provide a JWT token in the Authorization header. + This endpoint requires authorization. The client must provide a JWT token in the Authorization header. The endpoint checks with an OPAL server to authorize the user based on the token. security: - bearerAuth: [] @@ -85,4 +85,4 @@ components: bearerAuth: type: http scheme: bearer - bearerFormat: JWT # Indicates the use of JWT for bearer token \ No newline at end of file + bearerFormat: JWT # Indicates the use of JWT for bearer token diff --git a/app-tests/sample_service/policy.rego b/app-tests/sample_service/policy.rego index fc156dcbd..b894c7425 100644 --- a/app-tests/sample_service/policy.rego +++ b/app-tests/sample_service/policy.rego @@ -34,4 +34,4 @@ allow = true { input.path = ["c"] input.method = "GET" user_role == "writer" or user_role == "reader" -} \ No newline at end of file +} diff --git a/app-tests/sample_service/supervisord.conf b/app-tests/sample_service/supervisord.conf index 62a9ffba4..eafb00dc6 100644 --- a/app-tests/sample_service/supervisord.conf +++ b/app-tests/sample_service/supervisord.conf @@ -5,4 +5,4 @@ nodaemon=true command=nginx -g "daemon off;" [program:flask] -command=flask run --host=0.0.0.0 --port=5000 \ No newline at end of file +command=flask run --host=0.0.0.0 --port=5000 diff --git a/docker/Dockerfile.client b/docker/Dockerfile.client index 81c1cd192..54deb45b0 100644 --- a/docker/Dockerfile.client +++ b/docker/Dockerfile.client @@ -61,10 +61,10 @@ CMD ["./start.sh"] # uvicorn config ------------------------------------ # install the opal-client package RUN cd ./packages/opal-client && python setup.py install - + # WARNING: do not change the number of workers on the opal client! # only one worker is currently supported for the client. 
- + # number of uvicorn workers ENV UVICORN_NUM_WORKERS=1 # uvicorn asgi app @@ -73,23 +73,23 @@ CMD ["./start.sh"] ENV UVICORN_PORT=7000 # disable inline OPA ENV OPAL_INLINE_OPA_ENABLED=false - + # expose opal client port EXPOSE 7000 USER opal - + RUN mkdir -p /opal/backup VOLUME /opal/backup - - + + # IMAGE to extract OPA from official image ---------- # --------------------------------------------------- FROM alpine:latest AS opa-extractor USER root - + RUN apk update && apk add skopeo tar WORKDIR /opal - + # copy opa from official docker image ARG opa_image=openpolicyagent/opa ARG opa_tag=latest-static @@ -97,22 +97,22 @@ CMD ["./start.sh"] mkdir image && tar xf image.tar -C ./image && cat image/*.tar | tar xf - -C ./image -i && \ find image/ -name "opa*" -type f -executable -print0 | xargs -0 -I "{}" cp {} ./opa && chmod 755 ./opa && \ rm -r image image.tar - - + + # OPA CLIENT IMAGE ---------------------------------- # Using standalone image as base -------------------- # --------------------------------------------------- FROM client-standalone AS client - + # Temporarily move back to root for additional setup USER root - + # copy opa from opa-extractor COPY --from=opa-extractor /opal/opa ./opa - + # enable inline OPA ENV OPAL_INLINE_OPA_ENABLED=true # expose opa port EXPOSE 8181 - - USER opal \ No newline at end of file + + USER opal diff --git a/docker/docker-compose-local.yml b/docker/docker-compose-local.yml index 086bb7aee..e28d10d3a 100644 --- a/docker/docker-compose-local.yml +++ b/docker/docker-compose-local.yml @@ -102,4 +102,4 @@ services: volumes: opa_backup: - gitea_data: # Data volume for Gitea \ No newline at end of file + gitea_data: # Data volume for Gitea diff --git a/packages/opal-client/opal_client/main.py b/packages/opal-client/opal_client/main.py index 635ddccc4..a04d7a7b9 100644 --- a/packages/opal-client/opal_client/main.py +++ b/packages/opal-client/opal_client/main.py @@ -3,9 +3,10 @@ client = OpalClient() import debugpy -#debugpy.listen(("0.0.0.0", 5678)) + +# debugpy.listen(("0.0.0.0", 5678)) print("Waiting for debugger attach...") -#debugpy.wait_for_client() # Optional, wait for debugger to attach before continuing +# debugpy.wait_for_client() # Optional, wait for debugger to attach before continuing # expose app for Uvicorn app = client.app diff --git a/packages/opal-server/opal_server/main.py b/packages/opal-server/opal_server/main.py index 9a56377d7..28f6445bc 100644 --- a/packages/opal-server/opal_server/main.py +++ b/packages/opal-server/opal_server/main.py @@ -1,8 +1,9 @@ - import debugpy -#debugpy.listen(("0.0.0.0", 5678)) + +# debugpy.listen(("0.0.0.0", 5678)) print("Waiting for debugger attach...") -#debugpy.wait_for_client() # Optional, wait for debugger to attach before continuing +# debugpy.wait_for_client() # Optional, wait for debugger to attach before continuing + def create_app(*args, **kwargs): from opal_server.server import OpalServer diff --git a/tests/.env.example b/tests/.env.example index dc2d48795..1112ae7b4 100644 --- a/tests/.env.example +++ b/tests/.env.example @@ -18,4 +18,4 @@ OPAL_PYTEST_POLICY_REPO_SSH_PRIVATE_KEY= OPAL_PYTEST_POLICY_REPO_SSH_PUBLIC_KEY= OPAL_AUTH_PRIVATE_KEY= -OPAL_AUTH_PUBLIC_KEY= \ No newline at end of file +OPAL_AUTH_PUBLIC_KEY= diff --git a/tests/README.md b/tests/README.md index 815917d65..054614225 100644 --- a/tests/README.md +++ b/tests/README.md @@ -79,4 +79,4 @@ Refer to the [OPAL API Documentation](https://opal-v2.permit.io/redoc#tag/Bundle --- -Let me know if you'd like to include specific code 
examples or any other details! \ No newline at end of file +Let me know if you'd like to include specific code examples or any other details! diff --git a/tests/conftest.py b/tests/conftest.py index 5557bfa70..1d89fc4f1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,21 +1,40 @@ import os -import time import shutil -import pytest import tempfile -from tests import utils +import time + +import pytest from testcontainers.core.utils import setup_logger -from tests.fixtures.opal import opal_clients, opal_servers, opal_network, connected_clients, topiced_clients -from tests.fixtures.policy_repos import policy_repo -from tests.fixtures.images import opa_image, cedar_image, opal_client_image, opal_client_with_opa_image, opal_server_image -from tests.fixtures.broadcasters import broadcast_channel, postgres_broadcast_channel, redis_broadcast_channel, kafka_broadcast_channel +from tests import utils +from tests.fixtures.broadcasters import ( + broadcast_channel, + kafka_broadcast_channel, + postgres_broadcast_channel, + redis_broadcast_channel, +) +from tests.fixtures.images import ( + cedar_image, + opa_image, + opal_client_image, + opal_client_with_opa_image, + opal_server_image, +) +from tests.fixtures.opal import ( + connected_clients, + opal_clients, + opal_network, + opal_servers, + topiced_clients, +) +from tests.fixtures.policy_repos import policy_repo logger = setup_logger("conftest") utils.pre_set() + @pytest.fixture(scope="session") def temp_dir(): # Setup: Create a temporary directory @@ -34,8 +53,10 @@ def temp_dir(): path.mkdir(parents=True, exist_ok=True) os.chmod(path, 0o777) # Set permissions to allow read/write/execute for all users - dir_path = tempfile.mkdtemp(prefix="opal_tests_",suffix=".tmp",dir=str(path)) - os.chmod(dir_path, 0o777) # Set permissions to allow read/write/execute for all users + dir_path = tempfile.mkdtemp(prefix="opal_tests_", suffix=".tmp", dir=str(path)) + os.chmod( + dir_path, 0o777 + ) # Set permissions to allow read/write/execute for all users logger.debug(f"Temporary directory created: {dir_path}") yield dir_path @@ -67,7 +88,6 @@ def setup(opal_clients, policy_repo, session_matrix): ------ None """ - logger.info("Initializing test session...") logger.debug("\n\nusing session matrix:") diff --git a/tests/containers/OPA/opa_container.py b/tests/containers/OPA/opa_container.py index e091e7451..3c8d1bc51 100644 --- a/tests/containers/OPA/opa_container.py +++ b/tests/containers/OPA/opa_container.py @@ -2,9 +2,9 @@ from testcontainers.core.network import Network from testcontainers.core.utils import setup_logger +from tests.containers.OPA.OPA_settings import OpaSettings from tests.containers.permitContainer import PermitContainer -from tests.containers.OPA.OPA_settings import OpaSettings class OpaContainer(PermitContainer, DockerContainer): def __init__( diff --git a/tests/containers/broadcast_container_base.py b/tests/containers/broadcast_container_base.py index 0ad398077..e212699fa 100644 --- a/tests/containers/broadcast_container_base.py +++ b/tests/containers/broadcast_container_base.py @@ -6,8 +6,6 @@ def __init__(self): PermitContainer.__init__(self) def get_url(self) -> str: - - url = "" match self.settings.protocol: @@ -45,7 +43,7 @@ def get_url(self) -> str: ) print(url) return url - + case _: raise ValueError(f"Unknown broadcast container type: {self.settings}") print(url) diff --git a/tests/containers/kafka/kafka_broadcast_container.py b/tests/containers/kafka/kafka_broadcast_container.py index 955f98d6f..766b13ecc 100644 --- 
a/tests/containers/kafka/kafka_broadcast_container.py +++ b/tests/containers/kafka/kafka_broadcast_container.py @@ -1,9 +1,10 @@ from testcontainers.core.container import DockerContainer from testcontainers.core.network import Network + class KafkaContainer(DockerContainer): def __init__(self, network: Network): - # Kafka Broker 0 + # Kafka Broker 0 self.settings = { "env": { "KAFKA_BROKER_ID": "1", @@ -21,14 +22,13 @@ def __init__(self, network: Network): self.with_exposed_ports(9092, 29092) for key, value in self.settings.get("env").items(): self.with_env(key, value) - + self.with_name("kafka0") self.with_network(network) self.with_network_aliases("kafka0") self.start() - def get_url(self) -> str: url = "kafka://kafka0:9092" return url diff --git a/tests/containers/kafka/kafka_ui_container.py b/tests/containers/kafka/kafka_ui_container.py index 142981651..f2fe7e582 100644 --- a/tests/containers/kafka/kafka_ui_container.py +++ b/tests/containers/kafka/kafka_ui_container.py @@ -18,4 +18,3 @@ def __init__(self, network: Network): self.with_network_aliases("kafka-ui") self.start() - \ No newline at end of file diff --git a/tests/containers/kafka/settings/kafka_broadcast_settings.py b/tests/containers/kafka/settings/kafka_broadcast_settings.py index 4620783d4..e8b682208 100644 --- a/tests/containers/kafka/settings/kafka_broadcast_settings.py +++ b/tests/containers/kafka/settings/kafka_broadcast_settings.py @@ -1,16 +1,16 @@ import os + from testcontainers.core.utils import setup_logger + class KafkaBroadcastSettings: def __init__(self, kafka_port: int | None = None): - self.logger = setup_logger("KafkaBroadcastSettings") self.load_from_env() - self.kafka_port = kafka_port if kafka_port else self.kafka_port - + self.protocol = "kafka" self.validate_dependencies() @@ -19,29 +19,25 @@ def validate_dependencies(self): """Validate required dependencies before starting the server.""" if not self.kafka_port: raise ValueError("POSTGRES_PORT is required.") - self.logger.info(f"{self.kafka_container_name} | Dependencies validated successfully.") - - + self.logger.info( + f"{self.kafka_container_name} | Dependencies validated successfully." + ) def getKafkaUiEnvVars(self): - return { - "KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS": "kafka:9092" - } + return {"KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS": "kafka:9092"} def getZookiperEnvVars(self): return { "ZOOKEEPER_CLIENT_PORT": self.zookeeper_port, "ZOOKEEPER_TICK_TIME": self.zookeeper_tick_time, - "ALLOW_ANONYMOUS_LOGIN": self.zookeeper_allow_anonymous_login + "ALLOW_ANONYMOUS_LOGIN": self.zookeeper_allow_anonymous_login, } def getKafkaEnvVars(self): - return { - } + return {} def load_from_env(self): - """ - Load Kafka settings from environment variables. + """Load Kafka settings from environment variables. 
The following environment variables are supported: - KAFKA_IMAGE_NAME diff --git a/tests/containers/kafka/zookeeper_container.py b/tests/containers/kafka/zookeeper_container.py index 17b179c0c..f9c36f355 100644 --- a/tests/containers/kafka/zookeeper_container.py +++ b/tests/containers/kafka/zookeeper_container.py @@ -1,9 +1,9 @@ from testcontainers.core.container import DockerContainer from testcontainers.core.network import Network + class ZookeeperContainer(DockerContainer): def __init__(self, network: Network): - self.settings = { "env": { "ZOOKEEPER_CLIENT_PORT": "2181", @@ -14,7 +14,7 @@ def __init__(self, network: Network): DockerContainer.__init__(self, image="confluentinc/cp-zookeeper:6.2.0") self.with_exposed_ports(2181) - + self.with_name("zookeeper") for key, value in self.settings.get("env").items(): self.with_env(key, value) diff --git a/tests/containers/opal_server_container.py b/tests/containers/opal_server_container.py index 292b647e8..6fc2f2f9f 100644 --- a/tests/containers/opal_server_container.py +++ b/tests/containers/opal_server_container.py @@ -32,7 +32,6 @@ def configure(self): for key, value in self.settings.getEnvVars().items(): self.with_env(key, value) - # Configure network and other settings self.with_name(self.settings.container_name).with_bind_ports( 7002, self.settings.port @@ -54,7 +53,7 @@ def reload_with_settings(self, settings: OpalServerSettings | None = None): self.start() - def obtain_OPAL_tokens(self, caller: str = "Unkonwn caller") -> dict: + def obtain_OPAL_tokens(self, caller: str = "Unknown caller"): """Fetch client and datasource tokens from the OPAL server.""" token_url = f"http://localhost:{self.settings.port}/token" headers = { @@ -78,7 +77,9 @@ def obtain_OPAL_tokens(self, caller: str = "Unkonwn caller") -> dict: token = response.json().get("token") if token: tokens[token_type] = token - self.logger.info(f"{caller} | Successfully fetched OPAL {token_type} token.") + self.logger.info( + f"{caller} | Successfully fetched OPAL {token_type} token." + ) else: self.logger.error( f"{caller} | Failed to fetch OPAL {token_type} token: {response.json()}" diff --git a/tests/containers/permitContainer.py b/tests/containers/permitContainer.py index f5ec0956f..ae0e1e011 100644 --- a/tests/containers/permitContainer.py +++ b/tests/containers/permitContainer.py @@ -10,7 +10,9 @@ def __init__(self): self.permitLogger = setup_logger(__name__) # Regex to match any ANSI-escaped timestamp in the format YYYY-MM-DDTHH:MM:SS.mmmmmm+0000 - self.timestamp_with_ansi = (r"\x1b\[.*?(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6}\+\d{4})") + self.timestamp_with_ansi = ( + r"\x1b\[.*?(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6}\+\d{4})" + ) self.errors = [] # self.check_errors() @@ -38,14 +40,16 @@ def wait_for_log( # Check if the timeout has been exceeded elapsed_time = time.time() - start_time if elapsed_time > timeout: - self.permitLogger.warning(f"{self.settings.container_name} | Timeout reached while waiting for the log. | {log_str}") + self.permitLogger.warning( + f"{self.settings.container_name} | Timeout reached while waiting for the log. 
| {log_str}" + ) break decoded_line = line.decode("utf-8").strip() # Search for the timestamp in the line match = re.search(self.timestamp_with_ansi, decoded_line) - if (reference_timestamp is None): + if reference_timestamp is None: if log_str in decoded_line: log_found = True break diff --git a/tests/containers/redis_broadcast_container.py b/tests/containers/redis_broadcast_container.py index 591177761..33417c1d7 100644 --- a/tests/containers/redis_broadcast_container.py +++ b/tests/containers/redis_broadcast_container.py @@ -2,8 +2,8 @@ from testcontainers.redis import RedisContainer from tests.containers.broadcast_container_base import BroadcastContainerBase -from tests.containers.settings.redis_broadcast_settings import RedisBroadcastSettings from tests.containers.permitContainer import PermitContainer +from tests.containers.settings.redis_broadcast_settings import RedisBroadcastSettings class RedisBroadcastContainer(PermitContainer, RedisContainer): @@ -14,7 +14,6 @@ def __init__( docker_client_kw: dict | None = None, **kwargs, ) -> None: - self.settings = redisContainerSettings self.network = network @@ -29,6 +28,6 @@ def __init__( self.with_name(self.settings.container_name) self.start() - + def get_url(self) -> str: return f"redis://{self.settings.container_name}:{self.settings.port}" diff --git a/tests/containers/redis_ui_container.py b/tests/containers/redis_ui_container.py index 0bc8cfed8..37c46f6ea 100644 --- a/tests/containers/redis_ui_container.py +++ b/tests/containers/redis_ui_container.py @@ -6,7 +6,6 @@ from tests.containers.permitContainer import PermitContainer - class RedisUIContainer(PermitContainer, DockerContainer): def __init__( self, @@ -15,7 +14,6 @@ def __init__( docker_client_kw: dict | None = None, **kwargs, ) -> None: - self.redis_container = redis_container self.network = network self.container_name = f"{self.redis_container.settings.container_name}-ui" @@ -33,4 +31,4 @@ def __init__( self.with_network_aliases("redis_ui") - self.start() \ No newline at end of file + self.start() diff --git a/tests/containers/settings/gitea_settings.py b/tests/containers/settings/gitea_settings.py index 8a61f69db..4fe001ff3 100644 --- a/tests/containers/settings/gitea_settings.py +++ b/tests/containers/settings/gitea_settings.py @@ -74,7 +74,6 @@ def __init__( self.validate_dependencies() self.gitea_internal_base_url = f"http://{self.container_name}:{self.port_http}" - def validate_dependencies(self): """Validate required parameters.""" @@ -90,9 +89,10 @@ def validate_dependencies(self): raise ValueError( "Missing required parameters for Gitea container initialization." ) - - self.logger.info(f"{self.container_name} | Dependencies validated successfully.") - + + self.logger.info( + f"{self.container_name} | Dependencies validated successfully." 
+ ) def getEnvVars(self): return { diff --git a/tests/containers/settings/opal_client_settings.py b/tests/containers/settings/opal_client_settings.py index e5fd9dd27..30808a7b5 100644 --- a/tests/containers/settings/opal_client_settings.py +++ b/tests/containers/settings/opal_client_settings.py @@ -243,7 +243,6 @@ def getEnvVars(self): else: env_vars["OPAL_INLINE_OPA_ENABLED"] = self.inline_opa_enabled - if self.iniline_cedar_enabled: env_vars["OPAL_INILINE_CEDAR_ENABLED"] = self.iniline_cedar_enabled env_vars["OPAL_INILINE_CEDAR_EXEC_PATH"] = self.inline_cedar_exec_path diff --git a/tests/containers/settings/opal_server_settings.py b/tests/containers/settings/opal_server_settings.py index 065ca8998..66bfbf4e0 100644 --- a/tests/containers/settings/opal_server_settings.py +++ b/tests/containers/settings/opal_server_settings.py @@ -193,7 +193,7 @@ def getEnvVars(self): "UVICORN_PORT": self.uvicorn_port, "OPAL_ALL_DATA_URL": self.all_data_url, "OPAL_POLICY_REPO_REUSE_CLONE_PATH": self.policy_repo_reuse_clone_path, - "OPAL_POLICY_REPO_SSH_KEY": self.POLICY_REPO_SSH_KEY + "OPAL_POLICY_REPO_SSH_KEY": self.POLICY_REPO_SSH_KEY, } if pytest_settings.use_webhook: diff --git a/tests/containers/settings/postgres_broadcast_settings.py b/tests/containers/settings/postgres_broadcast_settings.py index 09f3ba4d9..c26e550c0 100644 --- a/tests/containers/settings/postgres_broadcast_settings.py +++ b/tests/containers/settings/postgres_broadcast_settings.py @@ -1,4 +1,5 @@ import os + from testcontainers.core.utils import setup_logger @@ -12,7 +13,6 @@ def __init__( password: str | None = None, database: str | None = None, ): - self.logger = setup_logger("PostgresBroadcastSettings") self.load_from_env() @@ -39,9 +39,10 @@ def validate_dependencies(self): raise ValueError("POSTGRES_PASSWORD is required.") if not self.database: raise ValueError("POSTGRES_DATABASE is required.") - - self.logger.info(f"{self.container_name} | Dependencies validated successfully.") + self.logger.info( + f"{self.container_name} | Dependencies validated successfully." + ) def getEnvVars(self): return { diff --git a/tests/containers/settings/redis_broadcast_settings.py b/tests/containers/settings/redis_broadcast_settings.py index 6168efa98..69062fddb 100644 --- a/tests/containers/settings/redis_broadcast_settings.py +++ b/tests/containers/settings/redis_broadcast_settings.py @@ -1,4 +1,5 @@ import os + from testcontainers.core.utils import setup_logger @@ -9,7 +10,6 @@ def __init__( port: int | None = None, password: str | None = None, ): - self.logger = setup_logger("PostgresBroadcastSettings") self.load_from_env() @@ -27,13 +27,14 @@ def validate_dependencies(self): raise ValueError("POSTGRES_PORT is required.") if not self.password: raise ValueError("POSTGRES_PASSWORD is required.") - - self.logger.info(f"{self.container_name} | Dependencies validated successfully.") - + self.logger.info( + f"{self.container_name} | Dependencies validated successfully." 
+ ) def load_from_env(self): - self.container_name = os.getenv("REDIS_CONTAINER_NAME", "redis_broadcast_channel") + self.container_name = os.getenv( + "REDIS_CONTAINER_NAME", "redis_broadcast_channel" + ) self.port = int(os.getenv("REDIS_PORT", 6379)) self.password = os.getenv("REDIS_PASSWORD", "redis") - diff --git a/tests/fixtures/broadcasters.py b/tests/fixtures/broadcasters.py index f1cddeb19..277bc4005 100644 --- a/tests/fixtures/broadcasters.py +++ b/tests/fixtures/broadcasters.py @@ -4,16 +4,17 @@ from tests.containers.kafka.kafka_broadcast_container import KafkaContainer from tests.containers.kafka.kafka_ui_container import KafkaUIContainer +from tests.containers.kafka.settings.kafka_broadcast_settings import ( + KafkaBroadcastSettings, +) +from tests.containers.kafka.zookeeper_container import ZookeeperContainer from tests.containers.postgres_broadcast_container import PostgresBroadcastContainer from tests.containers.redis_broadcast_container import RedisBroadcastContainer from tests.containers.redis_ui_container import RedisUIContainer -from tests.containers.kafka.settings.kafka_broadcast_settings import KafkaBroadcastSettings from tests.containers.settings.postgres_broadcast_settings import ( PostgresBroadcastSettings, ) from tests.containers.settings.redis_broadcast_settings import RedisBroadcastSettings -from tests.containers.kafka.zookeeper_container import ZookeeperContainer - from tests.settings import session_matrix logger = setup_logger(__name__) @@ -30,8 +31,7 @@ def postgres_broadcast_channel(opal_network: Network): try: try: container = PostgresBroadcastContainer( - network=opal_network, - settings=PostgresBroadcastSettings() + network=opal_network, settings=PostgresBroadcastSettings() ) yield [container] except Exception as e: @@ -39,12 +39,17 @@ def postgres_broadcast_channel(opal_network: Network): f"Failed to start container: {container} with error: {e} {e.__traceback__}" ) exit(1) - + try: - if container.get_wrapped_container().status == "running" or container.get_wrapped_container().status == "created": + if ( + container.get_wrapped_container().status == "running" + or container.get_wrapped_container().status == "created" + ): container.stop() else: - logger.info(f"Container status is: {container.get_wrapped_container().status}") + logger.info( + f"Container status is: {container.get_wrapped_container().status}" + ) container.stop() except Exception: logger.error(f"Failed to stop containers: {container}") @@ -91,7 +96,9 @@ def redis_broadcast_channel(opal_network: Network): container. The yield value is a list of the two containers. The fixture stops the containers after the test is done. """ - redis_container = RedisBroadcastContainer(opal_network, redisContainerSettings=RedisBroadcastSettings()) + redis_container = RedisBroadcastContainer( + opal_network, redisContainerSettings=RedisBroadcastSettings() + ) redis_ui_container = RedisUIContainer(opal_network, redis_container) containers = [redis_container, redis_ui_container] yield containers @@ -107,9 +114,10 @@ def redis_broadcast_channel(opal_network: Network): broadcasters_map = { "postgres": postgres_broadcast_channel, "kafka": kafka_broadcast_channel, - "redis": redis_broadcast_channel + "redis": redis_broadcast_channel, } + @pytest.fixture(scope="session") def broadcast_channel(opal_network: Network, session_matrix): """Fixture that yields a running broadcast channel container. 
@@ -122,4 +130,4 @@ def broadcast_channel(opal_network: Network, session_matrix): # logger.info(f"Using {session_matrix["broadcaster"]} broadcaster") # input() - yield from broadcasters_map[session_matrix["broadcaster"]](opal_network) \ No newline at end of file + yield from broadcasters_map[session_matrix["broadcaster"]](opal_network) diff --git a/tests/fixtures/opal.py b/tests/fixtures/opal.py index d433c7e3a..67a0242e5 100644 --- a/tests/fixtures/opal.py +++ b/tests/fixtures/opal.py @@ -1,20 +1,21 @@ -from testcontainers.core.network import Network import json +import time +from typing import List + +import pytest +from testcontainers.core.network import Network +from testcontainers.core.utils import setup_logger + from tests.containers.broadcast_container_base import BroadcastContainerBase from tests.containers.opal_client_container import OpalClientContainer -from typing import List from tests.containers.opal_server_container import OpalServerContainer from tests.containers.settings.opal_client_settings import OpalClientSettings from tests.containers.settings.opal_server_settings import OpalServerSettings from tests.policy_repos.policy_repo_base import PolicyRepoBase -import time -from testcontainers.core.utils import setup_logger -import pytest - - logger = setup_logger("opal_fixtures") + @pytest.fixture(scope="session", autouse=True) def opal_network(): """Creates a Docker network and yields it. @@ -36,6 +37,7 @@ def opal_network(): else: logger.error(f"Failed to remove network got exception\n{e}") + from tests.fixtures.broadcasters import ( broadcast_channel, kafka_broadcast_channel, @@ -89,7 +91,6 @@ def opal_servers( containers = [] # List to store container instances - for i in range(session_matrix["number_of_opal_servers"]): container_name = f"opal_server_{i+1}" @@ -133,6 +134,7 @@ def opal_servers( for container in containers: container.stop() + @pytest.fixture(scope="session") def connected_clients(opal_clients: List[OpalClientContainer]): """A fixture that waits for all OPAL clients to connect to the PubSub @@ -211,7 +213,7 @@ def opal_clients( if not opal_servers or len(opal_servers) == 0: raise ValueError("Missing 'opal_server' container.") - opal_server_url = f"http://{opal_servers[0].settings.container_name}:7002"#{opal_servers[0].settings.port}" + opal_server_url = f"http://{opal_servers[0].settings.container_name}:7002" # {opal_servers[0].settings.port}" containers = [] # List to store OpalClientContainer instances @@ -267,8 +269,11 @@ def opal_clients( logger.error(f"Failed to stop containers: {container}") pass + @pytest.fixture(scope="session") -def topiced_clients(opal_network: Network, opal_servers: list[OpalServerContainer], session_matrix): +def topiced_clients( + opal_network: Network, opal_servers: list[OpalServerContainer], session_matrix +): """Fixture that starts and manages multiple OPAL client containers, each subscribing to a different topic. 
@@ -297,7 +302,7 @@ def topiced_clients(opal_network: Network, opal_servers: list[OpalServerContaine if not opal_servers or len(opal_servers) == 0: raise ValueError("Missing 'opal_server' container.") - opal_server_url = f"http://{opal_servers[0].settings.container_name}:7002"#{opal_servers[0].settings.port}" + opal_server_url = f"http://{opal_servers[0].settings.container_name}:7002" # {opal_servers[0].settings.port}" containers = {} # List to store OpalClientContainer instances client_token = opal_servers[0].obtain_OPAL_tokens("topiced_opal_client_?x?")[ @@ -355,4 +360,3 @@ def topiced_clients(opal_network: Network, opal_servers: list[OpalServerContaine for _, clients in containers.items(): for client in clients: client.stop() - diff --git a/tests/fixtures/policy_repos.py b/tests/fixtures/policy_repos.py index 3dd953cfd..490c456c8 100644 --- a/tests/fixtures/policy_repos.py +++ b/tests/fixtures/policy_repos.py @@ -57,86 +57,92 @@ def gitea_server(opal_network: Network): """ gitea_container = GiteaContainer(settings=gitea_settings(), network=opal_network) - + gitea_container.deploy_gitea() gitea_container.init_repo() - + yield gitea_container + def create_github_policy_repo_settings(temp_dir: str, session_matrix): # logger.info("Creating GithubPolicyRepoSettings...") # logger.info("\n\n\n\n") # logger.info(session_matrix) # logger.info("\n\n\n\n") - - # input() + # input() return GithubPolicyRepoSettings( - temp_dir = temp_dir, - local_clone_path = temp_dir, - - source_repo_owner = session_matrix["source_repo_owner"], - source_repo_name = session_matrix["source_repo_name"], - repo_name = session_matrix["repo_name"], - owner = session_matrix["repo_owner"], - - ssh_key_path = session_matrix["ssh_key_path"], - - pat = session_matrix["github_pat"], - - webhook_secret = session_matrix["webhook_secret"], - should_fork = False, + temp_dir=temp_dir, + local_clone_path=temp_dir, + source_repo_owner=session_matrix["source_repo_owner"], + source_repo_name=session_matrix["source_repo_name"], + repo_name=session_matrix["repo_name"], + owner=session_matrix["repo_owner"], + ssh_key_path=session_matrix["ssh_key_path"], + pat=session_matrix["github_pat"], + webhook_secret=session_matrix["webhook_secret"], + should_fork=False, # should_fork = session_matrix["should_fork"], - password = session_matrix["repo_password"], - - opal_policy_repo_ssh_key_public = session_matrix["opal_policy_repo_ssh_key_public"], - opal_policy_repo_ssh_key_private = session_matrix["opal_policy_repo_ssh_key_private"], + password=session_matrix["repo_password"], + opal_policy_repo_ssh_key_public=session_matrix[ + "opal_policy_repo_ssh_key_public" + ], + opal_policy_repo_ssh_key_private=session_matrix[ + "opal_policy_repo_ssh_key_private" + ], ) -def create_gitea_policy_repo_settings(temp_dir: str, session_matrix, gitea_settings: GiteaSettings): - """ - Creates a Gitea policy repository settings object. +def create_gitea_policy_repo_settings( + temp_dir: str, session_matrix, gitea_settings: GiteaSettings +): + """Creates a Gitea policy repository settings object. - This method creates a Gitea policy repository settings object based on the - given parameters. The object is initialized with the default values for - the local clone path, owner, repository name, branch name, container name, - repository host, repository ports, password, SSH key path, source - repository owner and name, whether to fork the repository, whether to - create the repository, and webhook secret. 
+ This method creates a Gitea policy repository settings object based + on the given parameters. The object is initialized with the default + values for the local clone path, owner, repository name, branch + name, container name, repository host, repository ports, password, + SSH key path, source repository owner and name, whether to fork the + repository, whether to create the repository, and webhook secret. - :param temp_dir: The temporary directory to use for the policy repository. - :param session_matrix: The session matrix to use for the policy repository. + :param temp_dir: The temporary directory to use for the policy + repository. + :param session_matrix: The session matrix to use for the policy + repository. :param gitea_settings: The settings for the Gitea container. :return: The GiteaPolicyRepoSettings object. """ - + return GiteaPolicyRepoSettings( - local_clone_path = temp_dir, - owner = gitea_settings.username, - repo_name = gitea_settings.repo_name, - branch_name = "master", - container_name = gitea_settings.container_name, - repo_host = "localhost", - repo_port_http = gitea_settings.port_http, - repo_port_ssh = gitea_settings.port_ssh, - password = gitea_settings.password, - pat = None, - ssh_key_path = pytest_settings.ssh_key_path, - source_repo_owner = gitea_settings.username, - source_repo_name = gitea_settings.repo_name, - should_fork = False, - should_create_repo = True, - webhook_secret = pytest_settings.webhook_secret + local_clone_path=temp_dir, + owner=gitea_settings.username, + repo_name=gitea_settings.repo_name, + branch_name="master", + container_name=gitea_settings.container_name, + repo_host="localhost", + repo_port_http=gitea_settings.port_http, + repo_port_ssh=gitea_settings.port_ssh, + password=gitea_settings.password, + pat=None, + ssh_key_path=pytest_settings.ssh_key_path, + source_repo_owner=gitea_settings.username, + source_repo_name=gitea_settings.repo_name, + should_fork=False, + should_create_repo=True, + webhook_secret=pytest_settings.webhook_secret, ) # @pytest.fixture(scope="session") -def policy_repo_settings(temp_dir: str, session_matrix, opal_network, policy_repo_type = SupportedPolicyRepo.GITHUB): - - """ - Creates a policy repository settings object based on the specified type of policy repository. +def policy_repo_settings( + temp_dir: str, + session_matrix, + opal_network, + policy_repo_type=SupportedPolicyRepo.GITHUB, +): + """Creates a policy repository settings object based on the specified type + of policy repository. This method takes in the following parameters: @@ -153,14 +159,16 @@ def policy_repo_settings(temp_dir: str, session_matrix, opal_network, policy_rep # gitea_server = request.getfixturevalue("gitea_server") gitea_server_settings = next(gitea_server(opal_network)) - map = { SupportedPolicyRepo.GITHUB: create_github_policy_repo_settings, - SupportedPolicyRepo.GITEA: lambda temp_dir, session_matrix: create_gitea_policy_repo_settings(temp_dir, session_matrix, gitea_server_settings.settings), + SupportedPolicyRepo.GITEA: lambda temp_dir, session_matrix: create_gitea_policy_repo_settings( + temp_dir, session_matrix, gitea_server_settings.settings + ), } yield (map[policy_repo_type](temp_dir, session_matrix), gitea_server_settings) + @pytest.fixture(scope="session") def policy_repo(temp_dir: str, session_matrix, opal_network, request): """Creates a policy repository for testing. @@ -180,8 +188,12 @@ def policy_repo(temp_dir: str, session_matrix, opal_network, request): :return: The PolicyRepoBase object. 
""" - settings, server = next(policy_repo_settings(temp_dir, session_matrix, opal_network, session_matrix["repo_provider"])) - + settings, server = next( + policy_repo_settings( + temp_dir, session_matrix, opal_network, session_matrix["repo_provider"] + ) + ) + policy_repo = PolicyRepoFactory( session_matrix["repo_provider"], ).get_policy_repo( diff --git a/tests/fixtures/policy_stores.py b/tests/fixtures/policy_stores.py index 725af94f8..f9afa73e9 100644 --- a/tests/fixtures/policy_stores.py +++ b/tests/fixtures/policy_stores.py @@ -1,4 +1,5 @@ import datetime + import pytest from images import cedar_image, opa_image from testcontainers.core.network import Network @@ -34,9 +35,17 @@ def opa_server(opal_network: Network, opa_image): container : OpaContainer The OPA server container object. """ - container = OpaContainer(settings=OpaSettings(container_name="opa",image=opa_image,),network=opal_network,) + container = OpaContainer( + settings=OpaSettings( + container_name="opa", + image=opa_image, + ), + network=opal_network, + ) # assert container.wait_for_log(reference_timestamp=datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(seconds=1000),log_str="msg=Initializing server", timeout=5), "OPA server did not start." - container.permitLogger.info("OPA server started with id: " + container.get_wrapped_container().id) + container.permitLogger.info( + "OPA server started with id: " + container.get_wrapped_container().id + ) yield container container.stop() diff --git a/tests/policies/rbac.rego b/tests/policies/rbac.rego index c564c0544..67ff4d4b6 100644 --- a/tests/policies/rbac.rego +++ b/tests/policies/rbac.rego @@ -6,4 +6,4 @@ allow if { # unless user location is outside US country := data.users[input.user].location.country country == "US" -} \ No newline at end of file +} diff --git a/tests/policy_repos/gitea_policy_repo.py b/tests/policy_repos/gitea_policy_repo.py index 44371203c..b2ebc6cca 100644 --- a/tests/policy_repos/gitea_policy_repo.py +++ b/tests/policy_repos/gitea_policy_repo.py @@ -3,14 +3,13 @@ import shutil from git import GitCommandError, Repo +from testcontainers.core.utils import setup_logger from tests.containers.settings.gitea_settings import GiteaSettings from tests.policy_repos.gitea_policy_repo_settings import GiteaPolicyRepoSettings from tests.policy_repos.policy_repo_base import PolicyRepoBase from tests.policy_repos.policy_repo_settings import PolicyRepoSettings -from testcontainers.core.utils import setup_logger - class GiteaPolicyRepo(PolicyRepoBase): def __init__(self, settings: GiteaPolicyRepoSettings, *args): diff --git a/tests/policy_repos/gitea_policy_repo_settings.py b/tests/policy_repos/gitea_policy_repo_settings.py index eb6dbf42c..3b4cb7721 100644 --- a/tests/policy_repos/gitea_policy_repo_settings.py +++ b/tests/policy_repos/gitea_policy_repo_settings.py @@ -1,7 +1,10 @@ import os -from tests.policy_repos.supported_policy_repo import SupportedPolicyRepo + from testcontainers.core.utils import setup_logger +from tests.policy_repos.supported_policy_repo import SupportedPolicyRepo + + class GiteaPolicyRepoSettings: def __init__( self, @@ -25,9 +28,7 @@ def __init__( # if False, the an existing repository is expected webhook_secret: str | None = None, ): - - """ - GiteaPolicyRepoSettings initialization. + """GiteaPolicyRepoSettings initialization. This method is used to initialize the Gitea policy repository settings. 
It takes in the following parameters: @@ -52,7 +53,7 @@ def __init__( This method sets the following attributes of the Gitea policy repository settings: local_clone_path, owner, repo_name, branch_name, repo_host, repo_port_http, repo_port_ssh, password, pat, ssh_key_path, source_repo_owner, source_repo_name, should_fork, should_create_repo, webhook_secret. """ - + self.policy_repo_type = SupportedPolicyRepo.GITEA self.logger = setup_logger(__name__) @@ -62,7 +63,9 @@ def __init__( # Set attributes self.username = username if username else self.username - self.local_clone_path = local_clone_path if local_clone_path else self.local_clone_path + self.local_clone_path = ( + local_clone_path if local_clone_path else self.local_clone_path + ) self.owner = owner if owner else self.owner self.repo_name = repo_name if repo_name else self.repo_name self.branch_name = branch_name if branch_name else self.branch_name @@ -73,22 +76,28 @@ def __init__( self.password = password if password else self.password self.pat = pat if pat else self.pat self.ssh_key_path = ssh_key_path if ssh_key_path else self.ssh_key_path - self.source_repo_owner = source_repo_owner if source_repo_owner else self.source_repo_owner - self.source_repo_name = source_repo_name if source_repo_name else self.source_repo_name + self.source_repo_owner = ( + source_repo_owner if source_repo_owner else self.source_repo_owner + ) + self.source_repo_name = ( + source_repo_name if source_repo_name else self.source_repo_name + ) self.should_fork = should_fork if should_fork else self.should_fork - self.should_create_repo = should_create_repo if should_create_repo else self.should_create_repo + self.should_create_repo = ( + should_create_repo if should_create_repo else self.should_create_repo + ) self.webhook_secret = webhook_secret if webhook_secret else self.webhook_secret self.validate_dependencies() def load_from_env(self): - """ - Loads environment variables into the Gitea policy repository settings. + """Loads environment variables into the Gitea policy repository + settings. This method retrieves various environment variables required for configuring - the Gitea policy repository settings and assigns them to the corresponding - attributes of the settings object. It provides flexibility to configure - repository details, authentication credentials, and other configurations + the Gitea policy repository settings and assigns them to the corresponding + attributes of the settings object. It provides flexibility to configure + repository details, authentication credentials, and other configurations through environment variables. 
Attributes set by this method: @@ -114,10 +123,14 @@ def load_from_env(self): self.username = os.getenv("gitea_username", "permitAdmin") self.owner = os.getenv("OPAL_TARGET_ACCOUNT", None) self.github_pat = os.getenv("OPAL_GITHUB_PAT", None) - self.ssh_key_path = os.getenv("OPAL_PYTEST_POLICY_REPO_SSH_KEY_PATH", "~/.ssh/id_rsa") + self.ssh_key_path = os.getenv( + "OPAL_PYTEST_POLICY_REPO_SSH_KEY_PATH", "~/.ssh/id_rsa" + ) self.repo = os.getenv("OPAL_TARGET_REPO_NAME", "opal-example-policy-repo") self.source_repo_owner = os.getenv("OPAL_SOURCE_ACCOUNT", "ariWeinberg") - self.source_repo_name = os.getenv("OPAL_SOURCE_REPO_NAME", "opal-example-policy-repo") + self.source_repo_name = os.getenv( + "OPAL_SOURCE_REPO_NAME", "opal-example-policy-repo" + ) self.webhook_secret: str = os.getenv("OPAL_WEBHOOK_SECRET", "xxxxx") self.repo_host = os.getenv("OPAL_GITEA_HOST", "127.0.0.1") @@ -138,10 +151,14 @@ def load_from_env(self): self.local_clone_path = os.getenv("OPAL_GITEA_LOCAL_CLONE_PATH", None) self.source_repo_owner = os.getenv("OPAL_SOURCE_ACCOUNT", "ariWeinberg") - self.source_repo_name = os.getenv("OPAL_SOURCE_REPO_NAME", "opal-example-policy-repo") - + self.source_repo_name = os.getenv( + "OPAL_SOURCE_REPO_NAME", "opal-example-policy-repo" + ) + def validate_dependencies(self): """Validate required dependencies before starting the server.""" if not self.local_clone_path: raise ValueError("OPAL_GITEA_LOCAL_CLONE_PATH is required.") - self.logger.info(f"Gitea policy repo settings | Dependencies validated successfully.") \ No newline at end of file + self.logger.info( + f"Gitea policy repo settings | Dependencies validated successfully." + ) diff --git a/tests/policy_repos/github_policy_repo copy.py b/tests/policy_repos/github_policy_repo copy.py index 6cc5f0258..5093d5de9 100644 --- a/tests/policy_repos/github_policy_repo copy.py +++ b/tests/policy_repos/github_policy_repo copy.py @@ -33,7 +33,7 @@ # self.ssh_key = self.settings.opal_policy_repo_ssh_key_public # self.private_key = self.settings.opal_policy_repo_ssh_key_private # return - + # if self.settings.ssh_key_path.startswith("~"): # self.settings.ssh_key_path = os.path.expanduser(self.settings.ssh_key_path) diff --git a/tests/policy_repos/github_policy_repo.py b/tests/policy_repos/github_policy_repo.py index 773bd9635..4e4e37405 100644 --- a/tests/policy_repos/github_policy_repo.py +++ b/tests/policy_repos/github_policy_repo.py @@ -22,6 +22,7 @@ def setup_webhook(self, host, port): def create_webhook(self): pass + def __init__( self, settings: GithubPolicyRepoSettings, @@ -45,7 +46,9 @@ def build_repo_url(self, owner, repo) -> str: raise Exception("No valid authentication method set") def get_source_repo_url(self): - return self.build_repo_url(self.settings.source_repo_owner, self.settings.source_repo_name) + return self.build_repo_url( + self.settings.source_repo_owner, self.settings.source_repo_name + ) def clone_initial_repo(self): Repo.clone_from(self.get_source_repo_url(), self.settings.local_repo_path) @@ -56,7 +59,9 @@ def check_repo_exists(self): repo_list = gh.get_user().get_repos() for repo in repo_list: if repo.full_name == self.settings.repo: - self.logger.debug(f"Repository {self.settings.repo} already exists.") + self.logger.debug( + f"Repository {self.settings.repo} already exists." 
+ ) return True except Exception as e: @@ -79,7 +84,9 @@ def cleanup(self, delete_repo=True): try: if os.path.exists(self.settings.local_repo_path): shutil.rmtree(self.settings.local_repo_path) - self.logger.info(f"Local repository at {self.settings.local_repo_path} deleted.") + self.logger.info( + f"Local repository at {self.settings.local_repo_path} deleted." + ) except Exception as e: self.logger.error(f"Failed to delete local repo path: {e}") @@ -188,6 +195,7 @@ def update_branch(self, file_name, file_content): ref = repo.get_git_ref(branch_ref) latest_commit = repo.get_git_commit(ref.object.sha) from github.InputGitTreeElement import InputGitTreeElement + new_tree = repo.create_git_tree( [ InputGitTreeElement( diff --git a/tests/policy_repos/github_policy_settings.py b/tests/policy_repos/github_policy_settings.py index 70e9f384d..76e5ae781 100644 --- a/tests/policy_repos/github_policy_settings.py +++ b/tests/policy_repos/github_policy_settings.py @@ -1,11 +1,12 @@ -from testcontainers.core.utils import setup_logger import os +from testcontainers.core.utils import setup_logger + from tests.policy_repos.supported_policy_repo import SupportedPolicyRepo + class GithubPolicyRepoSettings: - """ - GithubPolicyRepoSettings class. + """GithubPolicyRepoSettings class. This class is used to store the settings for the GithubPolicyRepo. It is initialized with the following parameters: @@ -23,6 +24,7 @@ class GithubPolicyRepoSettings: should_fork (bool, optional): Whether to fork the repo. Defaults to False. webhook_secret (str, optional): The secret to use for the webhook. Defaults to None. """ + def __init__( self, temp_dir: str, @@ -38,10 +40,8 @@ def __init__( webhook_secret: str | None = None, opal_policy_repo_ssh_key_public: str | None = None, opal_policy_repo_ssh_key_private: str | None = None, - ): - - """ - GithubPolicyRepoSettings initialization. + ): + """GithubPolicyRepoSettings initialization. This class is used to store the settings for the GithubPolicyRepo. It is initialized with the following parameters: @@ -59,7 +59,7 @@ def __init__( should_fork (bool, optional): Whether to fork the repo. Defaults to False. webhook_secret (str, optional): The secret to use for the webhook. Defaults to None. 
""" - + self.policy_repo_type: str = SupportedPolicyRepo.GITHUB # Set the logger @@ -90,8 +90,12 @@ def __init__( self.repo = repo_name if repo_name else self.repo # Set the owner and name of the source repo - self.source_repo_owner = source_repo_owner if source_repo_owner else self.source_repo_owner - self.source_repo_name = source_repo_name if source_repo_name else self.source_repo_name + self.source_repo_owner = ( + source_repo_owner if source_repo_owner else self.source_repo_owner + ) + self.source_repo_name = ( + source_repo_name if source_repo_name else self.source_repo_name + ) # Set the local path to clone the repo to self.local_repo_path = os.path.join(temp_dir, self.source_repo_name) @@ -109,20 +113,28 @@ def __init__( self.opal_policy_repo_ssh_key_public = opal_policy_repo_ssh_key_public self.opal_policy_repo_ssh_key_private = opal_policy_repo_ssh_key_private - # Validate the dependencies and load the SSH key self.validate_dependencies() def load_from_env(self): self.owner = os.getenv("OPAL_TARGET_ACCOUNT", None) self.github_pat = os.getenv("OPAL_GITHUB_PAT", None) - self.ssh_key_path = os.getenv("OPAL_PYTEST_POLICY_REPO_SSH_KEY_PATH", "~/.ssh/id_rsa") + self.ssh_key_path = os.getenv( + "OPAL_PYTEST_POLICY_REPO_SSH_KEY_PATH", "~/.ssh/id_rsa" + ) self.repo = os.getenv("OPAL_TARGET_REPO_NAME", "opal-example-policy-repo") self.source_repo_owner = os.getenv("OPAL_SOURCE_ACCOUNT", "ariWeinberg") - self.source_repo_name = os.getenv("OPAL_SOURCE_REPO_NAME", "opal-example-policy-repo") + self.source_repo_name = os.getenv( + "OPAL_SOURCE_REPO_NAME", "opal-example-policy-repo" + ) self.webhook_secret: str = os.getenv("OPAL_WEBHOOK_SECRET", "xxxxx") def validate_dependencies(self): - if not self.password and not self.github_pat and not self.ssh_key_path and not self.opal_policy_repo_ssh_key_private: + if ( + not self.password + and not self.github_pat + and not self.ssh_key_path + and not self.opal_policy_repo_ssh_key_private + ): self.logger.error("No password or Github PAT or SSH key provided.") raise Exception("No authentication method provided.") diff --git a/tests/policy_repos/policy_repo_factory.py b/tests/policy_repos/policy_repo_factory.py index b5a3f33f0..72f5f50c4 100644 --- a/tests/policy_repos/policy_repo_factory.py +++ b/tests/policy_repos/policy_repo_factory.py @@ -4,12 +4,13 @@ from testcontainers.core.utils import setup_logger -from tests.policy_repos.supported_policy_repo import SupportedPolicyRepo from tests.policy_repos.gitea_policy_repo import GiteaPolicyRepo from tests.policy_repos.github_policy_repo import GithubPolicyRepo from tests.policy_repos.gitlab_policy_repo import GitlabPolicyRepo from tests.policy_repos.policy_repo_base import PolicyRepoBase from tests.policy_repos.policy_repo_settings import PolicyRepoSettings +from tests.policy_repos.supported_policy_repo import SupportedPolicyRepo + # Factory class to create a policy repository object based on the type of policy repository. 
class PolicyRepoFactory: diff --git a/tests/settings.py b/tests/settings.py index 436fa89b4..7c736604d 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -61,30 +61,41 @@ def load_from_env(self): load_dotenv() - self.policy_repo_provider = os.getenv("OPAL_PYTEST_POLICY_REPO_PROVIDER", SupportedPolicyRepo.GITEA) + self.policy_repo_provider = os.getenv( + "OPAL_PYTEST_POLICY_REPO_PROVIDER", SupportedPolicyRepo.GITEA + ) self.repo_owner = os.getenv("OPAL_PYTEST_REPO_OWNER", "iwphonedo") self.repo_name = os.getenv("OPAL_PYTEST_REPO_NAME", "opal-example-policy-repo") self.repo_password = os.getenv("OPAL_PYTEST_REPO_PASSWORD") self.github_pat = os.getenv("OPAL_PYTEST_GITHUB_PAT") self.ssh_key_path = os.getenv("OPAL_PYTEST_SSH_KEY_PATH") self.source_repo_owner = os.getenv("OPAL_PYTEST_SOURCE_ACCOUNT", "ariWeinberg") - self.source_repo_name = os.getenv("OPAL_PYTEST_SOURCE_REPO", "opal-example-policy-repo") + self.source_repo_name = os.getenv( + "OPAL_PYTEST_SOURCE_REPO", "opal-example-policy-repo" + ) self.webhook_secret = os.getenv("OPAL_PYTEST_WEBHOOK_SECRET", "xxxxx") self.should_fork = bool(os.getenv("OPAL_PYTEST_SHOULD_FORK", True)) self.use_webhook = bool(os.getenv("OPAL_PYTEST_USE_WEBHOOK", True)) self.wait_for_debugger = bool(os.getenv("OPAL_PYTEST_WAIT_FOR_DEBUGGER", False)) - - self.opal_policy_repo_ssh_key_private = os.getenv("OPAL_PYTEST_POLICY_REPO_SSH_PRIVATE_KEY", None) - self.opal_policy_repo_ssh_key_public = os.getenv("OPAL_PYTEST_POLICY_REPO_SSH_PUBLIC_KEY", None) + self.opal_policy_repo_ssh_key_private = os.getenv( + "OPAL_PYTEST_POLICY_REPO_SSH_PRIVATE_KEY", None + ) + self.opal_policy_repo_ssh_key_public = os.getenv( + "OPAL_PYTEST_POLICY_REPO_SSH_PUBLIC_KEY", None + ) # This will fallback to the official permitio images of opal-server and opal-client, you could use it to fallback also opa and cedar - self.do_not_build_images = bool(os.getenv("OPAL_PYTEST_DO_NOT_BUILD_IMAGES", False)) + self.do_not_build_images = bool( + os.getenv("OPAL_PYTEST_DO_NOT_BUILD_IMAGES", False) + ) # This will use the same image between test sessions. Otherwise, it will rebuild the images with every execution. # Don't use it if you changed the code, as your changes won't be deployed. # In order to use this flag, you should first set the keep_images flag to true, and for the following execution you will have the images. - self.skip_rebuild_images = bool(os.getenv("OPAL_PYTEST_SKIP_REBUILD_IMAGES", False)) + self.skip_rebuild_images = bool( + os.getenv("OPAL_PYTEST_SKIP_REBUILD_IMAGES", False) + ) # This will keep the images after the test session. If you use it, you will be able to use skip_rebuild_images the next time. 
self.keep_images = bool(os.getenv("OPAL_PYTEST_KEEP_IMAGES", True)) @@ -102,9 +113,9 @@ def dump_settings(self): class PyTestSessionSettings(List): - repo_providers = [SupportedPolicyRepo.GITHUB]#, SupportedPolicyRepo.GITEA] + repo_providers = [SupportedPolicyRepo.GITHUB] # , SupportedPolicyRepo.GITEA] modes = ["without_webhook"] - broadcasters = ["postgres"]#, "redis"] + broadcasters = ["postgres"] # , "redis"] broadcaster = "fgsfdg" repo_provider = "fdgdfg" mode = "rgrtre" @@ -159,13 +170,9 @@ def __next__(self): "repo_provider": self.repo_provider, "broadcaster": self.broadcaster, "mode": self.mode, - - "is_final": (self.current_broadcaster >= len(self.broadcasters)), "is_first": is_first, - - - "github_pat":pytest_settings.github_pat, + "github_pat": pytest_settings.github_pat, "repo_owner": pytest_settings.repo_owner, "repo_name": pytest_settings.repo_name, "repo_password": pytest_settings.repo_password, @@ -180,10 +187,8 @@ def __next__(self): "do_not_build_images": pytest_settings.do_not_build_images, "skip_rebuild_images": pytest_settings.skip_rebuild_images, "keep_images": pytest_settings.keep_images, - "opal_policy_repo_ssh_key_private": pytest_settings.opal_policy_repo_ssh_key_private, "opal_policy_repo_ssh_key_public": pytest_settings.opal_policy_repo_ssh_key_public, - "number_of_opal_servers": 2, "number_of_opal_clients": 2, "topics": {"topic_1": 1, "topic_2": 1}, diff --git a/tests/test_app.py b/tests/test_app.py index 5614aa39f..fb7bd6dec 100644 --- a/tests/test_app.py +++ b/tests/test_app.py @@ -107,7 +107,6 @@ def update_policy( utils.wait_policy_repo_polling_interval(opal_server_container) - def test_topiced_user_location( opal_servers: list[OpalServerContainer], topiced_clients: dict[str, OpalClientContainer], @@ -207,14 +206,16 @@ async def test_policy_and_data_updates( @pytest.mark.parametrize("attempts", [10]) # Number of attempts to repeat the check -def test_read_statistics(attempts, opal_servers: list[OpalServerContainer], session_matrix,): +def test_read_statistics( + attempts, + opal_servers: list[OpalServerContainer], + session_matrix, +): """Tests the statistics feature by verifying the number of clients and servers.""" logger.info("- Testing statistics feature") - - number_of_opal_servers = session_matrix["number_of_opal_servers"] number_of_opal_clients = session_matrix["number_of_opal_clients"] @@ -270,7 +271,9 @@ def test_read_statistics(attempts, opal_servers: list[OpalServerContainer], sess except requests.RequestException as e: if response is not None: - logger.error(f"Request failed: {response.status_code} {response.text}") + logger.error( + f"Request failed: {response.status_code} {response.text}" + ) pytest.fail(f"Failed to fetch statistics: {e}") logger.info("Statistics check passed in all attempts.") diff --git a/tests/utils.py b/tests/utils.py index db38bd4e4..b092e83e1 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -144,7 +144,10 @@ def generate_ssh_key_pair(): ) # Return the keys as strings - return {"private": private_key_pem.decode("utf-8"), "public": public_key_openssh.decode("utf-8")} + return { + "private": private_key_pem.decode("utf-8"), + "public": public_key_openssh.decode("utf-8"), + } async def opal_authorize(user: str, policy_url: str): @@ -507,6 +510,7 @@ def global_exception_handler(exc_type, exc_value, exc_traceback): # Set the global exception handler sys.excepthook = global_exception_handler + def str2bool(val: str) -> bool: return str(val).lower() in ("true", "1", "yes") @@ -515,7 +519,6 @@ def pre_set(): # wait some seconds 
for the debugger to attach debugger_wait_time = 5 # seconds - def cancel_wait_for_client_after_timeout(): try: time.sleep(debugger_wait_time) @@ -523,12 +526,13 @@ def cancel_wait_for_client_after_timeout(): except Exception as e: logger.debug(f"Failed to cancel wait for client: {e}") - try: if pytest_settings.wait_for_debugger: t = threading.Thread(target=cancel_wait_for_client_after_timeout) t.start() - logger.debug(f"Waiting for debugger to attach... {debugger_wait_time} seconds timeout") + logger.debug( + f"Waiting for debugger to attach... {debugger_wait_time} seconds timeout" + ) debugpy.wait_for_client() except Exception as e: logger.debug(f"Failed to attach debugger: {e}") @@ -537,7 +541,6 @@ def cancel_wait_for_client_after_timeout(): # utils.install_opal_server_and_client() - def wait_sometime(): """Pauses execution based on the environment. @@ -552,7 +555,10 @@ def wait_sometime(): logger.info("Running inside GitHub Actions. Sleeping for 30 seconds...") for secconds_ellapsed in range(30): time.sleep(1) - print(f"Sleeping for \033[91m{29 - secconds_ellapsed}\033[0m seconds... \r",end="\r" if secconds_ellapsed < 29 else "\n") + print( + f"Sleeping for \033[91m{29 - secconds_ellapsed}\033[0m seconds... \r", + end="\r" if secconds_ellapsed < 29 else "\n", + ) else: logger.info("Running on the local machine. Press Enter to continue...") input() # Wait for key press
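
As a quick orientation for the broadcaster changes spread across the hunks above, below is a minimal, self-contained sketch of the URL convention that BroadcastContainerBase.get_url() assembles for each supported protocol (redis and kafka use host:port only, postgres adds credentials). This is an illustration only, assuming Python 3.10+ for the match statement; the _Settings dataclass and broadcast_url helper are hypothetical names introduced here and are not part of the patch.

from dataclasses import dataclass


@dataclass
class _Settings:
    # Illustrative stand-in for the per-container settings objects in the patch.
    protocol: str
    container_name: str
    port: int
    user: str = ""
    password: str = ""


def broadcast_url(s: _Settings) -> str:
    """Build a broadcast channel URL, mirroring the protocol cases handled
    in tests/containers/broadcast_container_base.py."""
    match s.protocol:
        case "postgres":
            # postgres is the only protocol that embeds credentials in the URL
            return f"{s.protocol}://{s.user}:{s.password}@{s.container_name}:{s.port}"
        case "redis" | "kafka":
            return f"{s.protocol}://{s.container_name}:{s.port}"
        case _:
            raise ValueError(f"Unknown broadcast protocol: {s.protocol}")


# Example: prints "redis://redis_broadcast_channel:6379"
print(broadcast_url(_Settings("redis", "redis_broadcast_channel", 6379)))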