From f38e6ce11c090da29d8a5a656d6d75ca1ae76ff8 Mon Sep 17 00:00:00 2001 From: ElNiak Date: Tue, 15 Oct 2024 18:39:24 +0200 Subject: [PATCH] compose +- ok -> still need pcap -> then try with tester or shadow ? --- panther/config/config.py | 1 - panther/config/experiment_config.yaml | 23 +- panther/config/global_config.yaml | 10 - panther/config/global_config.yaml.j2 | 10 - panther/config/gml/topology.gml | 27 +++ panther/core/experiment_manager.py | 206 ++++------------- panther/core/factories/environment_manager.py | 6 + .../network_environment_interface.py | 7 - .../interfaces/service_manager_interface.py | 6 + panther/core/interfaces/todo.md | 1 + panther/core/observer/gui_observer.py | 2 +- panther/core/test.py | 80 +++++++ panther/generate_certificates.sh | 26 +++ panther/outputs/experiment_report.txt | 0 panther/panther_cli.py | 59 +---- .../docker_compose/docker-compose-template.j2 | 38 +++- .../docker-compose.generated.yml | 64 +++++- .../docker_compose/docker_compose_plugin.py | 209 +++--------------- .../implementations/quic/picoquic/Dockerfile | 14 +- .../implementations/quic/picoquic/config.yaml | 16 +- .../quic/picoquic/service_manager.py | 196 ++++++++-------- .../quic/picoquic/templates/client_command.j2 | 2 +- .../quic/picoquic/templates/server_command.j2 | 2 +- 23 files changed, 428 insertions(+), 577 deletions(-) delete mode 100644 panther/config/global_config.yaml delete mode 100644 panther/config/global_config.yaml.j2 create mode 100644 panther/config/gml/topology.gml create mode 100644 panther/core/interfaces/todo.md create mode 100644 panther/core/test.py create mode 100755 panther/generate_certificates.sh delete mode 100644 panther/outputs/experiment_report.txt diff --git a/panther/config/config.py b/panther/config/config.py index 451d8c67de..c59bb8ce7d 100644 --- a/panther/config/config.py +++ b/panther/config/config.py @@ -6,7 +6,6 @@ class ConfigLoader: def __init__(self, config_dir: str): self.config_dir = config_dir - self.global_config = self.load_global_config() self.experiment_config = self.load_experiment_config() def load_global_config(self): diff --git a/panther/config/experiment_config.yaml b/panther/config/experiment_config.yaml index 6c6a66c0df..0ea6cd0ab4 100644 --- a/panther/config/experiment_config.yaml +++ b/panther/config/experiment_config.yaml @@ -1,3 +1,14 @@ +logging: + level: DEBUG + format: "%(asctime)s [%(levelname)s] - %(module)s - %(message)s" +paths: + output_dir: "outputs" + log_dir: "outputs/logs" + config_dir: "configs" + plugin_dir: "plugins" +docker: + build_docker_image: True +generate_new_certificates: True tests: - name: "QUIC Client-Server Communication Test" description: "Verify that the Picoquic client can communicate with the Picoquic server over Docker Compose network." @@ -7,10 +18,9 @@ tests: services: picoquic_server: name: "picoquic_server" # Added 'name' key - implementation: "picoquic" + implementation: "picoquic" # parameters are presents in folder plugins/implementations/quic/picoquic/config.yaml version: "rfc9000" - parameters: - role: "server" + role: "server" ports: - "4443:4443" - "8080:8080" # Health check endpoint @@ -18,11 +28,8 @@ tests: name: "picoquic_client" # Added 'name' key implementation: "picoquic" version: "rfc9000" - parameters: - role: "client" - ticket-file: "/opt/ticket/ticket.pem" # Added 'ticket-file' if required - target: "picoquic_server" # Docker Compose service name - message: "Hello from Picoquic Client!" 
+ role: "client" + target: "picoquic_server" # Docker Compose service name ports: - "5000:5000" # Example port if needed - "8081:8081" # Changed to avoid port conflict diff --git a/panther/config/global_config.yaml b/panther/config/global_config.yaml deleted file mode 100644 index c76fcac8b0..0000000000 --- a/panther/config/global_config.yaml +++ /dev/null @@ -1,10 +0,0 @@ -logging: - level: DEBUG - format: "%(asctime)s [%(levelname)s] - %(module)s - %(message)s" -paths: - output_dir: "outputs" - log_dir: "outputs/logs" - config_dir: "configs" - plugin_dir: "plugins" -docker: - build_docker_image: True diff --git a/panther/config/global_config.yaml.j2 b/panther/config/global_config.yaml.j2 deleted file mode 100644 index c76fcac8b0..0000000000 --- a/panther/config/global_config.yaml.j2 +++ /dev/null @@ -1,10 +0,0 @@ -logging: - level: DEBUG - format: "%(asctime)s [%(levelname)s] - %(module)s - %(message)s" -paths: - output_dir: "outputs" - log_dir: "outputs/logs" - config_dir: "configs" - plugin_dir: "plugins" -docker: - build_docker_image: True diff --git a/panther/config/gml/topology.gml b/panther/config/gml/topology.gml new file mode 100644 index 0000000000..fc486067bf --- /dev/null +++ b/panther/config/gml/topology.gml @@ -0,0 +1,27 @@ +graph [ + directed 0 + node [ + id 1 + label "Node1" + ] + node [ + id 2 + label "Node2" + ] + node [ + id 3 + label "Node3" + ] + edge [ + source 1 + target 2 + ] + edge [ + source 2 + target 3 + ] + edge [ + source 3 + target 1 + ] +] diff --git a/panther/core/experiment_manager.py b/panther/core/experiment_manager.py index 57f5a1be7e..a78e3be8c7 100644 --- a/panther/core/experiment_manager.py +++ b/panther/core/experiment_manager.py @@ -20,19 +20,18 @@ from core.factories.plugin_manager import PluginManager from core.observer.event_manager import EventManager from core.observer.event import Event +# from core.test import Test class ExperimentManager: def __init__( self, - global_config: DictConfig, experiment_config: DictConfig, experiment_name: str = None, plugin_dir: str = "plugins", ): self.logger = logging.getLogger("ExperimentManager") - self.global_config = global_config self.experiment_config = experiment_config self.experiment_name = ( f"{experiment_name}_{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}" if experiment_name @@ -40,7 +39,8 @@ def __init__( ) # TODO sometime logs dir is not used directly in inv -> assume it is used in the future - self.experiment_dir = Path(global_config.paths.output_dir) / self.experiment_name + # TODO split experiment parameters and test parameters + move some parameters to the test level + self.experiment_dir = Path(experiment_config.paths.output_dir) / self.experiment_name self.experiment_dir.mkdir(parents=True, exist_ok=True) self.logs_dir = self.experiment_dir / "logs" self.docker_compose_logs_dir = self.experiment_dir / "docker_compose_logs" @@ -66,9 +66,8 @@ def load_logging(self): """ Configures logging to output to both console and a log file. 
""" - log_level = getattr(logging, self.global_config.logging.level.upper(), logging.INFO) - log_format = self.global_config.logging.format - formatter = logging.Formatter('%(asctime)s [%(levelname)s] %(name)s: %(message)s') + log_level = getattr(logging, self.experiment_config.logging.level.upper(), logging.INFO) + log_format = self.experiment_config.logging.format # File Handler panther_log_file = self.logs_dir / "experiment.log" @@ -156,33 +155,26 @@ def build_docker_images(self): """ self.logger.info("Building Docker images for implementations") self.plugin_loader.build_all_docker_images() - # for manager in self.service_managers: - # if hasattr(manager, "build_image"): - # try: - # manager.build_image() - # self.logger.info(f"Built Docker image for '{manager.__class__.__name__}'") - # self.event_manager.notify(Event("docker_image_built", {"manager": manager})) - # except Exception as e: - # self.logger.error(f"Failed to build Docker image for '{manager.__class__.__name__}': {e}") - # else: - # self.logger.debug(f"No build_image method for '{manager.__class__.__name__}'. Skipping.") - - def setup_environments(self, services: Dict[str, Dict[str, Any]], deployment_commands: Dict[str, str]): + + def setup_environments(self, services: Dict[str, Dict[str, Any]], deployment_commands: Dict[str, str], paths: Dict[str, str], timestamp: str): """ Sets up all environments managed by the environment managers, providing service configurations and deployment commands. :param services: Dictionary of services with their configurations. :param deployment_commands: Dictionary of deployment commands generated by service managers. + :param paths: Dictionary containing various path configurations. + :param timestamp: The timestamp string to include in log paths. """ self.logger.info("Setting up all environments") for env_manager in self.environment_managers: try: - env_manager.setup_environment(services, deployment_commands) + env_manager.setup_environment(services, deployment_commands, paths, timestamp) self.logger.info(f"Environment '{env_manager.__class__.__name__}' setup successfully.") self.event_manager.notify(Event("environment_setup", {"environment": env_manager})) except Exception as e: self.logger.error(f"Failed to setup environment '{env_manager.__class__.__name__}': {e}") + def generate_deployment_commands(self, environment:str) -> Dict[str, str]: """ Collects deployment commands from all service managers based on the services defined in the tests. 
@@ -202,8 +194,8 @@ def generate_deployment_commands(self, environment:str) -> Dict[str, str]: # Ensure 'name' key exists if 'name' not in service_details: service_details['name'] = service_name - command_dict = manager.generate_deployment_commands(service_details,environment) - deployment_commands.update(command_dict) + info_commands = manager.generate_deployment_commands(service_details,environment) + deployment_commands.update(info_commands) except Exception as e: self.logger.error(f"Failed to generate deployment command for service '{service_name}': {e}") self.logger.debug(f"Collected deployment commands: {deployment_commands}") @@ -263,7 +255,7 @@ def check_service_responsiveness(self, service_name: str, endpoint: str, expecte return # Assuming service manager provides the base URL or IP - base_url = service_manager.get_base_url() # Implement this method in IServiceManager and concrete classes + base_url = service_manager.get_base_url(service_name) # Implement this method in IServiceManager and concrete classes url = urljoin(base_url, endpoint) self.logger.debug(f"Checking responsiveness of '{service_name}' at '{url}'") @@ -332,6 +324,15 @@ def execute_steps(self, steps: Dict[str, Any]): self.event_manager.notify(Event("step_completed", {"step": "wait", "duration": duration})) # Add more step handlers as needed + def load_tests(self): + """ + Loads tests from the experiment configuration and initializes Test instances. + """ + tests_config = self.experiment_config.get('tests', []) + self.tests = [] + for test_config in tests_config: + test = Test(test_config, self) + self.tests.append(test) def run_tests(self): """ @@ -342,14 +343,18 @@ def run_tests(self): for test in tests: self.logger.info(f"Starting Test: {test.get('name', 'Unnamed Test')}") - self.logger.info(f"Description: {test.get('description', '')}") + self.logger.info(f"Description: {test.get('description', '')}") - protocol = test.get("protocol") + protocol = test.get("protocol") environment = test.get("network_environment") # Ensure consistent naming - services = test.get("services", {}) + services = test.get("services", {}) self.current_test_services = services - steps = test.get("steps", {}) + steps = test.get("steps", {}) assertions = test.get("assertions", []) + + # Check if new certificates should be generated + if self.experiment_config.get('generate_new_certificates', False): + subprocess.run(["bash", 'generate_certificates.sh']) # Step 1: Extract required implementations from services required_implementations = set() @@ -378,9 +383,14 @@ def run_tests(self): deployment_commands = self.generate_deployment_commands(environment) # Step 5: Setup environments with services - self.setup_environments(services,deployment_commands) + # Generate timestamp and paths + timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S") + paths = self.experiment_config.get('paths', {}) + + self.setup_environments(services,deployment_commands, paths, timestamp) # Step 6: Deploy services + self.deploy_services() # Step 7: Execute test steps @@ -397,144 +407,4 @@ def run_tests(self): except Exception as e: self.logger.error(f"Experiment encountered an error: {e}") self.teardown_experiment() - - - # def run_experiment(self): - # for test in self.experiment.tests: - # self.log.info(f"Running Test: {test.name}") - # protocol_name = test.protocol - # environment_name = test.environment - - # protocol_plugin = self.protocol_plugins.get(protocol_name) - # if not protocol_plugin: - # self.log.error(f"Protocol plugin '{protocol_name}' not found.") 
- # continue - - # self.log.info(f"Setting up Environment: {environment_name}") - # try: - # # Extract service configurations - # services_config = {} - # for service_name, service_details in test.services.items(): - # implementation_name = service_details.get("implementation") - # version = service_details.get("version") - # service_manager = protocol_plugin.get_service_manager( - # implementation_name - # ) - # if not service_manager: - # self.log.error( - # f"Service manager for implementation '{implementation_name}' not found." - # ) - # continue - # image_name = f"{implementation_name}_{version}_panther:latest" # Adjust if using different tagging - # ports = service_details.get("ports", []) - # depends_on = service_details.get("depends_on", []) - # environment_vars = service_details.get("environment", {}) - # services_config[service_name] = { - # "image": image_name, - # "ports": ports, - # "depends_on": depends_on, - # "environment": environment_vars, - # } - # self.log.debug( - # f"Service '{service_name}' configured with image '{image_name}'" - # ) - - # self.env_manager.setup_environment(environment_name, services_config) - # except Exception as e: - # self.log.error( - # f"Failed to set up environment '{environment_name}': {e}" - # ) - # continue - - # self.log.info("Services are up and running.") - - # # Execute Test Steps - # self.log.info("Executing Test Steps") - # wait_step = test.steps.get("wait", {}) - # wait_duration = wait_step.get("duration", 10) - # self.log.info( - # f"Waiting for {wait_duration} seconds to allow communication..." - # ) - # time.sleep(wait_duration) - - # # Perform Assertions - # self.log.info("Performing Assertions") - # assertions = test.assertions if "assertions" in test else [] - # test_result = {"test_name": test.name, "success": True, "assertions": []} - # for assertion in assertions: - # assertion_outcome = self.perform_assertion(assertion) - # test_result["assertions"].append(assertion_outcome) - # if not assertion_outcome["passed"]: - # test_result["success"] = False - - # self.results.append(test_result) - # self.log.info( - # f"Test '{test.name}' Completed with {'Success' if test_result['success'] else 'Failure'}\n" - # ) - - # # Stop Services - # self.log.info("Stopping Services") - # try: - # self.env_manager.teardown_environment(environment_name) - # self.log.info("Services stopped successfully.") - # except Exception as e: - # self.log.error(f"Failed to stop services: {e}") - - # # After all tests, generate the report - # self.generate_report() - - # def perform_assertion(self, assertion): - # assertion_type = assertion.type - # passed = False - # details = "" - # if assertion_type == "service_responsive": - # service = assertion.service - # endpoint = assertion.endpoint # Format: "port/path" - # expected_status = assertion.expected_status - # try: - # port, path = endpoint.split("/", 1) - # url = f"http://localhost:{port}/{path}" - # self.log.info(f"Performing health check on {service} at {url}") - # response = requests.get(url, timeout=5) - # if response.status_code == expected_status: - # passed = True - # details = f"Service '{service}' responded with expected status {expected_status}." - # self.log.info(details) - # else: - # details = f"Service '{service}' responded with status {response.status_code}, expected {expected_status}." 
- # self.log.error(details) - # except Exception as e: - # details = f"Service '{service}' health check failed: {e}" - # self.log.error(details) - # elif assertion_type == "performance_metrics": - # # Implement performance metrics validation - # service = assertion.service - # metric = assertion.metric - # expected_min = assertion.expected_min - # # Placeholder implementation - # details = f"Performance metrics for '{service}' not implemented yet." - # self.log.warning(details) - # else: - # details = f"Unknown assertion type: {assertion_type}" - # self.log.warning(details) - - # return {"type": assertion_type, "passed": passed, "details": details} - - # def generate_report(self): - # """ - # Generates a simple textual report summarizing test results. - # """ - # report_path = os.path.join( - # self.global_config.paths.output_dir, "experiment_report.txt" - # ) - # os.makedirs(os.path.dirname(report_path), exist_ok=True) - # with open(report_path, "w") as report_file: - # for result in self.results: - # report_file.write(f"Test: {result['test_name']}\n") - # report_file.write(f"Success: {'Yes' if result['success'] else 'No'}\n") - # for assertion in result["assertions"]: - # status = "PASSED" if assertion["passed"] else "FAILED" - # report_file.write(f" Assertion: {assertion['type']} - {status}\n") - # report_file.write(f" Details: {assertion['details']}\n") - # report_file.write("\n") - # self.log.info(f"Experiment report generated at {report_path}") + \ No newline at end of file diff --git a/panther/core/factories/environment_manager.py b/panther/core/factories/environment_manager.py index a00e5d2b2f..c0a77b5d8c 100644 --- a/panther/core/factories/environment_manager.py +++ b/panther/core/factories/environment_manager.py @@ -6,6 +6,12 @@ class EnvironmentManager: def __init__(self, environment_plugins: Dict[str, IEnvironmentPlugin]): self.environment_plugins = environment_plugins + + def parse_gml(self, gml_file: str): + """ + Parses the GML file and returns the graph. + """ + raise NotImplementedError def setup_environment(self, environment_name: str, services: Dict[str, Dict[str, Any]]): """ diff --git a/panther/core/interfaces/environments/network_environment_interface.py b/panther/core/interfaces/environments/network_environment_interface.py index 64148925bb..5e4c324d50 100644 --- a/panther/core/interfaces/environments/network_environment_interface.py +++ b/panther/core/interfaces/environments/network_environment_interface.py @@ -4,13 +4,6 @@ from core.interfaces.environments.environment_interface import IEnvironmentPlugin class INetworkEnvironment(IEnvironmentPlugin): - @abstractmethod - def configure_network(self, services: Dict[str, Dict[str, Any]]): - """ - Configures the network environment. - """ - pass - @abstractmethod def setup_environment(self): """ diff --git a/panther/core/interfaces/service_manager_interface.py b/panther/core/interfaces/service_manager_interface.py index 989aac3572..40ca7cc515 100644 --- a/panther/core/interfaces/service_manager_interface.py +++ b/panther/core/interfaces/service_manager_interface.py @@ -5,6 +5,12 @@ class IServiceManager(ABC): def __init__(self): pass + @abstractmethod + def get_base_url(self, service_name: str) -> str: + """ + Returns the base URL for the given service. 
+ """ + raise NotImplementedError("Method 'get_base_url' must be implemented in subclasses.") @abstractmethod def get_implementation_name(self) -> str: diff --git a/panther/core/interfaces/todo.md b/panther/core/interfaces/todo.md new file mode 100644 index 0000000000..3b72e9bb6e --- /dev/null +++ b/panther/core/interfaces/todo.md @@ -0,0 +1 @@ +create same directory sructure than plugin for the interface so it can be easily found diff --git a/panther/core/observer/gui_observer.py b/panther/core/observer/gui_observer.py index 2816c7a83c..e2e8c6a16b 100644 --- a/panther/core/observer/gui_observer.py +++ b/panther/core/observer/gui_observer.py @@ -1,5 +1,5 @@ -from panther.core.interfaces.observer_interface import IObserver +from core.interfaces.observer_interface import IObserver from core.experiment import Experiment diff --git a/panther/core/test.py b/panther/core/test.py new file mode 100644 index 0000000000..db0b24c5d1 --- /dev/null +++ b/panther/core/test.py @@ -0,0 +1,80 @@ +from datetime import datetime +import logging +from typing import Any, Dict, List + +from core.experiment_manager import ExperimentManager +from core.interfaces.environments.environment_interface import IEnvironmentPlugin +from core.interfaces.service_manager_interface import IServiceManager + + +class Test: + def __init__(self, test_config: Dict[str, Any], experiment_manager: 'ExperimentManager'): + self.logger = logging.getLogger(f"Test:{test_config.get('name', 'Unnamed Test')}") + self.test_config = test_config + self.name = test_config.get('name', 'Unnamed Test') + self.description = test_config.get('description', '') + self.protocol = test_config.get('protocol') + self.environment = test_config.get('network_environment') + self.services = test_config.get('services', {}) + self.steps = test_config.get('steps', {}) + self.assertions = test_config.get('assertions', []) + self.experiment_manager = experiment_manager + self.service_managers: List[IServiceManager] = [] + self.environment_managers: List[IEnvironmentPlugin] = [] + self.current_test_services = self.services + + def run(self): + self.logger.info(f"Starting Test: {self.name}") + self.logger.info(f"Description: {self.description}") + + # Step 1: Extract required implementations from services + required_implementations = set() + for service_name, service_details in self.services.items(): + implementation = service_details.get("implementation") + if implementation: + required_implementations.add(implementation) + else: + self.logger.warning(f"Service '{service_name}' does not specify an implementation.") + + if not required_implementations: + self.logger.error("No implementations specified for services. 
Aborting test.") + return + + self.logger.debug(f"Required implementations for this test: {required_implementations}") + + # Step 2: Initialize service managers + self.initialize_protocol_managers([self.protocol], required_implementations) + + # Step 3: Initialize environment managers + self.initialize_environment_managers([self.environment]) + + # Step 4: Build Docker images if necessary + self.build_docker_images() + + # Step 5: Generate deployment commands + deployment_commands = self.generate_deployment_commands(self.environment) + + # Generate timestamp and paths + timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S") + paths = self.experiment_manager.experiment_config.get('paths', {}) + + # Step 6: Setup environments with services + self.setup_environments(self.services, deployment_commands, paths, timestamp) + + # Step 7: Deploy services + self.deploy_services() + + # Step 8: Execute test steps + self.execute_steps(self.steps) + + # Step 9: Perform assertions + self.perform_assertions(self.assertions) + + # Step 10: Teardown for the test + self.teardown_test() + + self.logger.info(f"Completed Test: {self.name}") + self.experiment_manager.event_manager.notify(Event("test_completed", {"test": self.name})) + + # Methods to be implemented... + diff --git a/panther/generate_certificates.sh b/panther/generate_certificates.sh new file mode 100755 index 0000000000..9a75a4c131 --- /dev/null +++ b/panther/generate_certificates.sh @@ -0,0 +1,26 @@ +# generate_certificates.sh + +#!/bin/bash + +# Create directories if they don't exist +mkdir -p config/certs +mkdir -p config/tls_keys + +# Remove existing certificates and keys +rm -f config/certs/*.pem config/certs/*.csr +rm -f config/tls_keys/*.key + +# Generate private key +openssl genrsa -out config/certs/key.pem 2048 + +# Generate certificate signing request (CSR) +openssl req -new -key config/certs/key.pem -out config/certs/cert.csr \ + -subj "/C=US/ST=State/L=City/O=Organization/OU=Unit/CN=localhost" + +# Generate self-signed certificate +openssl x509 -req -days 365 -in config/certs/cert.csr -signkey config/certs/key.pem -out config/certs/cert.pem + +# Generate ticket key (if applicable) +openssl rand -out config/tls_keys/ticket.key 48 + +echo "Certificates and keys generated successfully." 
diff --git a/panther/outputs/experiment_report.txt b/panther/outputs/experiment_report.txt deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/panther/panther_cli.py b/panther/panther_cli.py index a4c02ffd46..b712a9f2f5 100644 --- a/panther/panther_cli.py +++ b/panther/panther_cli.py @@ -45,72 +45,15 @@ def main(): else: # # Load Configurations config_loader = ConfigLoader(args.config_dir) - global_config = config_loader.global_config experiment_config = config_loader.experiment_config + # Start the experiment experiment_manager = ExperimentManager( - global_config=global_config, experiment_config=experiment_config, experiment_name=args.experiment_name ) experiment_manager.run_tests() - # # Create Experiment Directory - # experiment_dir = create_experiment_directory(global_config) - # logs_dir = experiment_dir / "logs" - # docker_compose_logs_dir = experiment_dir / "docker_compose_logs" - # container_logs_dir = experiment_dir / "container_logs" - # other_artifacts_dir = experiment_dir / "other_artifacts" - - # # Create necessary subdirectories - # logs_dir.mkdir(parents=True, exist_ok=True) - # docker_compose_logs_dir.mkdir(parents=True, exist_ok=True) - # container_logs_dir.mkdir(parents=True, exist_ok=True) - # other_artifacts_dir.mkdir(parents=True, exist_ok=True) - - # # Setup Logging - # logger = setup_logger(global_config, logs_dir) - - # # Initialize PluginLoader - # plugins_dir = args.plugin_dir # Adjust as per your directory structure - # plugin_loader = PluginLoader(plugins_dir) - - # # Load plugins and build Docker images - # loaded_plugins = plugin_loader.load_plugins( - # build_docker_image=global_config.docker.build_docker_image - # ) - - # # Access built images if needed - # built_images = loaded_plugins.get("built_images", {}) - # logger.info(f"Built Images: {built_images}") - - # # Initialize Experiment Manager - # experiment_manager = ExperimentManager( - # experiment_config=experiment_config, - # protocol_plugins=loaded_plugins["protocol_plugins"], - # environment_plugins=loaded_plugins["environment_plugins"], - # global_config=global_config, - # ) - - # # Run Experiments - # experiment_manager.run_experiment() - - # logger.info("Panther-SCP CLI Finished") - - - -def create_experiment_directory(global_config) -> Path: - """ - Creates a unique experiment directory based on the current timestamp. 
- """ - timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S") - experiment_dir = Path( - os.path.join(global_config.paths.output_dir, f"experiment_{timestamp}") - ) - experiment_dir.mkdir(parents=True, exist_ok=True) - return experiment_dir - - if __name__ == "__main__": main() diff --git a/panther/plugins/environments/network_environment/docker_compose/docker-compose-template.j2 b/panther/plugins/environments/network_environment/docker_compose/docker-compose-template.j2 index fec7709fd8..5348d3a2b9 100644 --- a/panther/plugins/environments/network_environment/docker_compose/docker-compose-template.j2 +++ b/panther/plugins/environments/network_environment/docker_compose/docker-compose-template.j2 @@ -1,5 +1,3 @@ -# plugins/environments/docker_compose/templates/docker-compose-template.j2 - version: '3.8' services: @@ -11,15 +9,39 @@ services: {% for port in service.ports %} - "{{ port }}" {% endfor %} + {% if service.role == 'client' %} + depends_on: + - {{ service.target }} + # condition: service_healthy + {% endif %} + # {% if service.role == 'server' %} + # healthcheck: + # test: ["CMD", "curl", "-f", "http://localhost:{{ service.healthcheck_port }}/health"] + # interval: 5s + # timeout: 2s + # retries: 5 + # start_period: 5s + # {% endif %} environment: - ROLE: {{ service.parameters.role }} - VERBOSITY: {{ service.parameters.verbosity }} - entrypoint: "{{ deployment_commands[service_name] }}" + ROLE: {{ service.role }} + {% if service.role == 'client' %} + TARGET: {{ service.target }} + MESSAGE: "{{ service.message }}" + {% endif %} + tty: true + stdin_open: true + working_dir: "{{ deployment_info[service_name]['working_dir'] }}" + command: + - {{ deployment_info[service_name]['command'] }} volumes: - - "{{ output_dir }}/logs/{{ service_name }}:/app/logs/" - - "{{ cert_dir }}:/app/certs/" + - "{{ log_dir | realpath }}/{{ service_name }}/:/app/logs/" + {% for volume in deployment_info[service_name]['volumes'] %} + - "{{ volume.local | realpath }}:{{ volume.container }}" + {% endfor %} networks: - - panther_network + panther_network: + aliases: + - {{ service_name }} {% endfor %} networks: diff --git a/panther/plugins/environments/network_environment/docker_compose/docker-compose.generated.yml b/panther/plugins/environments/network_environment/docker_compose/docker-compose.generated.yml index 0c29b456fb..6e30badefe 100644 --- a/panther/plugins/environments/network_environment/docker_compose/docker-compose.generated.yml +++ b/panther/plugins/environments/network_environment/docker_compose/docker-compose.generated.yml @@ -1,5 +1,3 @@ -# plugins/environments/docker_compose/templates/docker-compose-template.j2 - version: '3.8' services: @@ -13,15 +11,36 @@ services: - "8080:8080" + + # + # healthcheck: + # test: ["CMD", "curl", "-f", "http://localhost:/health"] + # interval: 5s + # timeout: 2s + # retries: 5 + # start_period: 5s + # environment: ROLE: server - VERBOSITY: - entrypoint: "/opt/picoquic/./picoquicdemo -q /app/logs/server.qlog -c /opt/certs/cert.pem -k /opt/certs/key.pem -a hq-interop -p 4443 -D -L -n servername > /app/logs/server.log 2>&1" + + tty: true + stdin_open: true + working_dir: "/opt/picoquic" + command: + - "./picoquicdemo -c /opt/certs/cert.pem -k /opt/certs/key.pem -a hq-interop -n servername -D -L -p 4443 > /app/logs/server.log 2>&1" volumes: - - "/home/crochetch/Documents/Projects/VerificationQUIC/PANTHER-SCP/panther/outputs/experiment_2024-10-11_14-22-31/logs/picoquic_server:/app/logs/" - - 
"/home/crochetch/Documents/Projects/VerificationQUIC/PANTHER-SCP/panther/outputs/experiment_2024-10-11_14-22-31/certs:/app/certs/" + - "/home/crochetch/Documents/Projects/VerificationQUIC/PANTHER-SCP/panther/outputs/experiment_2024-10-15_18-37-52/logs/picoquic_server/:/app/logs/" + + - "/home/crochetch/Documents/Projects/VerificationQUIC/PANTHER-SCP/panther/config/certs/cert.pem:/opt/certs/cert.pem" + + - "/home/crochetch/Documents/Projects/VerificationQUIC/PANTHER-SCP/panther/config/certs/key.pem:/opt/certs/key.pem" + + - "/home/crochetch/Documents/Projects/VerificationQUIC/PANTHER-SCP/panther/config/tls_keys:/opt/ticket/ticket.key" + networks: - - panther_network + panther_network: + aliases: + - picoquic_server picoquic_client: image: picoquic_rfc9000_panther:latest @@ -32,15 +51,36 @@ services: - "8081:8081" + + depends_on: + - picoquic_server + # condition: service_healthy + + # environment: ROLE: client - VERBOSITY: - entrypoint: "/opt/picoquic/./picoquicdemo -l - -T /opt/ticket/ticket.pem -v 00000001 -c /opt/certs/cert.pem -k /opt/certs/key.pem -a hq-interop -D -L -n servername 11.0.0.1 4443 > /app/logs/client.log 2>&1" + + TARGET: picoquic_server + MESSAGE: "" + + tty: true + stdin_open: true + working_dir: "/opt/picoquic" + command: + - "./picoquicdemo -c /opt/certs/cert.pem -k /opt/certs/key.pem -T /opt/ticket/ticket.key -a hq-interop -D -L -v 00000001 picoquic_server 4443 > /app/logs/client.log 2>&1" volumes: - - "/home/crochetch/Documents/Projects/VerificationQUIC/PANTHER-SCP/panther/outputs/experiment_2024-10-11_14-22-31/logs/picoquic_client:/app/logs/" - - "/home/crochetch/Documents/Projects/VerificationQUIC/PANTHER-SCP/panther/outputs/experiment_2024-10-11_14-22-31/certs:/app/certs/" + - "/home/crochetch/Documents/Projects/VerificationQUIC/PANTHER-SCP/panther/outputs/experiment_2024-10-15_18-37-52/logs/picoquic_client/:/app/logs/" + + - "/home/crochetch/Documents/Projects/VerificationQUIC/PANTHER-SCP/panther/config/certs/cert.pem:/opt/certs/cert.pem" + + - "/home/crochetch/Documents/Projects/VerificationQUIC/PANTHER-SCP/panther/config/certs/key.pem:/opt/certs/key.pem" + + - "/home/crochetch/Documents/Projects/VerificationQUIC/PANTHER-SCP/panther/config/tls_keys:/opt/ticket/ticket.key" + networks: - - panther_network + panther_network: + aliases: + - picoquic_client networks: diff --git a/panther/plugins/environments/network_environment/docker_compose/docker_compose_plugin.py b/panther/plugins/environments/network_environment/docker_compose/docker_compose_plugin.py index 69c69a21d3..dfc7b7c4f2 100644 --- a/panther/plugins/environments/network_environment/docker_compose/docker_compose_plugin.py +++ b/panther/plugins/environments/network_environment/docker_compose/docker_compose_plugin.py @@ -33,13 +33,16 @@ def __init__( self.network_driver = network_driver self.templates_dir = templates_dir self.output_dir = output_dir + self.log_dirs = os.path.join(self.output_dir, "logs") self.rendered_docker_compose_path = os.path.join( - self.output_dir, "logs", "docker-compose.yml" + self.output_dir, "docker-compose.yml" ) self.compose_file_path = Path(self.services_network_config_file_path) self.services = {} self.deployment_commands = {} + self.timeout = 60 self.jinja_env = Environment(loader=FileSystemLoader(self.templates_dir)) + self.jinja_env.filters['realpath'] = lambda x: os.path.abspath(x) def build_images(self): """ @@ -69,224 +72,67 @@ def find_free_port( return port raise RuntimeError(f"No free ports available in range {start_port}-{end_port}") - def configure_network(self, 
services: Dict[str, Dict[str, Any]]): - """ - Generates a docker-compose.yml file dynamically based on the provided services. - :param services: A dictionary mapping service names to their configurations. - """ - self.logger.info("Generating dynamic docker-compose.yml") - compose_dict = { - "version": "3.8", - "services": {}, - "networks": {self.network_name: {"driver": self.network_driver}}, - } - - assigned_ports = set() - - for service_name, config in services.items(): - image = config.get("image") - ports = config.get("ports", []) - depends_on = config.get("depends_on", []) - environment_vars = config.get("environment", {}) - - service_type = service_name.split("_")[ - -1 - ].upper() # Assuming naming convention like 'picoquic_server_rfc9000_panther' - log_path = config.get("log_path", "/app/logs/service.log") # Get log path - - # Adjust port assignments to avoid conflicts - adjusted_ports = [] - for port_mapping in ports: - try: - host_port, container_port = map(int, port_mapping.split(":")) - except ValueError: - self.logger.error( - f"Invalid port mapping '{port_mapping}' for service '{service_name}'. Skipping." - ) - continue - - if host_port in assigned_ports or not self.is_port_free(host_port): - self.logger.warning( - f"Host port {host_port} is in use. Finding an alternative port." - ) - try: - new_host_port = self.find_free_port( - assigned_ports=assigned_ports - ) - self.logger.info( - f"Assigning new host port {new_host_port} for container port {container_port}." - ) - adjusted_ports.append(f"{new_host_port}:{container_port}") - assigned_ports.add(new_host_port) - except RuntimeError as e: - self.logger.error( - f"Failed to assign a free port for {service_name}: {e}" - ) - raise e - else: - adjusted_ports.append(port_mapping) - assigned_ports.add(host_port) - - service_def = { - "image": image, - "container_name": service_name, - "ports": ( - adjusted_ports if self.network_driver != "host" else [] - ), # Ports are not needed in host mode - "networks": [self.network_name], - "volumes": [ - f"{Path(self.services_network_config_file_path).parent}/container_logs/{service_name}:/app/logs" - ], - "logging": { - "driver": "json-file", - "options": { - "max-size": "10m", - "max-file": "3", - "path": f"{Path(self.services_network_config_file_path).parent}/logs/{service_name}.log", - }, - }, - "restart": "always", - "command": compose_command, # Set the command directly - } - - if depends_on: - service_def["depends_on"] = depends_on - - if environment_vars: - service_def["environment"] = environment_vars - - # Add health checks if specified - if "healthcheck" in config: - service_def["healthcheck"] = config["healthcheck"] - - compose_dict["services"][service_name] = service_def - - # Validate unique host ports - all_host_ports = [] - for service in compose_dict["services"].values(): - for port in service.get("ports", []): - host_port = int(port.split(":")[0]) - all_host_ports.append(host_port) - if len(all_host_ports) != len(set(all_host_ports)): - self.logger.error( - "Duplicate host ports detected in the Docker Compose configuration." - ) - raise ValueError( - "Duplicate host ports detected in the Docker Compose configuration." 
- ) - - with open(self.services_network_config_file_path, "w") as compose_file: - self.logger.debug( - f"Writing Docker Compose file to '{self.services_network_config_file_path}' with contents: {compose_dict}" - ) - yaml.safe_dump(compose_dict, compose_file, default_flow_style=False) - - self.logger.info( - f"Docker Compose file generated at '{self.services_network_config_file_path}'" - ) - - # def setup_environment(self, services: Dict[str, Dict[str, Any]]): - # """ - # Sets up the Docker Compose environment by generating the compose file and bringing up services. - # :param services: A dictionary mapping service names to their configurations. - # """ - # try: - # self.configure_network(services) - # except Exception as e: - # self.logger.error(f"Failed to generate Docker Compose file: {e}") - # raise e - - # self.logger.info("Starting Docker Compose services") - # try: - # if self.network_driver == "host": - # # In host mode, no need to use 'docker-compose up' - # # Instead, run each container with --network host - # for service_name, config in services.items(): - # image = config.get('image') - # environment_vars = config.get('environment', {}) - # env_str = ' '.join([f"-e {key}='{value}'" for key, value in environment_vars.items()]) - # cmd = f"docker run -d --name {service_name} --network host {env_str} {image}" - # self.logger.debug(f"Executing command: {cmd}") - # subprocess.run( - # cmd, - # shell=True, - # check=True, - # stdout=subprocess.PIPE, - # stderr=subprocess.PIPE - # ) - # else: - # # For other network drivers, use docker-compose - # subprocess.run( - # ["docker", "compose", "-f", self.services_network_config_file_path, "up", "-d"], - # check=True, - # stdout=subprocess.PIPE, - # stderr=subprocess.PIPE - # ) - # self.logger.info("Docker Compose services started successfully") - # except subprocess.CalledProcessError as e: - # self.logger.error(f"Failed to start Docker Compose services: {e.stderr}") - # subprocess.run( - # ["docker", "compose", "-f", self.services_network_config_file_path, "down"], - # check=True, - # stdout=subprocess.PIPE, - # stderr=subprocess.PIPE - # ) - # raise e - def setup_environment( - self, services: Dict[str, Dict[str, Any]], deployment_commands: Dict[str, str] + self, services: Dict[str, Dict[str, Any]], deployment_info: Dict[str, Dict[str, Any]], paths: Dict[str, str], timestamp: str ): """ Sets up the Docker Compose environment by generating the docker-compose.yml file with deployment commands. :param services: Dictionary of services with their configurations. - :param deployment_commands: Dictionary of deployment commands generated by service managers. + :param deployment_info: Dictionary containing commands and volumes for each service. + :param paths: Dictionary containing various path configurations. + :param timestamp: The timestamp string to include in log paths. 
""" self.services = services - self.deployment_commands = deployment_commands + self.deployment_info = deployment_info self.logger.debug( - f"Setting up Docker Compose environment with services: {services} with commands {deployment_commands}" + f"Setting up Docker Compose environment with services: {services} and deployment info: {deployment_info}" ) - self.generate_docker_compose() + self.generate_docker_compose(paths=paths, timestamp=timestamp) self.logger.info("Docker Compose environment setup complete") + def deploy_services(self): self.logger.info("Deploying services") self.launch_docker_compose() - def generate_docker_compose(self): + def generate_docker_compose(self, paths: Dict[str, str], timestamp: str): """ Generates the docker-compose.yml file using the provided services and deployment commands. + + :param paths: Dictionary containing various path configurations. + :param timestamp: The timestamp string to include in log paths. """ try: # Ensure the log directory for each service exists for service_name in self.services.keys(): - log_dir = os.path.join(self.output_dir, "logs", service_name) + log_dir = os.path.join(self.log_dirs, service_name) if not os.path.exists(log_dir): os.makedirs(log_dir) self.logger.info(f"Created log directory: {log_dir}") template = self.jinja_env.get_template("docker-compose-template.j2") rendered = template.render( services=self.services, - deployment_commands=self.deployment_commands, - output_dir=os.path.abspath(self.output_dir), - cert_dir=os.path.abspath(os.path.join(self.output_dir, "certs")), + deployment_info=self.deployment_info, + paths=paths, + timestamp=timestamp, + log_dir=self.log_dirs, ) + # Write the rendered content to docker-compose.generated.yml with open(self.compose_file_path, "w") as f: f.write(rendered) + + with open(self.rendered_docker_compose_path, "w") as f: + f.write(rendered) + self.logger.info( f"Docker Compose file generated at '{self.compose_file_path}'" ) - # Copy the generated docker-compose.yml to the output directory - output_compose_path = os.path.join(self.output_dir, "logs", "docker-compose.yml") - with open(output_compose_path, "w") as output_file: - output_file.write(rendered) - self.logger.info(f"Copied Docker Compose file to '{output_compose_path}'") - except Exception as e: self.logger.error( f"Failed to generate Docker Compose file: {e}\n{traceback.format_exc()}" ) + raise e def launch_docker_compose(self): """ @@ -306,6 +152,7 @@ def launch_docker_compose(self): "-f", str(self.compose_file_path), "up", + "-d" ], check=True, stdout=subprocess.PIPE, diff --git a/panther/plugins/implementations/quic/picoquic/Dockerfile b/panther/plugins/implementations/quic/picoquic/Dockerfile index 38e61812b7..316882fb8e 100644 --- a/panther/plugins/implementations/quic/picoquic/Dockerfile +++ b/panther/plugins/implementations/quic/picoquic/Dockerfile @@ -4,9 +4,7 @@ ENV DEBIAN_FRONTEND=noninteractive RUN ln -fs /usr/share/zoneinfo/UTC /etc/localtime && \ apt-get update && \ apt-get install -y build-essential git cmake software-properties-common \ - openssl libssl-dev pkg-config clang python3 - -WORKDIR /opt + openssl libssl-dev pkg-config clang python3 net-tools # Define build arguments for version-specific configurations ARG VERSION=master @@ -22,7 +20,8 @@ RUN echo install Scope::Guard | perl -MCPAN - RUN apt-get install -y jq # Function to parse and build dependencies # TODO make more modular -RUN echo "Starting dependency installation..." && \ +RUN cd /opt && \ + echo "Starting dependency installation..." 
&& \ echo $DEPENDENCIES | jq -c '.[]' | while read -r dep; do \ DEP_NAME=$(echo $dep | jq -r '.name'); \ DEP_URL=$(echo $dep | jq -r '.url'); \ @@ -58,9 +57,8 @@ EXPOSE 8080 # Ensure the log directory exists RUN mkdir -p /app/logs - -# Copy service manager scripts if needed -COPY service_manager.py /opt/service_manager.py +RUN mkdir -p /opt/certs +RUN mkdir -p /opt/ticket # Set entrypoint (can be overridden) -ENTRYPOINT ["/bin/bash"] +ENTRYPOINT [ "/bin/bash", "-l", "-c" ] \ No newline at end of file diff --git a/panther/plugins/implementations/quic/picoquic/config.yaml b/panther/plugins/implementations/quic/picoquic/config.yaml index d11afab58a..cff0e33a95 100644 --- a/panther/plugins/implementations/quic/picoquic/config.yaml +++ b/panther/plugins/implementations/quic/picoquic/config.yaml @@ -13,7 +13,7 @@ picoquic: alpn: param: "-a" value: "hq-interop" - additional_parameters: "-D -L -n servername" + additional_parameters: "-D -L" binary: dir: "/opt/picoquic" name: "./picoquicdemo" @@ -27,9 +27,15 @@ picoquic: cert: param: "-c" file: "/opt/certs/cert.pem" + local_file: "config/certs/cert.pem" key: param: "-k" file: "/opt/certs/key.pem" + local_file: "config/certs/key.pem" + ticket_file: + param: "-T" + file: "/opt/ticket/ticket.key" + local_file: "config/tls_keys/" logging: log_path: "/app/logs/client.log" qlog: @@ -42,7 +48,7 @@ picoquic: alpn: param: "-a" value: "hq-interop" - additional_parameters: "-D -L -n servername" + additional_parameters: " -n servername -D -L" binary: dir: "/opt/picoquic" name: "./picoquicdemo" @@ -56,9 +62,15 @@ picoquic: cert: param: "-c" file: "/opt/certs/cert.pem" + local_file: "config/certs/cert.pem" key: param: "-k" file: "/opt/certs/key.pem" + local_file: "config/certs/key.pem" + ticket_file: + param: "-T" + file: "/opt/ticket/ticket.key" + local_file: "config/tls_keys/" logging: log_path: "/app/logs/server.log" qlog: diff --git a/panther/plugins/implementations/quic/picoquic/service_manager.py b/panther/plugins/implementations/quic/picoquic/service_manager.py index a91042bcc8..032ebd9627 100644 --- a/panther/plugins/implementations/quic/picoquic/service_manager.py +++ b/panther/plugins/implementations/quic/picoquic/service_manager.py @@ -30,6 +30,23 @@ def __init__(self,implementation_config_path: str = "plugins/implementations/qui templates = os.listdir(self.templates_dir) self.logger.debug(f"Available templates in '{self.templates_dir}': {templates}") + def get_base_url(self, service_name: str) -> str: + """ + Returns the base URL for the given service. + """ + # Assuming services are accessible via localhost and mapped ports + # You might need to adjust this based on your actual setup + port_mappings = { + 'picoquic_server': 8080, + 'picoquic_client': 8081, + } + port = port_mappings.get(service_name, None) + if port: + return f"http://localhost:{port}/" + else: + self.logger.error(f"No port mapping found for service '{service_name}'") + return "" + def get_implementation_name(self) -> str: return "picoquic" @@ -101,125 +118,102 @@ def load_config(self) -> dict: self.logger.error(f"Failed to load configuration: {e}\n{traceback.format_exc()}") return {} - def generate_deployment_commands(self, service_params: Dict[str, Any], environment:str) -> Dict[str, str]: + def generate_deployment_commands(self, service_params: Dict[str, Any], environment: str) -> Dict[str, Any]: """ - Generates deployment commands based on service parameters using the protocol-specific templates. 
+ Generates deployment commands and collects volume mappings based on service parameters. :param service_params: Parameters specific to the service. - :return: A dictionary mapping service names to their respective command strings. + :param environment: The environment in which the services are being deployed. + :return: A dictionary with service name as key and a dictionary containing command and volumes. """ self.logger.debug(f"Generating deployment commands for service: {service_params}") - role = service_params.get("parameters").get("role") + role = service_params.get("role") version = service_params.get("version", "rfc9000") version_config = self.config.get("picoquic", {}).get("versions", {}).get(version, {}) - + + # Determine if network interface parameters should be included based on environment include_interface = environment not in ["docker_compose"] - self.logger.debug(f"Using version '{version}' configuration: {version_config}") - # Extract parameters based on role + # Build parameters for the command template + params = { + "binary": { + "dir": version_config.get(role, {}).get("binary", {}).get("dir", "/opt/picoquic"), + "name": version_config.get(role, {}).get("binary", {}).get("name", "./picoquicdemo"), + }, + "initial_version": version_config.get(role, {}).get("initial_version", "00000001"), + "protocol": { + "alpn": version_config.get(role, {}).get("protocol", {}).get("alpn", {}), + "additional_parameters": version_config.get(role, {}).get("protocol", {}).get("additional_parameters", ""), + }, + "network": { + "interface": version_config.get(role, {}).get("network", {}).get("interface", {}), + "port": version_config.get(role, {}).get("network", {}).get("port", 4443), + "destination": service_params.get("target", version_config.get(role, {}).get("network", {}).get("destination", "picoquic_server")), + }, + "certificates": { + "cert_param": version_config.get(role, {}).get("certificates", {}).get("cert", {}).get("param"), + "cert_file": version_config.get(role, {}).get("certificates", {}).get("cert", {}).get("file"), + "cert_local_file": version_config.get(role, {}).get("certificates", {}).get("cert", {}).get("local_file"), + "key_param": version_config.get(role, {}).get("certificates", {}).get("key", {}).get("param"), + "key_file": version_config.get(role, {}).get("certificates", {}).get("key", {}).get("file"), + "key_local_file": version_config.get(role, {}).get("certificates", {}).get("key", {}).get("local_file"), + }, + "ticket_file": { + "param": version_config.get(role, {}).get("ticket_file", {}).get("param"), + "file": version_config.get(role, {}).get("ticket_file", {}).get("file"), + "local_file": version_config.get(role, {}).get("ticket_file", {}).get("local_file"), + }, + "logging": version_config.get(role, {}).get("logging", {}), + } + + # For the client, include target and message if available if role == "client": - template_name = "client_command.j2" - params = { - "binary": { - "dir": self.replace_env_vars(version_config.get("client", {}).get("binary", {}).get("dir", "./picoquic")), - "name": version_config.get("client", {}).get("binary", {}).get("name", "./picoquicdemo"), - }, - "ticket_file": service_params.get("parameters").get("ticket-file"), - "initial_version": version_config.get("client", {}).get("initial_version"), - "certificates": { - "cert_param": version_config.get("client", {}).get("certificates", {}).get("cert", {}).get("param"), - "cert_file": self.replace_env_vars(version_config.get("client", {}).get("certificates", {}).get("cert", 
{}).get("file")), - "key_param": version_config.get("client", {}).get("certificates", {}).get("key", {}).get("param"), - "key_file": self.replace_env_vars(version_config.get("client", {}).get("certificates", {}).get("key", {}).get("file")), - }, - "protocol": { - "alpn" : { - "param": version_config.get("server", {}).get("protocol", {}).get("alpn", {}).get("param"), - "value": version_config.get("server", {}).get("protocol", {}).get("alpn", {}).get("value"), - }, - "additional_parameters": version_config.get("client", {}).get("protocol", {}).get("additional_parameters"), - }, - "network": { - "port": version_config.get("client", {}).get("network", {}).get("port"), - "destination": service_params.get("target", version_config.get("client", {}).get("network", {}).get("destination")), - }, - "logging": { - "log_path": version_config.get("client", {}).get("logging", {}).get("log_path"), - "qlog": { - "param": version_config.get("client", {}).get("logging", {}).get("qlog", {}).get("param"), - "path": version_config.get("client", {}).get("logging", {}).get("qlog", {}).get("path"), - } - } - } - # Conditionally include network interface parameters - if include_interface: - params["network"]["interface"] = { - "param": version_config.get("client", {}).get("network", {}).get("interface", {}).get("param"), - "value": version_config.get("client", {}).get("network", {}).get("interface", {}).get("value"), - } - # Handle missing parameters - missing_params = self.check_missing_params(params) - if missing_params: - self.logger.error(f"Missing parameters for server service: {missing_params}") - raise KeyError(f"Missing parameters for server service: {missing_params}") + params["target"] = service_params.get("target") + params["message"] = service_params.get("message") + + # Conditionally include network interface parameters + if not include_interface: + params["network"].pop("interface", None) + + # Collect volume mappings + volumes = [] + # Only add certificate volumes if the user doesn't want to generate new certificates + if not service_params.get('generate_new_certificates', False): + # Certificates + volumes.append({ + "local": os.path.abspath(params["certificates"]["cert_local_file"]), + "container": params["certificates"]["cert_file"] + }) + volumes.append({ + "local": os.path.abspath(params["certificates"]["key_local_file"]), + "container": params["certificates"]["key_file"] + }) + # Ticket file (if applicable) + if params["ticket_file"]["local_file"]: + volumes.append({ + "local": os.path.abspath(params["ticket_file"]["local_file"]), + "container": params["ticket_file"]["file"] + }) - elif role == "server": - template_name = "server_command.j2" - params = { - "binary": { - "dir": self.replace_env_vars(version_config.get("server", {}).get("binary", {}).get("dir", "./picoquic")), - "name": version_config.get("server", {}).get("binary", {}).get("name", "./picoquicdemo"), - }, - "logging": { - "log_path": version_config.get("server", {}).get("logging", {}).get("log_path"), - "qlog": { - "param": version_config.get("server", {}).get("logging", {}).get("qlog", {}).get("param"), - "path": version_config.get("server", {}).get("logging", {}).get("qlog", {}).get("path"), - } - }, - "certificates": { - "cert_param": version_config.get("server", {}).get("certificates", {}).get("cert", {}).get("param"), - "cert_file": self.replace_env_vars(version_config.get("client", {}).get("certificates", {}).get("cert", {}).get("file")), - "key_param": version_config.get("server", {}).get("certificates", {}).get("key", 
{}).get("param"), - "key_file": self.replace_env_vars(version_config.get("client", {}).get("certificates", {}).get("key", {}).get("file")), - }, - "protocol": { - "alpn" : { - "param": version_config.get("server", {}).get("protocol", {}).get("alpn", {}).get("param"), - "value": version_config.get("server", {}).get("protocol", {}).get("alpn", {}).get("value"), - }, - "additional_parameters": version_config.get("server", {}).get("protocol", {}).get("additional_parameters"), - }, - "network": { - "port": version_config.get("server", {}).get("network", {}).get("port"), - "destination": service_params.get("destination-value", version_config.get("server", {}).get("network", {}).get("destination")), - } - } - # Conditionally include network interface parameters - if include_interface: - params["network"]["interface"] = { - "param": version_config.get("client", {}).get("network", {}).get("interface", {}).get("param"), - "value": version_config.get("client", {}).get("network", {}).get("interface", {}).get("value"), - } - # Handle missing parameters - missing_params = self.check_missing_params(params) - if missing_params: - self.logger.error(f"Missing parameters for server service: {missing_params}") - raise KeyError(f"Missing parameters for server service: {missing_params}") - else: - self.logger.error(f"Unknown role '{role}' for service.") - raise ValueError(f"Unknown role '{role}' for service.") # Render the appropriate template try: + template_name = f"{role}_command.j2" self.logger.debug(f"Rendering command using template '{template_name}' with parameters: {params}") template = self.jinja_env.get_template(template_name) command = template.render(**params) + + # Clean up the command string + command_str = command.replace('\t', ' ').replace('\n', ' ').strip() + + command_str = '"' + command_str + '"' + + # Create the command list + working_dir = version_config.get(role, {}).get("binary", {}).get("dir", "/opt/picoquic") + service_name = service_params.get("name") - if not service_name: - service_name = "picoquic_client" if role == "client" else "picoquic_server" - self.logger.debug(f"Generated command for '{service_name}': {command}") - return {service_name: command} + self.logger.debug(f"Generated command for '{service_name}': {command_str}") + return {service_name: {"command": command_str, "volumes": volumes, "working_dir": working_dir}} except Exception as e: self.logger.error(f"Failed to render command for service '{service_params.get('name', 'unknown')}': {e}\n{traceback.format_exc()}") raise e diff --git a/panther/plugins/implementations/quic/picoquic/templates/client_command.j2 b/panther/plugins/implementations/quic/picoquic/templates/client_command.j2 index 9047dda8a7..551a3caea8 100644 --- a/panther/plugins/implementations/quic/picoquic/templates/client_command.j2 +++ b/panther/plugins/implementations/quic/picoquic/templates/client_command.j2 @@ -1 +1 @@ -{{ binary.dir }}/{{ binary.name }} -l - -T {{ ticket_file }} -v {{ initial_version }} {{ certificates.cert_param }} {{ certificates.cert_file }} {{ certificates.key_param }} {{ certificates.key_file }} {{ protocol.alpn.param }} {{ protocol.alpn.value }} {% if network.interface %} {{ network.interface.param }} {{ network.interface.value }} {% endif %} {{ protocol.additional_parameters }} {{ network.destination }} {{ network.port }} > {{ logging.log_path }} 2>&1 \ No newline at end of file +{{ binary.name }} {{ certificates.cert_param }} {{ certificates.cert_file }} {{ certificates.key_param }} {{ certificates.key_file }} {{ 
ticket_file.param }} {{ ticket_file.file }} {{ protocol.alpn.param }} {{ protocol.alpn.value }} {{ protocol.additional_parameters }} {% if network.interface %} {{ network.interface.param }} {{ network.interface.value }} {% endif %} {% if initial_version %} -v {{ initial_version }} {% endif %} {{ target }} {{ network.port }} > {{ logging.log_path }} 2>&1 \ No newline at end of file diff --git a/panther/plugins/implementations/quic/picoquic/templates/server_command.j2 b/panther/plugins/implementations/quic/picoquic/templates/server_command.j2 index 6e026fb048..411a33923e 100644 --- a/panther/plugins/implementations/quic/picoquic/templates/server_command.j2 +++ b/panther/plugins/implementations/quic/picoquic/templates/server_command.j2 @@ -1 +1 @@ -{{ binary.dir }}/{{ binary.name }} -q {{ logging.qlog.path }} {{ certificates.cert_param }} {{ certificates.cert_file }} {{ certificates.key_param }} {{ certificates.key_file }} {{ protocol.alpn.param }} {{ protocol.alpn.value }}{% if network.interface %} {{ network.interface.param }} {{ network.interface.value }} {% endif %} -p {{ network.port }} {{ protocol.additional_parameters }} > {{ logging.log_path }} 2>&1 \ No newline at end of file +{{ binary.name }} {{ certificates.cert_param }} {{ certificates.cert_file }} {{ certificates.key_param }} {{ certificates.key_file }} {{ protocol.alpn.param }} {{ protocol.alpn.value }} {{ protocol.additional_parameters }} {% if network.interface %} {{ network.interface.param }} {{ network.interface.value }} {% endif %} -p {{ network.port }} > {{ logging.log_path }} 2>&1 \ No newline at end of file
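For reference, the reworked templates above are what produce the command strings embedded in docker-compose.generated.yml earlier in this patch. With the defaults from the picoquic config.yaml, they render to roughly the following (a sketch; the exact strings depend on what generate_deployment_commands resolves at run time):

# server (working_dir /opt/picoquic)
./picoquicdemo -c /opt/certs/cert.pem -k /opt/certs/key.pem -a hq-interop -n servername -D -L -p 4443 > /app/logs/server.log 2>&1

# client (working_dir /opt/picoquic)
./picoquicdemo -c /opt/certs/cert.pem -k /opt/certs/key.pem -T /opt/ticket/ticket.key -a hq-interop -D -L -v 00000001 picoquic_server 4443 > /app/logs/client.log 2>&1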