diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index 23a0ff3..21218b0 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -3,6 +3,10 @@ Release notes for the AD Build System (Software Factory) ## Releases: +* tag:1.2.0 30-Mar-2026 P. Nisperos (pnispero) + * Refactor configuration for more transparency. And refactored deployment controller to a more simpler generic deployment that most types of apps can follow. Fixed bugs with deployment controller + * Removed deploymentType, added in playbook to pass to deployment, every deployment through CLI is now polled as well like IOCs for easier to track progress, and should work for other types of deployments + * tag:1.1.12 17-Feb-2026 P. Nisperos (pnispero) * CLI bs clone bug fix with tab complete diff --git a/bs_cli/adbs_cli/admin_commands.py b/bs_cli/adbs_cli/admin_commands.py index 9659be7..fb8562b 100644 --- a/bs_cli/adbs_cli/admin_commands.py +++ b/bs_cli/adbs_cli/admin_commands.py @@ -82,6 +82,7 @@ def add_initial_deployment(component: str, verbose: bool = False): # so if api ever changes, you don't need to update this function. # Grab deployment info from cram ls + click.confirm("Ensure you did a 'kinit' before continuing. 
Continue?") click.echo("== ADBS == Adding initial deployment configuration") click.echo("== ADBS == Running 'cram ls'...") @@ -108,24 +109,16 @@ def add_initial_deployment(component: str, verbose: bool = False): print(json.dumps(facility_data, indent=2)) print("\n" + "-"*50 + "\n") if (click.confirm("Does the above deployment info look correct")): - question = [ - inquirer.List( - "deploymentType", - message="What type of deployment will this app use?", - choices=["ioc", "hla", "tools", "matlab", "pydm", "container"] - ), - ] - deployment_type = inquirer.prompt(question)['deploymentType'] for facility, facility_data in result.items(): data_to_write = { "facility": facility, "component_name": component, "tag": facility_data['tag'], "user": cli_configuration['github_uname'], - "type": deployment_type + "type": "ioc", + "ioc_list": facility_data['dependsOn'] } - if (deployment_type == 'ioc'): - data_to_write["ioc_list"] = facility_data['dependsOn'] + data_to_write["ioc_list"] = facility_data['dependsOn'] request = Request(Component(component), Api.DEPLOYMENT) request.set_endpoint(ApiEndpoints.DEPLOYMENT_INITIAL) request.add_dict_to_payload(data_to_write) @@ -147,34 +140,20 @@ def add_repo(verbose: bool=False): manifest_filepath = user_src_repo + '/config.yaml' manifest_data = request.component.parse_manifest(manifest_filepath) - # 3) Populate the payload + # 3) Populate the payload from config.yaml request.add_to_payload("name", manifest_data["repo"]) request.add_to_payload("description", manifest_data["description"]) - request.add_to_payload("testingCriteria", manifest_data["testingCriteria"]) - request.add_to_payload("approvalRule", manifest_data["approvalRule"]) organization = manifest_data["organization"] request.add_to_payload("organization", organization) - issue_tracker = manifest_data["issueTracker"] - request.add_to_payload("issueTracker", issue_tracker) - if (issue_tracker != 'jira' and issue_tracker != 'github'): - click.echo("== ADBS == issue tracker must 
be jira or github.") - return - if (issue_tracker == 'jira'): - request.add_to_payload("jiraProjectKey", manifest_data["jiraProjectKey"]) - - build_os = {"buildOs": manifest_data.get("environments")} - request.add_dict_to_payload(build_os) request.add_to_payload("url", f"https://github.com/{organization}/{request.component.name}") request.add_to_payload("ssh", f"git@github.com:{organization}/{request.component.name}.git") - if (not click.confirm("Do you want to add automated build and tests?")): - request.add_to_payload("skipBuildAndTests", True) - + request.set_endpoint(ApiEndpoints.COMPONENT) request.post_request(verbose, msg="Add component to component database") - # Create another put request but to enable permissions for backend to receive events + # Enable permissions for backend to receive events request = Request(request.component) - request.set_endpoint(ApiEndpoints.COMPONENT_EVENT, + request.set_endpoint(ApiEndpoints.COMPONENT_EVENT, component_name=request.component.name, enable="true") request.put_request(log=verbose, msg="Enable events for component") @@ -197,10 +176,14 @@ def add_repo(verbose: bool=False): @click.option("-v", "--verbose", is_flag=True, required=False, help="More detailed output") def onboard_repo(ctx, verbose: bool=False): """Command to onboard a repo to software factory. - Creates config.yaml, adds component to database, adds initial deployment configuration (if existing IOC application)""" - click.confirm("Ensure you did a 'kinit' before continuing.") + Adds component to database and adds initial deployment configuration (if existing IOC application). + NOTE: You must create your own config.yaml before running this command. + See BuildSystem/examples/ for templates.""" + click.confirm("Please create a config.yaml in the top level of your repo (see BuildSystem/examples/ for templates) before running this command. 
Continue?") + + # Add component to database (reads repo/org/description from config.yaml) + add_repo(verbose) - # Create the config.yaml ===================================================== # Get user input at_top = click.confirm("Are you at the TOP level of your repo?") if (at_top): @@ -213,110 +196,23 @@ def onboard_repo(ctx, verbose: bool=False): else: click.echo(f"Error: The specified path '{top_level}' does not exist.") return - - org_name = input(INPUT_PREFIX + "Specify name of GitHub organization: ") - description = input(INPUT_PREFIX + "Specify repo description: ") - build_command = input(INPUT_PREFIX + "Specify how to build (if applicable, can be as simple as 'make'): ") - question = [ - inquirer.List( - "issueTracker", - message="What issue tracking system does this app use?", - choices=["github", "jira"] - ), - ] - issue_tracker = inquirer.prompt(question)['issueTracker'] - jira_project_key = 'n/a' - if (issue_tracker == 'jira'): - jira_project_key = input(INPUT_PREFIX + "Specify jira project key: ") - question = [ - inquirer.Checkbox( - "buildOs", - message="What are the operating systems this app runs on?", - choices=["ROCKY9", "RHEL7", "RHEL6", "RHEL5"], - default=[], - ), - ] - build_os_list = inquirer.prompt(question)['buildOs'] - question = [ - inquirer.List( - "deploymentType", - message="What type of deployment will this app use?", - choices=["ioc", "hla", "tools", "matlab", "pydm", "container"] - ), - ] - deployment_type = inquirer.prompt(question)['deploymentType'] - - # Create the content - content = f"""# [Required] -# Basic component information -repo: {repo_name} -organization: {org_name} -url: https://github.com/{org_name}/{repo_name} -description: {description} - -# [Required] -# Continous integration -approvalRule: all -testingCriteria: all -issueTracker: {issue_tracker} -jiraProjectKey: {jira_project_key} - -# [Required] -# Type of deployment -# Types: [ioc, hla, tools, matlab, pydm, container] -deploymentType: {deployment_type} - -# 
[Optional] -# Build method for building the component -# Can be a simple command like 'make' -""" - if (build_command == ""): - content += "# build: \n" - else: - content += f"build: {build_command}\n" - - if (build_os_list == []): - content += """ -# [Optional] -# Environments this app runs on -# environments: -""" - else: - content += f""" -# [Optional] -# Environments this app runs on -environments: -{chr(10).join(' - ' + env for env in build_os_list)} -""" - runtime_dependencies = click.confirm("Are there any runtime dependencies?") - if (runtime_dependencies): - deps_input = click.prompt("Enter dependencies (comma-separated)") - dependencies = [d.strip() for d in deps_input.split(',')] - content += f""" -# [Optional] -# Directories and files needed to run application -runtimeDependencies: -{chr(10).join(' - ' + dependency for dependency in dependencies)} -""" + + # Parse manifest to determine deployment type and get repo info + manifest_data = Component().parse_manifest(os.path.join(top_level, 'config.yaml')) + repo_name = manifest_data["repo"] + org_name = manifest_data["organization"] + playbook = manifest_data.get('deploy', {}).get('playbook', '') + if 'ioc_module' in playbook: + deployment_type = 'ioc' + elif 'pydm_module' in playbook: + deployment_type = 'pydm' else: - content += f""" -# [Optional] -# Directories and files needed to run application -# runtimeDependencies: -""" - - # Generate full filepath - filepath = os.path.join(top_level, 'config.yaml') - - # Write to file - with open(filepath, 'w') as f: - f.write(content) - - click.echo(f"File '{filepath}' has been generated successfully!") + deployment_type = 'generic' - # Create the github actions deployment workflow file - if (deployment_type == "pydm"): - content = """name: Request Deployment - PyDM Display + # Write deploy.yml for github actions deployment (No IOC support yet) + if deployment_type == 'pydm' or deployment_type == 'generic': + deploy_content = f"""\ +name: Deploy on: 
workflow_dispatch: @@ -345,12 +241,12 @@ def onboard_repo(ctx, verbose: bool=False): description: 'Tag to deploy' required: true type: string - + permissions: deployments: write contents: read actions: read - + jobs: deploy: uses: ad-build-test/build-system-playbooks/.github/workflows/request-deployment.yml@main @@ -360,24 +256,16 @@ def onboard_repo(ctx, verbose: bool=False): deploy_to_facet: ${{ inputs.deploy_to_facet }} deploy_to_testfac: ${{ inputs.deploy_to_testfac }} tag: ${{ inputs.tag }} - deployment_type: 'pydm' + playbook: '{playbook}' """ - - # Generate full filepath - filepath = os.path.join(top_level, '.github/workflows/deploy.yml') - - # Create the directory if it doesn't exist - os.makedirs(os.path.dirname(filepath), exist_ok=True) - - # Write to file - with open(filepath, 'w') as f: - f.write(content) - - click.echo(f"File '{filepath}' has been generated successfully!") - - # If deployment type is IOC, then generate RELEASE_SITE, and remove .cram, and remove RELEASE_SITE from .gitignore - if (deployment_type == 'ioc'): - + deploy_yml_path = os.path.join(top_level, '.github/workflows/deploy.yml') + os.makedirs(os.path.dirname(deploy_yml_path), exist_ok=True) + with open(deploy_yml_path, 'w') as f: + f.write(deploy_content) + click.echo(f"File '{deploy_yml_path}' has been generated successfully!") + + # IOC-specific steps + if deployment_type == 'ioc': # Generate RELEASE_SITE release_site_contents = f""" #============================================================================== @@ -399,21 +287,17 @@ def onboard_repo(ctx, verbose: bool=False): ALARM_CONFIGS_TOP=/afs/slac/g/lcls/tools/AlarmConfigsTop #============================================================================== """ - # Generate full filepath filepath = os.path.join(top_level, 'RELEASE_SITE') - - # Write to file with open(filepath, 'w') as f: f.write(release_site_contents) - click.echo(f"File '{filepath}' has been generated successfully!") # Remove .cram directory try: 
shutil.rmtree(".cram") - click.echo(f"Successfully removed .cram directory") + click.echo("Successfully removed .cram directory") except FileNotFoundError: - pass # Already doesn't exist + pass # Remove RELEASE_SITE from .gitignore # Read all lines @@ -429,8 +313,6 @@ def onboard_repo(ctx, verbose: bool=False): file.writelines(filtered_lines) click.echo(f"Successfully removed RELEASE_SITE from .gitignore") - - add_repo(verbose) if (deployment_type == 'ioc'): if (click.confirm("Is this an existing IOC application?")): diff --git a/bs_cli/adbs_cli/cli_configuration.py b/bs_cli/adbs_cli/cli_configuration.py index 6ece1fe..32f7c9a 100644 --- a/bs_cli/adbs_cli/cli_configuration.py +++ b/bs_cli/adbs_cli/cli_configuration.py @@ -20,8 +20,8 @@ class ApiEndpoints(str, Enum): DEPLOYMENT_CREATE_GH_STATUS = "deployments/status" # From deployment controller api - DEPLOYMENT = "{deployment_type}/deployment" - DEPLOYMENT_REVERT = "{deployment_type}/deployment/revert" + DEPLOYMENT = "deployment" + DEPLOYMENT_REVERT = "ioc/deployment/revert" DEPLOYMENT_STATUS = "deployment/{task_id}/status" DEPLOYMENT_REPORT = "deployment/{task_id}/report" DEPLOYMENT_INFO = "deployment/info" diff --git a/bs_cli/adbs_cli/entry_point_commands.py b/bs_cli/adbs_cli/entry_point_commands.py index b6acbdc..5e7aae2 100644 --- a/bs_cli/adbs_cli/entry_point_commands.py +++ b/bs_cli/adbs_cli/entry_point_commands.py @@ -137,9 +137,10 @@ def generate_deployment_report(file_content: str, component_name: str, tag: str) click.echo(f"Report downloaded successfully to {file_path}") def poll_deployment(response, deployment_status_request: Request): - """Poll deployment until complete, return final status""" + """Poll deployment until complete, return (report_response, elog_url)""" data = response.json() task_id = data.get("task_id", None) + elog_url = "" sleep(2) # Wait a bit for deployment status for _ in range(30): # 5 min max deployment_status_request.set_endpoint(ApiEndpoints.DEPLOYMENT_STATUS, @@ -153,6 +154,7 
@@ def poll_deployment(response, deployment_status_request: Request): click.echo(f"\r\033[K{progress['percent']}% - {progress['current_step']} ", nl=False) if status_data["status"] == "completed": + elog_url = status_data.get("result", {}).get("elog_url", "") click.echo("\n== ADBS == Completed deployment. ") break @@ -160,7 +162,7 @@ def poll_deployment(response, deployment_status_request: Request): # Download report deployment_status_request.set_endpoint(ApiEndpoints.DEPLOYMENT_REPORT, task_id=task_id) - return deployment_status_request.get_request(log=False) + return deployment_status_request.get_request(log=False), elog_url @click.command() def configure_user(): @@ -223,6 +225,23 @@ def configure_user(): click.echo(f"** Successfully added to {conf_file} **") click.echo(f"Please source {conf_file} or reload your shell to apply changes.") + # Ensure ~/.bashrc sources ~/.profile.d/*.conf (required on SLAC S3DF) + bashrc_path = os.path.expanduser("~/.bashrc") + profile_d_snippet = ( + '\n# SLAC S3DF - source all files under ~/.profile.d\n' + 'if [[ -e ~/.profile.d && -n "$(ls -A ~/.profile.d/)" ]]; then\n' + ' source <(cat $(find -L ~/.profile.d -name \'*.conf\' | sort))\n' + 'fi\n' + ) + bashrc_has_snippet = False + if os.path.exists(bashrc_path): + with open(bashrc_path, "r") as f: + bashrc_has_snippet = "source <(cat $(find -L ~/.profile.d" in f.read() + if not bashrc_has_snippet: + with open(bashrc_path, "a") as f: + f.write(profile_d_snippet) + click.echo(f"** Added ~/.profile.d sourcing block to {bashrc_path} **") + @click.command() @click.option("-c", "--component", required=False, help="Component Name") @click.option("-b", "--branch", required=False, help="Branch Name") @@ -466,14 +485,17 @@ def deploy(component: str, facility: str, test: bool, ioc: str, tag: str, list: deployment_request = Request(Component(component), Api.DEPLOYMENT) deployment_request.set_component_name() - # Get app type + # Get app type from deploy.playbook in config.yaml user_src_repo 
= deployment_request.component.git_get_top_dir() manifest_filepath = user_src_repo + '/config.yaml' manifest_data = deployment_request.component.parse_manifest(manifest_filepath) - deployment_type = manifest_data.get('deploymentType', '').lower() - if not deployment_type: - click.echo("== ADBS == Error: 'deploymentType' not found or empty in manifest") - return + playbook = manifest_data.get('deploy', {}).get('playbook', '') + if 'ioc_module' in playbook: + deployment_type = 'ioc' + elif 'pydm_module' in playbook: + deployment_type = 'pydm' + else: + deployment_type = 'generic' # 1.1) Option - test if (test): @@ -523,8 +545,7 @@ def deploy(component: str, facility: str, test: bool, ioc: str, tag: str, list: return deployment_request.add_to_payload("component_name", deployment_request.component.name) deployment_request.add_to_payload("user", linux_uname) - deployment_request.set_endpoint(ApiEndpoints.DEPLOYMENT_REVERT, - deployment_type=deployment_type) + deployment_request.set_endpoint(ApiEndpoints.DEPLOYMENT_REVERT) click.echo("== ADBS == Deploying to " + str(user_specified_facilities) + "...") for facility in user_specified_facilities: deployment_request.add_to_payload("facility", facility) @@ -532,8 +553,8 @@ def deploy(component: str, facility: str, test: bool, ioc: str, tag: str, list: if (not response.ok): click.echo(f"== ADBS == Error - {response.json()}") return - response = poll_deployment(response, deployment_request) - file_content = response.content.decode('utf-8') + report_response, _ = poll_deployment(response, deployment_request) + file_content = report_response.content.decode('utf-8') generate_deployment_report(file_content, deployment_request.component.name, "revert") return @@ -559,13 +580,10 @@ def deploy(component: str, facility: str, test: bool, ioc: str, tag: str, list: "component_name": deployment_request.component.name, "tag": tag, "user": linux_uname, - "dry_run": dry_run + "dry_run": dry_run, + "playbook": playbook } - # 7) Figure out what 
the deployment type is and set the endpoint accordingly - user_src_repo = deployment_request.component.git_get_top_dir() - manifest_filepath = user_src_repo + '/config.yaml' - # 6) Error check - If user specified iocs - Confirm with database that every ioc found in the source tree is in the database. if (deployment_type == 'ioc' and ioc): # 6.1) First check that user is in repo @@ -674,21 +692,17 @@ def deploy(component: str, facility: str, test: bool, ioc: str, tag: str, list: if (deployment_type == 'pydm'): subsystem = deployment_request.component.name.replace("pydm-", "") # Remove "pydm-" playbook_args_dict["subsystem"] = subsystem - # Error check - For pydm deployments, there can be a pydm subsystem for more than one facility - # So user_specified_facilities is required. - if (len(user_specified_facilities) < 1): - click.echo("== ADBS == ERROR: Please specify the facility(s)") - - deployment_request.add_to_payload("return_elog", True) # Get elog entry back from deployment playbook to print to user # 8) Send deployment request to deployment controller - deployment_request.set_endpoint(ApiEndpoints.DEPLOYMENT, - deployment_type=deployment_type) + deployment_request.set_endpoint(ApiEndpoints.DEPLOYMENT) deployment_request.add_dict_to_payload(playbook_args_dict) if (len(user_specified_facilities) > 0): click.echo("== ADBS == Deploying to " + str(user_specified_facilities) + "...") + elif (deployment_type == 'ioc'): + click.echo("== ADBS == Deploying to all facilities with active deployments...") else: - click.echo("== ADBS == Deploying...") + click.echo("== ADBS == ERROR: Please specify the facility(s)") + return deployment_response = deployment_request.put_request(log=verbose) if (not deployment_response.ok): try: @@ -697,7 +711,7 @@ def deploy(component: str, facility: str, test: bool, ioc: str, tag: str, list: except Exception: pass - if (deployment_type == 'pydm'): # add github deployment status in progress + if (deployment_type == 'pydm' or deployment_type == 
'generic'): # add github deployment status in progress for all except ioc for now deployment_status_request = Request(Component(component), Api.BACKEND) deployment_status_request.add_to_payload("name", deployment_request.component.name) deployment_status_request.add_to_payload("tag", tag) @@ -714,18 +728,16 @@ def deploy(component: str, facility: str, test: bool, ioc: str, tag: str, list: except Exception: pass - if (deployment_type == 'ioc'): # If ioc deployment, then poll status - response = poll_deployment(response, deployment_request) - # Get the file content from the response - file_content = response.content.decode('utf-8') + # Poll all deployment types — PUT /deployment always returns 202 async + report_response, elog_url = poll_deployment(deployment_response, deployment_request) + if (deployment_type == 'ioc'): + file_content = report_response.content.decode('utf-8') generate_deployment_report(file_content, deployment_request.component.name, tag) - elog_url = deployment_response.json().get("elog_url", "") click.echo(f"== ADBS == Complete, log for details - {elog_url}") - if (deployment_type == 'pydm'): # add github deployment status success + if (deployment_type != 'ioc' or deployment_type == 'generic'): # add github deployment status success for all except ioc for now deployment_status_request.add_to_payload("status", "SUCCESS") deployment_status_request.add_to_payload("logUrl", elog_url) - response = deployment_status_request.put_request(log=verbose) for facility in user_specified_facilities: deployment_status_request.add_to_payload("facility", facility) deployment_status_request.set_endpoint(ApiEndpoints.DEPLOYMENT_CREATE_GH_STATUS) diff --git a/bs_cli/dist/adbs_cli-1.1.12-py3-none-any.whl b/bs_cli/dist/adbs_cli-1.1.12-py3-none-any.whl deleted file mode 100644 index 06c79aa..0000000 Binary files a/bs_cli/dist/adbs_cli-1.1.12-py3-none-any.whl and /dev/null differ diff --git a/bs_cli/dist/adbs_cli-1.1.12.tar.gz b/bs_cli/dist/adbs_cli-1.1.12.tar.gz deleted 
file mode 100644 index 01a7edc..0000000 Binary files a/bs_cli/dist/adbs_cli-1.1.12.tar.gz and /dev/null differ diff --git a/bs_cli/dist/adbs_cli-1.2.0-py3-none-any.whl b/bs_cli/dist/adbs_cli-1.2.0-py3-none-any.whl new file mode 100644 index 0000000..1267332 Binary files /dev/null and b/bs_cli/dist/adbs_cli-1.2.0-py3-none-any.whl differ diff --git a/bs_cli/dist/adbs_cli-1.2.0.tar.gz b/bs_cli/dist/adbs_cli-1.2.0.tar.gz new file mode 100644 index 0000000..e1c72e2 Binary files /dev/null and b/bs_cli/dist/adbs_cli-1.2.0.tar.gz differ diff --git a/bs_cli/setup.py b/bs_cli/setup.py index df05a7f..37a3bd0 100644 --- a/bs_cli/setup.py +++ b/bs_cli/setup.py @@ -2,7 +2,7 @@ # Avoided using pyproject.toml, becuase can't get an editable version installed (pip install -e .) setup( name='adbs_cli', - version='1.1.12', + version='1.2.0', description="Command line interface for the accelerator directorate build system (software factory)", author="Patrick Nisperos, Jerry Katzung, Claudio Bisegni", author_email="pnispero@slac.stanford.edu, katzung@slac.stanford.edu, bisegni@slac.stanford.edu", diff --git a/build_scripts/README b/build_scripts/README new file mode 100644 index 0000000..d186088 --- /dev/null +++ b/build_scripts/README @@ -0,0 +1,3 @@ +these build_scripts are not in use by the build containers as of 3/30/2026. +the core-build-system-backend just directly cd's into scratch directory, then runs the build command which the user provides in the config.yaml. +Will keep here temporarily. \ No newline at end of file diff --git a/build_scripts/start_build.sh b/build_scripts/start_build.sh index a0defe9..80e3b08 100755 --- a/build_scripts/start_build.sh +++ b/build_scripts/start_build.sh @@ -1,9 +1,20 @@ #!/bin/bash -# This is the official script for build containers. +# This is the official entrypoint script for build pods. 
+set -e -# Source the dev environment (This is needed for apps that have dependencies on the dev environment -# For example, apps that build for linuxRT have dependencies on certain library paths) -source /afs/slac/g/lcls/tools/script/ENVS64.bash +# If ADBS_COMMAND is set, run it directly (new config.yaml flow). +# The command from config.yaml is what runs — no hidden steps. +if [ -n "$ADBS_COMMAND" ]; then + cd "$ADBS_SOURCE" + echo "=== CBS Build ===" + echo "Component: $ADBS_COMPONENT" + echo "Branch: $ADBS_BRANCH" + echo "Command: $ADBS_COMMAND" + echo "=================" + eval "$ADBS_COMMAND" + exit $? +fi -# Call the main python script to do the build +# Legacy fallback: source dev environment and run start_build.py +source /afs/slac/g/lcls/tools/script/ENVS64.bash python3 /build/start_build.py \ No newline at end of file diff --git a/deploy_controller/deployment_controller.py b/deploy_controller/deployment_controller.py index 3f7613d..2c80ad2 100644 --- a/deploy_controller/deployment_controller.py +++ b/deploy_controller/deployment_controller.py @@ -4,7 +4,6 @@ Usage: python3 deployment_controller.py note - this would have to run 24/7 as a service """ -from contextlib import asynccontextmanager, contextmanager import os import shutil import uuid @@ -20,10 +19,10 @@ import time import asyncio from fastapi import FastAPI, BackgroundTasks, HTTPException -from fastapi.responses import JSONResponse, FileResponse, Response +from fastapi.responses import JSONResponse, FileResponse from pydantic import BaseModel -from typing import Dict, Optional -from datetime import datetime, timezone, timedelta +from typing import Optional +from datetime import datetime from dateutil import parser """ @@ -61,6 +60,7 @@ ELOG_HEADERS = {"x-vouch-idp-accesstoken": ELOG_USER_PASSWORD} APP_PATH = "/app" +REQUEST_TIMEOUT = 60 # seconds, for all external HTTP calls # Container deployment secrets are loaded per-app from environment variables. 
# Naming convention: CONTAINER_{APP_KEY}_{SECRET} @@ -76,7 +76,7 @@ def get_container_secrets(component_name: str) -> dict: } # NOTE - ORDER MATTERS (Last item on the list "wins" if there are overlapping files when deploying for multiple OS) # Please make sure latest OS is the last item. -USED_OS_LIST = ["RHEL7", "ROCKY9"] +USED_OS_LIST = ["rhel7", "rocky9"] FACILITIES_LIST = ["LCLS", "FACET", "TESTFAC", "DEV", "SANDBOX"] @@ -135,8 +135,8 @@ def save_task(task: DeploymentTask): "result": task.result, "error": task.error } - # Auto-expire after 5 mins - redis_client.setex(f"task:{task.task_id}", 300, json.dumps(task_dict)) + # Auto-expire after 10 mins + redis_client.setex(f"task:{task.task_id}", 600, json.dumps(task_dict)) def get_task(task_id: str) -> DeploymentTask: """Load task from Redis""" @@ -161,47 +161,34 @@ class RevertDict(Component): user: str facility: str ioc_list: Optional[list] = None - reboot_iocs: Optional[bool] = False # Optional + reboot_iocs: Optional[bool] = False -class IocDict(Component): - facilities: Optional[list] = None # Optional, defaults to None - tag: str - ioc_list: Optional[list] = None - user: str - dry_run: Optional[bool] = False # Optional - reboot_iocs: Optional[bool] = False # Optional - -class PydmDict(Component): - facilities: Optional[list] = None # Optional +class DeployDict(Component): + """Unified deployment request model. The playbook field determines app type and which + Ansible playbook runs. Special logic is applied for ioc_module and pydm_module playbooks; + everything else uses the generic handler (download release → run playbook).""" tag: str user: str - dry_run: Optional[bool] = False # Optional - subsystem: Optional[str] = "" # Optional Ex: [mps, mgnt, vac, prof, etc.] 
- return_elog: Optional[bool] = False # Optional - -class ContainerDict(Component): - facilities: Optional[list] = None # Optional - tag: str - user: str - return_elog: Optional[bool] = False # Optional - force_deploy: Optional[bool] = False # Optional - # App-specific configuration - docker_network: Optional[str] = None # Docker network name for inter-container DNS - migration_command: Optional[str] = None # e.g., "alembic upgrade head" — skipped if not set - health_check_path: Optional[str] = "/health" # Health check endpoint path - # Secrets — passed from GitHub Actions via core-build-system, with env var fallback + playbook: str # e.g. "ioc_module/ioc_deploy.yml" + facilities: Optional[list] = None + dry_run: Optional[bool] = False + return_elog: Optional[bool] = False + # IOC-specific + ioc_list: Optional[list] = None + reboot_iocs: Optional[bool] = False + # PyDM-specific + subsystem: Optional[str] = "" + # Container-specific + force_deploy: Optional[bool] = False + docker_network: Optional[str] = None + migration_command: Optional[str] = None + health_check_path: Optional[str] = "/health" database_url: Optional[str] = None redis_url: Optional[str] = None ghcr_token: Optional[str] = None ghcr_user: Optional[str] = None - -class AppDict(Component): - facilities: Optional[list] = None - tag: str - user: str - return_elog: Optional[bool] = False - artifact_url: Optional[str] = None # GitHub release asset URL - artifact_type: Optional[str] = 'rpm' # rpm, tar, zip + # Generic extra vars forwarded to the playbook as-is + extra_vars: Optional[dict] = None class InitialDeploymentDict(Component): # Used for the initial deployment endpoint @@ -235,7 +222,7 @@ def add_log_to_component(facility: str, timestamp: str, user: str, component_to_ "user": user, } endpoint = BACKEND_URL + f'deployments/{component_to_update}/{facility}/logs' - response = requests.post(endpoint, json=deployment_log) + response = requests.post(endpoint, json=deployment_log, timeout=REQUEST_TIMEOUT) 
return True def add_new_component(facility: str, app_type: str, component_name: str, @@ -260,7 +247,7 @@ def add_new_component(facility: str, app_type: str, component_name: str, logging.debug(f"new_component: {new_component}") endpoint = BACKEND_URL + 'deployments' - response = requests.post(endpoint, json=new_component) + response = requests.post(endpoint, json=new_component, timeout=REQUEST_TIMEOUT) return True def update_component_in_facility(facility: str, app_type: str, component_to_update: str, @@ -294,14 +281,14 @@ def update_component_in_facility(facility: str, app_type: str, component_to_upda # 4) Update component in db logging.debug(component) endpoint = BACKEND_URL + f'deployments/{component_to_update}/{facility}' - response = requests.put(endpoint, json=component) + response = requests.put(endpoint, json=component, timeout=REQUEST_TIMEOUT) logging.debug(f"response.json(): {response.json()}") return True def find_component_in_facility(facility: str, component_to_find: str) -> dict: """ Function to return component information """ endpoint = BACKEND_URL + f'deployments/{component_to_find}/{facility}' - response = requests.get(endpoint) + response = requests.get(endpoint, timeout=REQUEST_TIMEOUT) if (response.ok): return response.json()['payload'] else: @@ -312,7 +299,7 @@ def find_recent_deployment_for_component_facility(facility: str, component_to_fi deployment_index: 0 is the most recent, 1 is second most recent, etc. 
""" endpoint = BACKEND_URL + f'deployments/{component_to_find}/{facility}/logs' - response = requests.get(endpoint) + response = requests.get(endpoint, timeout=REQUEST_TIMEOUT) if (response.ok): payload = response.json()['payload'] else: @@ -343,29 +330,12 @@ def find_facility_an_ioc_is_in(ioc_to_find: str, component_with_ioc: str) -> lis def extract_date(entry) -> datetime: return datetime.fromisoformat(entry['date']) -def change_directory(path): - try: - os.chdir(path) - # print(f"Changed directory to {os.getcwd()}") - except FileNotFoundError: - raise FileNotFoundError(f"Directory {path} not found.") - -def rename_directory(src_dir, dest_dir): - try: - if os.path.isdir(src_dir): - os.rename(src_dir, dest_dir) - print(f"Renamed directory {src_dir} to {dest_dir}") - else: - raise ValueError(f"Directory {src_dir} not found.") - except Exception as e: - raise ValueError(f"Error renaming directory: {e}") - def create_tarball(directory, tag): tarball_name = f"{tag}.tar.gz" try: with tarfile.open(tarball_name, "w:gz") as tar: tar.add(directory, arcname=os.path.basename(directory)) - print(f"Created tarball: {tarball_name}") + logging.debug(f"Created tarball: {tarball_name}") return tarball_name except Exception as e: raise ValueError(f"Error creating tarball: {e}") @@ -383,7 +353,7 @@ def parse_shebang(shebang_line: str): binary_name = parts[1] # Second part is the binary name (e.g., myApp) return architecture, binary_name else: - print(f"Warning: Invalid path structure in shebang line: {shebang_line}") + logging.warning(f"Invalid path structure in shebang line: {shebang_line}") return None, None def extract_ioc_cpu_shebang_info(app_dir_name: str) -> dict: @@ -395,7 +365,7 @@ def extract_ioc_cpu_shebang_info(app_dir_name: str) -> dict: # 3) Check if the iocBoot directory exists if not os.path.exists(iocBoot_dir): - print(f"Error: The directory {iocBoot_dir} does not exist in the extracted files.") + logging.error(f"Directory {iocBoot_dir} does not exist in the 
extracted files.") return [] results = [] @@ -423,7 +393,7 @@ def extract_ioc_cpu_shebang_info(app_dir_name: str) -> dict: 'binary': binary_name }) else: - print(f"Warning: No shebang line in {st_cmd_path}") + logging.warning(f"No shebang line in {st_cmd_path}") return results @@ -440,7 +410,7 @@ def cleanup_temp_deployment_dir(directory: str): logging.error(f"Error cleaning up directory {directory}: {str(e)}") def download_release_helper(endpoint: str, download_dir: str, tarball_name: str, extract_tarball: bool): - response = requests.get(endpoint) + response = requests.get(endpoint, timeout=REQUEST_TIMEOUT) # Download file from api, and extract to download_dir # Download the .tar.gz file tarball_filepath = os.path.join(download_dir, tarball_name) @@ -519,6 +489,24 @@ def write_file(filepath: str, content: str): with open(filepath, 'w') as file: file.write(content) +def get_inventory_path() -> str: + """Return the Ansible inventory file path based on TEST_INVENTORY flag.""" + suffix = 'test_inventory.ini' if TEST_INVENTORY else 'global_inventory.ini' + return ANSIBLE_PLAYBOOKS_PATH + suffix + +def finalize_deployment(component_name: str, tag: str, user: str, facilities: list, + deployment_output: str, status: int, deployment_success: bool, + deployment_report_file: str, dry_run: bool, + facilities_ioc_dict: dict = None) -> dict: + """Generate deployment report, write to ELOG, and return the standard result dict.""" + summary = generate_report(component_name, tag, user, deployment_output, status, + deployment_report_file, facilities_ioc_dict, dry_run) + elog_url = "" + if not dry_run: + elog_url = send_deployment_to_elog(component_name, tag, facilities, summary) or "" + return {"summary": summary, "report_file": deployment_report_file, + "status": status, "success": deployment_success, "elog_url": elog_url} + def generate_report(component_name: str, tag: str, user: str, deployment_output: str, status: int, deployment_report_file: str, facilities_ioc_dict: 
dict=None, dry_run: bool=False): """ Generate a deployment report """ summary = \ @@ -530,9 +518,7 @@ def generate_report(component_name: str, tag: str, user: str, deployment_output: summary += f"\n#### IOCs deployed: {facilities_ioc_dict}" if (status == 200): # 200 means success - # 6.2) Write summary of deployment to report at the top summary += "\n#### Overall status: Success\n\n" + deployment_output - write_file(deployment_report_file, summary) else: # Failure status = 400 summary += "\n#### Overall status: Failure - PLEASE REVIEW\n\n" + deployment_output @@ -545,7 +531,7 @@ def send_deployment_to_elog(component_name: str, tag: str, facilities: list, sum """ Writes processed data to ELOG backend API based on the message type """ - print(f"Writing to the ELOG (SW_LOG) logbook through backend API for: {component_name} - {tag}") + logging.debug(f"Writing to the ELOG (SW_LOG) logbook through backend API for: {component_name} - {tag}") title = f"Deployment: {component_name} - {tag} {facilities}" text = f"
{summary_report}"
@@ -567,58 +553,57 @@ def send_deployment_to_elog(component_name: str, tag: str, facilities: list, sum
# Send the request
try:
- print(f"Sending request to: {ELOG_ENDPOINT}")
- print(f"Headers: {ELOG_HEADERS}")
- response = requests.post(ELOG_ENDPOINT, headers=ELOG_HEADERS, json=payload)
+ logging.debug(f"Sending request to: {ELOG_ENDPOINT}")
+ response = requests.post(ELOG_ENDPOINT, headers=ELOG_HEADERS, json=payload, timeout=REQUEST_TIMEOUT)
- print(f"Response status code: {response.status_code}")
- print(f"Response headers: {response.headers}")
+ logging.debug(f"Response status code: {response.status_code}")
+ logging.debug(f"Response headers: {response.headers}")
# Try to raise for status
response.raise_for_status()
- print(f"Successfully sent to ELOG API: {response.status_code}")
- print(f"response: {response}")
- print(f"response payload: {response.json()['payload']}")
+ logging.debug(f"Successfully sent to ELOG API: {response.status_code}")
+ logging.debug(f"response: {response}")
+ logging.debug(f"response payload: {response.json()['payload']}")
elog_url = ELOG_URL_PREFIX + response.json()['payload'] + ELOG_URL_POSTFIX
return elog_url
except requests.exceptions.HTTPError as http_err:
- print(f"HTTP Error: {http_err}")
+ logging.error(f"HTTP Error: {http_err}")
# Print detailed response information
- print(f"Response status code: {response.status_code}")
- print(f"Response reason: {response.reason}")
+ logging.error(f"Response status code: {response.status_code}")
+ logging.error(f"Response reason: {response.reason}")
# Try to get response text (may contain error details)
try:
- print(f"Response text: {response.text}")
+ logging.error(f"Response text: {response.text}")
except:
- print("Could not get response text")
+ logging.error("Could not get response text")
# Try to parse JSON response (may contain error details)
try:
- print(f"Response JSON: {response.json()}")
+ logging.error(f"Response JSON: {response.json()}")
except:
- print("Response is not valid JSON")
+ logging.error("Response is not valid JSON")
- print(f"Request URL: {response.request.url}")
- print(f"Request method: {response.request.method}")
- print(f"Request headers: {response.request.headers}")
- print(f"Request body: {response.request.body}")
+ logging.debug(f"Request URL: {response.request.url}")
+ logging.debug(f"Request method: {response.request.method}")
+ logging.debug(f"Request headers: {response.request.headers}")
+ logging.debug(f"Request body: {response.request.body}")
return False
except requests.exceptions.ConnectionError as conn_err:
- print(f"Connection Error: {conn_err}")
+ logging.error(f"Connection Error: {conn_err}")
return False
except requests.exceptions.Timeout as timeout_err:
- print(f"Timeout Error: {timeout_err}")
+ logging.error(f"Timeout Error: {timeout_err}")
return False
except requests.exceptions.RequestException as req_err:
- print(f"Request Error: {req_err}")
+ logging.error(f"Request Error: {req_err}")
return False
except Exception as e:
- print(f"General Error: {e}")
- print(f"Failed payload: {payload}")
+ logging.error(f"General Error: {e}")
+ logging.error(f"Failed payload: {payload}")
return False
# Begin API functions =================================================================================
@@ -677,7 +662,8 @@ async def get_deployment_status(task_id: str):
if task.status == "completed":
response["result"] = {
- "summary": task.result.get("summary")
+ "summary": task.result.get("summary"),
+ "elog_url": task.result.get("elog_url", "")
}
elif task.status == "failed":
response["error"] = task.error
@@ -706,32 +692,6 @@ async def download_deployment_report(task_id: str):
media_type='text/plain'
)
-@app.post("/tag")
-async def post_tag_creation(tag_request: TagDict):
- """
- Function to create a tag and push it to artifact storage
- """
- return NotImplementedError
- results_dir_top = os.path.join(SCRATCH_FILEPATH, tag_request.results, tag_request.component_name)
-
- # 1) Change to the 'build_results' directory
- build_results_dir = os.path.join(results_dir_top, "build_results")
- build_results = f"{tag_request.component_name}-{tag_request.branch}"
- change_directory(build_results_dir)
-
- # 2) Rename the specified directory to the tag
- build_results_full_path = os.path.join(build_results_dir, build_results)
- tagged_dir_path = os.path.join(build_results_dir, tag_request.tag)
- rename_directory(build_results_full_path, tagged_dir_path)
-
- # 3) Create a tarball of the renamed directory
- tarball_name = create_tarball(tagged_dir_path, tag_request.tag)
- full_tarball_path = os.path.join(build_results_dir, tarball_name)
- # 4) Push to artifact storage
- return_code = artifact_api.put_component_to_registry(tag_request.component_name, full_tarball_path, tag_request.tag)
- return JSONResponse(content={"payload": "Success"}, status_code=return_code)
-
-
@app.put("/ioc/deployment/revert")
async def revert_ioc_deployment(ioc_to_deploy: RevertDict, background_tasks: BackgroundTasks):
"""
@@ -759,116 +719,74 @@ async def revert_ioc_deployment(ioc_to_deploy: RevertDict, background_tasks: Bac
revert_tag = previous_deployment.get("tag")
- # 2) Deploy the reverted deployment for this facility
- revert_deployment = IocDict(component_name=ioc_to_deploy.component_name,
+ # 2) Deploy the reverted deployment for this facility
+ if not revert_tag:
+ return JSONResponse(status_code=400, content={"payload": "No previous deployment found to revert to"})
+ revert_deployment = DeployDict(component_name=ioc_to_deploy.component_name,
facilities=[ioc_to_deploy.facility],
tag=revert_tag,
ioc_list=iocs_that_changed,
- user=ioc_to_deploy.user)
-
- task_id = await deploy_ioc(revert_deployment, background_tasks, True)
+ user=ioc_to_deploy.user,
+ playbook='ioc_module/ioc_deploy.yml')
- return JSONResponse(
- status_code=202,
- content={
- "task_id": task_id,
- "status": "pending"
- }
- )
+ return await deploy(revert_deployment, background_tasks)
-@app.put("/ioc/deployment")
-async def deploy_ioc(ioc_to_deploy: IocDict, background_tasks: BackgroundTasks, return_id_only: bool=False):
- """Main entry point for IOC deployment API (async 202 pattern)"""
+@app.put("/deployment")
+async def deploy(deploy_request: DeployDict, background_tasks: BackgroundTasks):
+ """Unified deployment endpoint. Routes to IOC, PyDM, container, or generic handler
+ based on the playbook field (e.g. 'ioc_module/...', 'pydm_module/...', 'container_module/...')."""
task_id = str(uuid.uuid4())
-
- # Create task
task = DeploymentTask(task_id, save_callback=save_task)
save_task(task)
-
- # Start deployment in background
- background_tasks.add_task(
- deploy_ioc_async,
- task_id,
- ioc_to_deploy
- )
- logging.debug(f"Returning TASK_ID: {task_id}")
- if (return_id_only):
- return task_id
-
- return JSONResponse(
- status_code=202,
- content={
- "task_id": task_id,
- "status": "pending"
- }
- )
+ background_tasks.add_task(deploy_async, task_id, deploy_request)
+ return JSONResponse(status_code=202, content={"task_id": task_id, "status": "pending"})
-async def deploy_ioc_async(task_id: str, ioc_to_deploy: IocDict):
- """
- Runs the deployment logic asynchronously and updates task status
-
- Main entry point for IOC deployment API.
- Handles these deployment scenarios:
- 1. Deploy tag to select existing IOCs
- (IOCs specified, facility not required)
- ex: bs deploy -i sioc-sys0-bs01, sioc-sys0-bs02 R1.3.4
- 2. Deploy tag to all existing IOCs
- (IOCs specified, facility not required)
- 2.1. Deploy tag to all existing IOCs but user specified which facilities they want to update.
- This would end up being case 2, but the cli would need logic
- to figure out which IOCs in the facilities to deploy
- ex: bs deploy -i ALL -f LCLS, FACET R1.3.4
- 3. Deploy tag to new IOCs
- (IOCs specified, facility required)
- ex: bs deploy -i sioc-sys0-bs01, sioc-sys0-bs02 -f LCLS R1.3.4
- 4. Deploy tag to component
- (no IOCs, facility required) - works for both new and existing components
- ex: bs deploy -f LCLS R1.3.4
- 5. Deploy tag to new component AND new IOCs
- (IOCs specified, facility required)
- ex: bs deploy -i sioc-sys0-bs01, sioc-sys0-bs02 -f LCLS R1.3.4
- """
- await asyncio.sleep(0.1) # Force yield
+async def deploy_async(task_id: str, deploy_request: DeployDict):
+ """Dispatch to the right deployment handler based on playbook path."""
+ await asyncio.sleep(0.1)
task = get_task(task_id)
- temp_download_dir = f"{APP_PATH}/tmp/{task_id}"
- task.temp_dir = temp_download_dir
-
+ temp_dir = f"{APP_PATH}/tmp/{task_id}"
+ os.makedirs(temp_dir, exist_ok=True)
+ task.temp_dir = temp_dir
try:
- os.makedirs(temp_download_dir, exist_ok=True)
- logging.info(f"New deployment request data: {ioc_to_deploy}")
-
- task.update_progress("Determining deployment type", 5)
-
- # Handle component-only deployment
- if not ioc_to_deploy.ioc_list:
- logging.info("Component-only deployment")
- task.update_progress("Component-only deployment", 10)
- result = deploy_component(ioc_to_deploy, temp_download_dir, task)
-
- # Handle IOC deployments
- elif ioc_to_deploy.facilities:
- logging.info("Deploy tag to new IOCs")
- # Case 3: Deploy tag to new IOCs (IOCs specified, facility required)
- # Case 5: Deploy new component AND new IOCs (IOCs specified, facility required)
- task.update_progress("Deploying to new IOCs", 10)
- result = deploy_iocs_with_facility(ioc_to_deploy, temp_download_dir, task)
+ if 'ioc_module' in deploy_request.playbook:
+ result = await asyncio.get_running_loop().run_in_executor(None, deploy_ioc_sync, deploy_request, temp_dir, task)
+ elif 'container_module' in deploy_request.playbook:
+ result = await asyncio.get_running_loop().run_in_executor(None, deploy_container_sync, deploy_request, temp_dir, task)
else:
- logging.info("Deploy tag to existing IOCs")
- # Cases 1 & 2: Deploy tag to existing IOCs (facility not required)
- task.update_progress("Deploying to existing IOCs", 10)
- result = deploy_existing_iocs(ioc_to_deploy, temp_download_dir, task)
-
- # Store result (summary and report file path)
+ result = await asyncio.get_running_loop().run_in_executor(None, run_generic_deployment, deploy_request, temp_dir, task)
task.complete(result)
-
except Exception as e:
logging.exception(f"Deployment {task_id} failed")
task.fail(str(e))
- # Cleanup on failure
- if os.path.exists(temp_download_dir):
- cleanup_temp_deployment_dir(temp_download_dir)
+ cleanup_temp_deployment_dir(temp_dir)
-def deploy_component(ioc_to_deploy: IocDict, temp_download_dir: str, task: DeploymentTask):
+def deploy_ioc_sync(ioc_to_deploy: DeployDict, temp_download_dir: str, task: DeploymentTask):
+ """
+ IOC deployment logic. Handles these scenarios:
+ 1. Deploy tag to select existing IOCs (IOCs specified, facility not required)
+ 2. Deploy tag to all existing IOCs
+ 3. Deploy tag to new IOCs (IOCs specified, facility required)
+ 4. Deploy tag to component only (no IOCs, facility required)
+ 5. Deploy new component AND new IOCs
+ """
+ logging.info(f"New IOC deployment request: {ioc_to_deploy}")
+ # Handle component-only deployment
+ if not ioc_to_deploy.ioc_list:
+ logging.info("Component-only deployment")
+ task.update_progress("Component-only deployment", 10)
+ return deploy_component(ioc_to_deploy, temp_download_dir, task)
+ # IOC deployments with facility specified (new IOCs or new component)
+ elif ioc_to_deploy.facilities:
+ logging.info("Deploy tag to new IOCs")
+ task.update_progress("Deploying to new IOCs", 10)
+ return deploy_iocs_with_facility(ioc_to_deploy, temp_download_dir, task)
+ else:
+ logging.info("Deploy tag to existing IOCs")
+ task.update_progress("Deploying to existing IOCs", 10)
+ return deploy_existing_iocs(ioc_to_deploy, temp_download_dir, task)
+
+def deploy_component(ioc_to_deploy: DeployDict, temp_download_dir: str, task: DeploymentTask):
"""
Handle component-only deployment
- Deploy tag to component (either new or existing) with facility specified
@@ -898,7 +816,7 @@ def deploy_component(ioc_to_deploy: IocDict, temp_download_dir: str, task: Deplo
facilities_ioc_dict, new_component, task
)
-def deploy_existing_iocs(ioc_to_deploy: IocDict, temp_download_dir: str, task: DeploymentTask):
+def deploy_existing_iocs(ioc_to_deploy: DeployDict, temp_download_dir: str, task: DeploymentTask):
"""
Handle deployments to existing IOCs
- Case 1: Deploy tag to select existing IOCs
@@ -929,7 +847,7 @@ def deploy_existing_iocs(ioc_to_deploy: IocDict, temp_download_dir: str, task: D
facilities_ioc_dict, False, task # No new components
)
-def deploy_iocs_with_facility(ioc_to_deploy: IocDict, temp_download_dir: str, task: DeploymentTask):
+def deploy_iocs_with_facility(ioc_to_deploy: DeployDict, temp_download_dir: str, task: DeploymentTask):
"""
Handle deployment of IOCs with facility specified
- Case 3: Deploy tag to new IOCs (facility required)
@@ -961,7 +879,7 @@ def deploy_iocs_with_facility(ioc_to_deploy: IocDict, temp_download_dir: str, ta
)
-def execute_ioc_deployment(ioc_to_deploy: IocDict, temp_download_dir: str,
+def execute_ioc_deployment(ioc_to_deploy: DeployDict, temp_download_dir: str,
facilities_ioc_dict: dict, new_component: bool, task: DeploymentTask):
"""
Called by the specific deployment functions after they determine what to deploy.
@@ -990,9 +908,7 @@ def execute_ioc_deployment(ioc_to_deploy: IocDict, temp_download_dir: str,
ioc_playbooks_path = ANSIBLE_PLAYBOOKS_PATH + 'ioc_module'
playbook_args_dict['playbook_path'] = ioc_playbooks_path
local_ioc_playbooks_path = ANSIBLE_PLAYBOOKS_PATH + 'ioc_module'
- inventory_file_path = ANSIBLE_PLAYBOOKS_PATH
- if (TEST_INVENTORY): inventory_file_path += 'test_inventory.ini'
- else: inventory_file_path += 'global_inventory.ini'
+ inventory_file_path = get_inventory_path()
# Skip facilities with empty IOC lists for component-only deployments
is_component_only = len(facilities_ioc_dict[facility]) == 0
@@ -1074,144 +990,22 @@ def execute_ioc_deployment(ioc_to_deploy: IocDict, temp_download_dir: str,
facilities_ioc_dict[facility]
)
- # Error check - If deployment output is empty, then nothing was deployed
- if (deployment_output == ""):
- return JSONResponse(content={"payload": {"Error": "No deployments performed. This may be due to empty IOC lists or invalid component/facility combinations."}}, status_code=400)
+ if deployment_output == "":
+ raise ValueError("No deployments performed. This may be due to empty IOC lists or invalid component/facility combinations.")
- # Generate summary for report
task.update_progress("Generating deployment report", 90)
- summary = generate_report(ioc_to_deploy.component_name, ioc_to_deploy.tag, ioc_to_deploy.user,
- deployment_output, status, deployment_report_file, facilities_ioc_dict, ioc_to_deploy.dry_run)
-
- # Send summary to elog
- if (not ioc_to_deploy.dry_run):
+ if not ioc_to_deploy.dry_run:
task.update_progress("Writing to ELOG", 95)
- send_deployment_to_elog(ioc_to_deploy.component_name, ioc_to_deploy.tag, list(facilities_ioc_dict.keys()), summary)
-
- task.complete(summary)
-
- # Return ansible playbook output to user
- return {
- "summary": summary,
- "report_file": deployment_report_file,
- "status": status
- }
-
-@app.put("/pydm/deployment")
-async def deploy_pydm(pydm_to_deploy: PydmDict, background_tasks: BackgroundTasks):
- """
- Function to deploy a pydm "screen/display" component
- """
- # 1) Setup temporary directory for deployment contents
- request_id = str(uuid.uuid4())
- temp_download_dir = f"{APP_PATH}/tmp/{request_id}"
- os.makedirs(temp_download_dir, exist_ok=True)
- logging.info(f"New deployment request data: {pydm_to_deploy}")
- # 2) Call to backend to get component/tag from github releases
- if (not download_release(pydm_to_deploy.component_name, pydm_to_deploy.tag, temp_download_dir, all_os=False)):
- return JSONResponse(content={"payload": {"Error": "Deployment tag may not exist or software factory backend is broken"}}, status_code=400)
-
- # 3) Logic for special cases
- facilities = pydm_to_deploy.facilities
-
- # If subsystem not passed, then use component name
- pydm_to_deploy.subsystem = pydm_to_deploy.component_name.replace("pydm-", "").replace("-displays", "")
-
- # Special case - if adding new deployment
- deploy_new_component = False
- # If adding to multiple facilities, loop through them
- for facility in facilities:
- # Check if deployment already exists
- component = find_component_in_facility(facility, pydm_to_deploy.component_name)
- logging.debug(f"component: {component}")
- if (component):
- deploy_new_component = False
- else:
- # Otherwise create a new entry to deployment database
- deploy_new_component = True
- logging.debug(f"deploy_new_component: {deploy_new_component}")
-
- local_pydm_playbooks_path = ANSIBLE_PLAYBOOKS_PATH + 'pydm_module'
- inventory_file_path = ANSIBLE_PLAYBOOKS_PATH
- if (TEST_INVENTORY): inventory_file_path += 'test_inventory.ini'
- else: inventory_file_path += 'global_inventory.ini'
-
- tarball = f'{pydm_to_deploy.tag}.tar.gz'
- tarball_filepath = os.path.join(temp_download_dir, tarball)
-
- # in the loop below copy the ioc_dict, but only get the iocs within that facility (facilities_ioc_dict[facility])
- # 4) Call the appropriate ansible playbook for each applicable facility
- playbook_args_dict = pydm_to_deploy.model_dump()
- playbook_args_dict['tarball'] = tarball_filepath
- status = 200
- deployment_report_file = temp_download_dir + '/deployment-report-' + pydm_to_deploy.component_name + '-' + pydm_to_deploy.tag + '.log'
- deployment_output = ""
- for facility in facilities:
- logging.info(f"facility: {facility}")
- # 5) If component doesn't exist in facility and not a new component, then skip.
- if (not deploy_new_component and find_component_in_facility(facility, pydm_to_deploy.component_name) is None):
- continue
-
- playbook_args_dict['facility'] = facility
- # TODO: - may want to do a dry run first to see if there would be any fails.
- playbook_args = json.dumps(playbook_args_dict) # Convert dictionary to JSON string
- stdout, stderr, return_code = ansible_api.run_ansible_playbook(inventory_file_path, local_pydm_playbooks_path + '/pydm_deploy.yml',
- facility, playbook_args, return_output=True, no_color=True, check_mode=pydm_to_deploy.dry_run)
- # 5.1) Combine output
- current_output = ""
- current_output += "== Deployment output for " + facility + ' ==\n\n' + stdout
- deployment_success = True
- if (return_code != 0):
- status = 400 # Deployment failed
- if (stderr != ''):
- current_output += "\n== Errors ==\n\n" + stderr
- deployment_success = False
- deployment_output += current_output
-
-
- if (not pydm_to_deploy.dry_run):
- # 6) Write new configuration to deployment db for each facility
- update_db_after_deployment(deployment_success, deploy_new_component, facility, 'pydm', pydm_to_deploy.component_name,
- pydm_to_deploy.tag, pydm_to_deploy.user, current_output)
-
- # Error check - If deployment output is empty, then the component can't be found in deployment database
- if (deployment_output == ""):
- return JSONResponse(content={"payload": {"No deployments performed. This may be due to invalid component/facility combinations"}}, status_code=400)
-
- # 6) Generate summary for report
- summary = generate_report(pydm_to_deploy.component_name, pydm_to_deploy.tag, pydm_to_deploy.user,
- deployment_output, status, deployment_report_file, dry_run=pydm_to_deploy.dry_run)
-
- # Send summary to elog
- if (not pydm_to_deploy.dry_run):
- elog_url = send_deployment_to_elog(pydm_to_deploy.component_name, pydm_to_deploy.tag, facilities, summary)
-
- # Add cleanup
- background_tasks.add_task(cleanup_temp_deployment_dir, temp_download_dir)
-
- # 7) Return ansible playbook output to user
- if os.getenv('PYTHON_TESTING') == 'True':
- content = summary
- return Response(content=content, media_type="text/plain", status_code=status)
- elif (pydm_to_deploy.return_elog):
- return JSONResponse(content={
- "success": deployment_success,
- "elog_url": elog_url
- })
- else:
- return FileResponse(path=deployment_report_file, status_code=status)
-
+ return finalize_deployment(
+ ioc_to_deploy.component_name, ioc_to_deploy.tag, ioc_to_deploy.user,
+ list(facilities_ioc_dict.keys()), deployment_output, status, deployment_success,
+ deployment_report_file, ioc_to_deploy.dry_run, facilities_ioc_dict
+ )
-@app.put("/container/deployment")
-async def deploy_container(container_to_deploy: ContainerDict, background_tasks: BackgroundTasks):
- """
- Deploy a containerized application via Docker Compose.
- Runs the container_module Ansible playbook to pull images and deploy services.
- Secrets can be passed in the request body (from GitHub Actions) or loaded from environment variables as fallback.
- """
+def deploy_container_sync(container_to_deploy: DeployDict, temp_dir: str, task: DeploymentTask):
+ """Container deployment logic. No source tarball needed — just runs the container playbook."""
logging.info(f"New container deployment request: {container_to_deploy}")
- # Use request body secrets if provided, otherwise fall back to per-app env vars
env_secrets = get_container_secrets(container_to_deploy.component_name)
secrets = {
'database_url': container_to_deploy.database_url or env_secrets['database_url'],
@@ -1221,13 +1015,9 @@ async def deploy_container(container_to_deploy: ContainerDict, background_tasks:
}
if not secrets['database_url']:
app_key = container_to_deploy.component_name.upper().replace("-", "_")
- return JSONResponse(content={"payload": {"Error": f"database_url not provided in request and CONTAINER_{app_key}_DATABASE_URL environment variable not set"}}, status_code=500)
+ raise ValueError(f"database_url not provided and CONTAINER_{app_key}_DATABASE_URL env var not set")
- # Setup paths
- local_container_playbooks_path = ANSIBLE_PLAYBOOKS_PATH + 'container_module'
- inventory_file_path = ANSIBLE_PLAYBOOKS_PATH
- if (TEST_INVENTORY): inventory_file_path += 'test_inventory.ini'
- else: inventory_file_path += 'global_inventory.ini'
+ inventory_file_path = get_inventory_path()
# Build extra-vars for Ansible (app_name comes from component_name)
# Only include optional fields that are configured
@@ -1253,17 +1043,15 @@ async def deploy_container(container_to_deploy: ContainerDict, background_tasks:
status = 200
deployment_output = ""
deployment_success = True
- request_id = str(uuid.uuid4())
- temp_download_dir = f"{APP_PATH}/tmp/{request_id}"
- os.makedirs(temp_download_dir, exist_ok=True)
- deployment_report_file = temp_download_dir + '/deployment-report-' + container_to_deploy.component_name + '-' + container_to_deploy.tag + '.log'
+ deployment_report_file = os.path.join(temp_dir, f'deployment-report-{container_to_deploy.component_name}-{container_to_deploy.tag}.log')
+ full_playbook_path = os.path.join(ANSIBLE_PLAYBOOKS_PATH, container_to_deploy.playbook)
for facility in facilities:
logging.info(f"Deploying container to facility: {facility}")
playbook_args = json.dumps(playbook_args_dict)
stdout, stderr, return_code = ansible_api.run_ansible_playbook(
inventory_file_path,
- local_container_playbooks_path + '/container_deploy.yml',
+ full_playbook_path,
facility,
playbook_args,
return_output=True,
@@ -1278,147 +1066,93 @@ async def deploy_container(container_to_deploy: ContainerDict, background_tasks:
deployment_success = False
deployment_output += current_output
- # Update deployment database
update_db_after_deployment(deployment_success, True, facility, 'container',
container_to_deploy.component_name, container_to_deploy.tag,
container_to_deploy.user, current_output)
- if (deployment_output == ""):
- return JSONResponse(content={"payload": {"Error": "No deployments performed"}}, status_code=400)
-
- # Generate report
- summary = generate_report(container_to_deploy.component_name, container_to_deploy.tag,
- container_to_deploy.user, deployment_output, status, deployment_report_file)
-
- # Send to elog
- elog_url = send_deployment_to_elog(container_to_deploy.component_name, container_to_deploy.tag, facilities, summary)
+ if deployment_output == "":
+ raise ValueError("No deployments performed")
- # Cleanup
- background_tasks.add_task(cleanup_temp_deployment_dir, temp_download_dir)
+ return finalize_deployment(
+ container_to_deploy.component_name, container_to_deploy.tag, container_to_deploy.user,
+ facilities, deployment_output, status, deployment_success,
+ deployment_report_file, container_to_deploy.dry_run
+ )
- if os.getenv('PYTHON_TESTING') == 'True':
- return Response(content=summary, media_type="text/plain", status_code=status)
- elif (container_to_deploy.return_elog):
- return JSONResponse(content={
- "success": deployment_success,
- "elog_url": elog_url
- })
- else:
- return FileResponse(path=deployment_report_file, status_code=status)
+def run_generic_deployment(deploy_request: DeployDict, temp_dir: str, task: DeploymentTask):
+ """Generic deployment: download tagged release tarball, then run the specified playbook.
+ Handles pydm, HLA, TOOLS, and any other app type without IOC or container-specific logic."""
+ logging.info(f"Generic deployment request: {deploy_request}")
-@app.put("/app/deployment")
-async def deploy_app(app_to_deploy: AppDict, background_tasks: BackgroundTasks):
- """
- Deploy an application artifact (RPM, tarball, zip) to target servers.
- Downloads the artifact from a GitHub release asset URL, then runs the
- artifact_module Ansible playbook to extract and symlink on each facility.
- """
- logging.info(f"New app deployment request: {app_to_deploy}")
+ # Default subsystem for pydm apps if not provided
+ if 'pydm_module' in deploy_request.playbook and not deploy_request.subsystem:
+ deploy_request.subsystem = deploy_request.component_name.replace("pydm-", "").replace("-displays", "")
- # Validate artifact_url
- if not app_to_deploy.artifact_url:
- return JSONResponse(content={"payload": {"Error": "artifact_url is required for app deployments"}}, status_code=400)
+ # Derive app_type for the deployment DB from the playbook path
+ if 'pydm_module' in deploy_request.playbook:
+ app_type = 'pydm'
+ else:
+ app_type = deploy_request.playbook.split('/')[0] # e.g. 'hla_module/hla_deploy.yml' -> 'hla_module'
- # Setup paths
- request_id = str(uuid.uuid4())
- temp_download_dir = f"{APP_PATH}/tmp/{request_id}"
- os.makedirs(temp_download_dir, exist_ok=True)
+ task.update_progress("Downloading release", 20)
+ if not download_release(deploy_request.component_name, deploy_request.tag, temp_dir, all_os=False, extract_tarball=True):
+ raise ValueError(f"Failed to download release for {deploy_request.component_name} tag {deploy_request.tag}")
- # Determine file extension from artifact_type
- ext_map = {'rpm': 'rpm', 'tar': 'tar.gz', 'tar.gz': 'tar.gz', 'tgz': 'tar.gz', 'zip': 'zip'}
- ext = ext_map.get(app_to_deploy.artifact_type, app_to_deploy.artifact_type)
- artifact_filename = f"{app_to_deploy.component_name}-{app_to_deploy.tag}.{ext}"
- artifact_filepath = os.path.join(temp_download_dir, artifact_filename)
+ full_playbook_path = os.path.join(ANSIBLE_PLAYBOOKS_PATH, deploy_request.playbook)
+ inventory_file_path = get_inventory_path()
- # Download artifact from GitHub release asset URL
- github_token = os.environ.get("GITHUB_TOKEN", "")
- headers = {"Accept": "application/octet-stream"}
- if github_token:
- headers["Authorization"] = f"token {github_token}"
-
- try:
- response = requests.get(app_to_deploy.artifact_url, headers=headers, stream=True, allow_redirects=True)
- if response.status_code != 200:
- logging.error(f"Failed to download artifact: HTTP {response.status_code}")
- return JSONResponse(content={"payload": {"Error": f"Failed to download artifact from {app_to_deploy.artifact_url}: HTTP {response.status_code}"}}, status_code=400)
-
- with open(artifact_filepath, 'wb') as f:
- for chunk in response.iter_content(chunk_size=1024*1024):
- if chunk:
- f.write(chunk)
- logging.info(f"Artifact downloaded to {artifact_filepath}")
- except Exception as e:
- logging.error(f"Error downloading artifact: {e}")
- return JSONResponse(content={"payload": {"Error": f"Failed to download artifact: {str(e)}"}}, status_code=500)
-
- # Ansible setup
- local_artifact_playbooks_path = ANSIBLE_PLAYBOOKS_PATH + 'artifact_module'
- inventory_file_path = ANSIBLE_PLAYBOOKS_PATH
- if TEST_INVENTORY:
- inventory_file_path += 'test_inventory.ini'
- else:
- inventory_file_path += 'global_inventory.ini'
+ facilities = deploy_request.facilities or []
+ tarball_filepath = os.path.join(temp_dir, f"{deploy_request.tag}.tar.gz")
+ deployment_report_file = os.path.join(temp_dir, f'deployment-report-{deploy_request.component_name}-{deploy_request.tag}.log')
playbook_args_dict = {
- 'component_name': app_to_deploy.component_name,
- 'tag': app_to_deploy.tag,
- 'artifact_path': artifact_filepath,
- 'artifact_type': app_to_deploy.artifact_type,
+ 'component_name': deploy_request.component_name,
+ 'tag': deploy_request.tag,
+ 'user': deploy_request.user,
+ 'tarball': tarball_filepath,
}
+ if deploy_request.subsystem:
+ playbook_args_dict['subsystem'] = deploy_request.subsystem
+ if deploy_request.extra_vars:
+ playbook_args_dict.update(deploy_request.extra_vars)
- facilities = app_to_deploy.facilities
status = 200
deployment_output = ""
deployment_success = True
- deployment_report_file = os.path.join(temp_download_dir, f'deployment-report-{app_to_deploy.component_name}-{app_to_deploy.tag}.log')
-
- for facility in facilities:
- logging.info(f"Deploying app artifact to facility: {facility}")
+ elog_url = ""
+ for i, facility in enumerate(facilities):
+ task.update_progress(f"Deploying to {facility}", 30 + int(60 * i / max(len(facilities), 1)))
+ playbook_args_dict['facility'] = facility
playbook_args = json.dumps(playbook_args_dict)
stdout, stderr, return_code = ansible_api.run_ansible_playbook(
- inventory_file_path,
- local_artifact_playbooks_path + '/artifact_deploy.yml',
- facility,
- playbook_args,
- return_output=True,
- no_color=True
- )
-
- current_output = "== App deployment output for " + facility + ' ==\n\n' + stdout
+ inventory_file_path, full_playbook_path, facility, playbook_args,
+ return_output=True, no_color=True, check_mode=deploy_request.dry_run)
+ current_output = f"== Deployment output for {facility} ==\n\n{stdout}"
+ deployment_success = True  # per-facility reset for the DB update below; NOTE(review): the flag passed to finalize_deployment after the loop reflects only the LAST facility (status stays sticky at 400) — confirm intended
if return_code != 0:
status = 400
- if stderr != '':
- current_output += "\n== Errors ==\n\n" + stderr
+ if stderr:
+ current_output += f"\n== Errors ==\n\n{stderr}"
deployment_success = False
deployment_output += current_output
-
- update_db_after_deployment(deployment_success, True, facility, 'app',
- app_to_deploy.component_name, app_to_deploy.tag,
- app_to_deploy.user, current_output)
+ if not deploy_request.dry_run:
+ is_new_component = find_component_in_facility(facility, deploy_request.component_name) is None
+ update_db_after_deployment(deployment_success, is_new_component, facility, app_type,
+ deploy_request.component_name, deploy_request.tag,
+ deploy_request.user, current_output)
if deployment_output == "":
- return JSONResponse(content={"payload": {"Error": "No deployments performed"}}, status_code=400)
-
- # Generate report
- summary = generate_report(app_to_deploy.component_name, app_to_deploy.tag,
- app_to_deploy.user, deployment_output, status, deployment_report_file)
-
- # Send to elog
- elog_url = send_deployment_to_elog(app_to_deploy.component_name, app_to_deploy.tag, facilities, summary)
-
- # Cleanup
- background_tasks.add_task(cleanup_temp_deployment_dir, temp_download_dir)
-
- if os.getenv('PYTHON_TESTING') == 'True':
- return Response(content=summary, media_type="text/plain", status_code=status)
- elif app_to_deploy.return_elog:
- return JSONResponse(content={
- "success": deployment_success,
- "elog_url": elog_url
- })
- else:
- return FileResponse(path=deployment_report_file, status_code=status)
+ raise ValueError("No deployments performed — check facilities list and component name")
+
+ task.update_progress("Generating report", 92)
+ if not deploy_request.dry_run:
+ task.update_progress("Writing to ELOG", 96)
+ return finalize_deployment(
+ deploy_request.component_name, deploy_request.tag, deploy_request.user,
+ facilities, deployment_output, status, deployment_success,
+ deployment_report_file, deploy_request.dry_run
+ )
@app.put("/initial/deployment")
@@ -1441,7 +1175,7 @@ async def initial_deployment(initial_deployment: InitialDeploymentDict):
new_component['dependsOn'] = initial_deployment.ioc_list
logging.debug(f"new_component: {new_component}")
endpoint = BACKEND_URL + 'deployments'
- response = requests.post(endpoint, json=new_component)
+ response = requests.post(endpoint, json=new_component, timeout=REQUEST_TIMEOUT)
add_log_to_component(initial_deployment.facility, timestamp, initial_deployment.user,
initial_deployment.component_name, "Initial deployment entry added by software factory admins")
return JSONResponse(content={"payload": {"Success": "Deployment added to database"}}, status_code=200)
diff --git a/deploy_controller/test_deployment_controller.py b/deploy_controller/test_deployment_controller.py
index 02aa1d2..33082e8 100644
--- a/deploy_controller/test_deployment_controller.py
+++ b/deploy_controller/test_deployment_controller.py
@@ -10,6 +10,7 @@
2) git clone https://github.com/ad-build-test/build-system-playbooks
3) Alter the paths for 'test' group in build-system-playbooks/global_inventory.ini to your username
4) Then in mock_paths(), alter the paths to your local ~/test-deployment-controller/ (Anywhere it says pnispero, change to your username)
+5) Look in the dev MongoDB and make sure test-ioc and pydm-mps are not already in the deployments collection. If they are, please delete those entries before testing.
Usage: pytest test_deployment_controller.py
@@ -58,12 +59,13 @@
os.environ['PYTHON_TESTING'] = 'True'
os.environ['ELOG_USER_PASSWORD'] = "mock"
+os.environ['ELOG_SW_LOG_ID'] = "mock"
import pytest
import fakeredis
from fastapi.testclient import TestClient
from unittest.mock import patch, MagicMock, mock_open
-from deployment_controller import app, IocDict, PydmDict, RevertDict
+from deployment_controller import app, DeployDict, RevertDict
from httpx import AsyncClient, ASGITransport
import asyncio
@@ -85,7 +87,8 @@ def mock_paths():
patch('deployment_controller.TEST_INVENTORY', True), \
patch('deployment_controller.BACKEND_URL', 'https://ad-build-dev.slac.stanford.edu/api/cbs/v1/'), \
patch('deployment_controller.FACILITIES_LIST', ["test", "test2", "LCLS", "FACET", "TESTFAC", "DEV", "SANDBOX"]), \
- patch('deployment_controller.ANSIBLE_PLAYBOOKS_PATH', '/home/pnispero/test-deployment-controller/build-system-playbooks/'):
+ patch('deployment_controller.ANSIBLE_PLAYBOOKS_PATH', '/home/pnispero/test-deployment-controller/build-system-playbooks/'), \
+ patch('deployment_controller.send_deployment_to_elog', return_value='http://mock-elog-url'):
yield
####### Tests for get_deployment_component_info
@@ -127,16 +130,17 @@ async def test_deploy_ioc_new_component_success(mock_paths):
test_tag = "1.0.65"
test_user = "test_user"
- ioc_request = IocDict(
+ ioc_request = DeployDict(
facilities=[test_facility],
component_name=test_component,
tag=test_tag,
user=test_user,
+ playbook='ioc_module/ioc_deploy.yml',
)
- print("Sending request to /ioc/deployment")
+ print("Sending request to /deployment")
async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as ac:
- response = await ac.put("/ioc/deployment", json=ioc_request.model_dump())
+ response = await ac.put("/deployment", json=ioc_request.model_dump())
summary = await wait_for_deployment(ac, response)
assert "Deployment report" in summary
@@ -167,17 +171,18 @@ async def test_deploy_ioc_new_component_and_ioc_success(mock_paths):
test_ioc_list = "sioc-b34-gtest01"
test_user = "test_user"
- ioc_request = IocDict(
+ ioc_request = DeployDict(
facilities=[test_facility],
component_name=test_component,
tag=test_tag,
ioc_list=[test_ioc_list],
- user=test_user
+ user=test_user,
+ playbook='ioc_module/ioc_deploy.yml',
)
- print("Sending request to /ioc/deployment")
+ print("Sending request to /deployment")
async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as ac:
- response = await ac.put("/ioc/deployment", json=ioc_request.model_dump())
+ response = await ac.put("/deployment", json=ioc_request.model_dump())
summary = await wait_for_deployment(ac, response)
assert "Deployment report" in summary
@@ -206,17 +211,18 @@ async def test_deploy_ioc_new_ioc_success(mock_paths):
print("Starting test_deploy_ioc_new_ioc_success - add a new ioc to an existing component\n \
bs deploy --facility test -i sioc-b34-gtest02 1.0.65")
- ioc_request = IocDict(
+ ioc_request = DeployDict(
facilities=["test"],
component_name="test-ioc",
tag="1.0.65",
ioc_list=["sioc-b34-gtest02"],
- user="test_user"
+ user="test_user",
+ playbook='ioc_module/ioc_deploy.yml',
)
- print("Sending request to /ioc/deployment")
+ print("Sending request to /deployment")
async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as ac:
- response = await ac.put("/ioc/deployment", json=ioc_request.model_dump())
+ response = await ac.put("/deployment", json=ioc_request.model_dump())
summary = await wait_for_deployment(ac, response)
assert "Deployment report" in summary
@@ -234,18 +240,19 @@ async def test_deploy_ioc_new_tag_all_success_dry_run(mock_paths):
test_ioc_list = ["sioc-b34-gtest01", "sioc-b34-gtest02"]
test_user = "test_user"
- ioc_request = IocDict(
+ ioc_request = DeployDict(
component_name=test_component,
tag=test_tag,
ioc_list=test_ioc_list,
user=test_user,
- dry_run=True
+ dry_run=True,
+ playbook='ioc_module/ioc_deploy.yml',
)
- print("Sending request to /ioc/deployment")
+ print("Sending request to /deployment")
print(ioc_request.model_dump())
async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as ac:
- response = await ac.put("/ioc/deployment", json=ioc_request.model_dump())
+ response = await ac.put("/deployment", json=ioc_request.model_dump())
summary = await wait_for_deployment(ac, response)
assert "Deployment report" in summary
@@ -277,17 +284,18 @@ async def test_deploy_ioc_new_tag_all_success(mock_paths):
test_ioc_list = ["sioc-b34-gtest01", "sioc-b34-gtest02"]
test_user = "test_user"
- ioc_request = IocDict(
+ ioc_request = DeployDict(
component_name=test_component,
tag=test_tag,
ioc_list=test_ioc_list,
- user=test_user
+ user=test_user,
+ playbook='ioc_module/ioc_deploy.yml',
)
- print("Sending request to /ioc/deployment")
+ print("Sending request to /deployment")
print(ioc_request.model_dump())
async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as ac:
- response = await ac.put("/ioc/deployment", json=ioc_request.model_dump())
+ response = await ac.put("/deployment", json=ioc_request.model_dump())
summary = await wait_for_deployment(ac, response)
assert "Deployment report" in summary
@@ -337,17 +345,18 @@ async def test_deploy_ioc_new_tag_specific_ioc_success(mock_paths):
print("Starting test_deploy_ioc_new_tag_specific_ioc_success - deploy a new tag to an existing ioc in an existing component\n \
bs deploy -i sioc-b34-gtest02 1.0.67")
- ioc_request = IocDict(
+ ioc_request = DeployDict(
component_name="test-ioc",
tag="1.0.67",
ioc_list=["sioc-b34-gtest02"],
- user="test_user"
+ user="test_user",
+ playbook='ioc_module/ioc_deploy.yml',
)
- print("Sending request to /ioc/deployment")
+ print("Sending request to /deployment")
print(ioc_request.model_dump())
async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as ac:
- response = await ac.put("/ioc/deployment", json=ioc_request.model_dump())
+ response = await ac.put("/deployment", json=ioc_request.model_dump())
summary = await wait_for_deployment(ac, response)
assert "Deployment report" in summary
@@ -365,17 +374,18 @@ async def test_deploy_same_component_and_iocs_in_another_test_facility_success(m
test_ioc_list = ["sioc-b34-gtest01", "sioc-b34-gtest02"]
test_user = "test_user"
- ioc_request = IocDict(
+ ioc_request = DeployDict(
facilities=[test_facility],
component_name=test_component,
tag=test_tag,
ioc_list=test_ioc_list,
user=test_user,
+ playbook='ioc_module/ioc_deploy.yml',
)
- print("Sending request to /ioc/deployment")
+ print("Sending request to /deployment")
async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as ac:
- response = await ac.put("/ioc/deployment", json=ioc_request.model_dump())
+ response = await ac.put("/deployment", json=ioc_request.model_dump())
summary = await wait_for_deployment(ac, response)
assert "Deployment report" in summary
@@ -405,17 +415,18 @@ async def test_deploy_ioc_new_tag_specific_ioc_multiple_facilities_success(mock_
- deploy a new tag to an existing ioc in an existing component, in multiple facilities\n \
bs deploy -i sioc-b34-gtest01 1.0.67")
- ioc_request = IocDict(
+ ioc_request = DeployDict(
component_name="test-ioc",
tag="1.0.67",
ioc_list=["sioc-b34-gtest01"],
- user="test_user"
+ user="test_user",
+ playbook='ioc_module/ioc_deploy.yml',
)
- print("Sending request to /ioc/deployment")
+ print("Sending request to /deployment")
print(ioc_request.model_dump())
async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as ac:
- response = await ac.put("/ioc/deployment", json=ioc_request.model_dump())
+ response = await ac.put("/deployment", json=ioc_request.model_dump())
summary = await wait_for_deployment(ac, response)
assert "Deployment report" in summary
@@ -434,18 +445,19 @@ async def test_deploy_ioc_new_tag_specific_ioc_specific_facility_success(mock_pa
test_ioc_list = ["sioc-b34-gtest02"]
test_user = "test_user"
- ioc_request = IocDict(
+ ioc_request = DeployDict(
facilities=[test_facility],
component_name=test_component,
tag=test_tag,
ioc_list=test_ioc_list,
user=test_user,
+ playbook='ioc_module/ioc_deploy.yml',
)
- print("Sending request to /ioc/deployment")
+ print("Sending request to /deployment")
print(ioc_request.model_dump())
async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as ac:
- response = await ac.put("/ioc/deployment", json=ioc_request.model_dump())
+ response = await ac.put("/deployment", json=ioc_request.model_dump())
summary = await wait_for_deployment(ac, response)
assert "Deployment report" in summary
@@ -484,21 +496,22 @@ async def test_deploy_pydm_new_component_success(mock_paths):
print("Starting test_deploy_pydm_new_component_success - add a new component entirely\n \
bs deploy --facility test 1.0.0")
- pydm_request = PydmDict(
+ pydm_request = DeployDict(
facilities=["test"],
component_name="pydm-mps",
tag="R1.0.0",
user="test_user",
- subsystem="mps"
+ subsystem="mps",
+ playbook='pydm_module/pydm_deploy.yml',
)
- print("Sending request to /pydm/deployment")
+ print("Sending request to /deployment")
async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as ac:
- response = await ac.put("/pydm/deployment", json=pydm_request.model_dump())
+ response = await ac.put("/deployment", json=pydm_request.model_dump())
+ summary = await wait_for_deployment(ac, response)
- assert response.status_code == 200
- assert "Deployment report" in response.text
- assert "Success" in response.text
+ assert "Deployment report" in summary
+ assert "Success" in summary
@pytest.mark.asyncio
async def test_deploy_pydm_new_tag_success(mock_paths):
@@ -511,23 +524,24 @@ async def test_deploy_pydm_new_tag_success(mock_paths):
test_user = "test_user"
test_subsystem="mps"
- ioc_request = PydmDict(
+ ioc_request = DeployDict(
facilities=[test_facility],
component_name=test_component,
tag=test_tag,
user=test_user,
- subsystem=test_subsystem
+ subsystem=test_subsystem,
+ playbook='pydm_module/pydm_deploy.yml',
)
- print("Sending request to /pydm/deployment")
+ print("Sending request to /deployment")
async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as ac:
- response = await ac.put("/pydm/deployment", json=ioc_request.model_dump())
+ response = await ac.put("/deployment", json=ioc_request.model_dump())
+ summary = await wait_for_deployment(ac, response)
- print(f"Received response with status code: {response.status_code}")
+ print(f"Received response with completed summary")
- assert response.status_code == 200
- assert "Deployment report" in response.text
- assert "Success" in response.text
+ assert "Deployment report" in summary
+ assert "Success" in summary
print("Confirm deployment database contents are correct...")
response = client.request("GET", "/deployment/info", json={"component_name": "pydm-mps"})
diff --git a/examples/firmware_config.yaml b/examples/firmware_config.yaml
new file mode 100644
index 0000000..21e7d3a
--- /dev/null
+++ b/examples/firmware_config.yaml
@@ -0,0 +1,102 @@
+# Example config.yaml for a firmware application (LLRF). Multiple tests and build targets, with hardware tests triggered after ALL builds pass.
+repo: lcls2_llrf
+organization: ad-build-test
+url: https://github.com/ad-build-test/lcls2_llrf
+description: LCLS-II LLRF firmware
+
+# Docker image for build pods
+image: ghcr.io/ad-build-test/bookworm-env:latest
+
+# Global env vars available to all stages
+variables:
+ XILINX_VIVADO: /non-free/Xilinx/Vivado/2020.2
+
+# Software tests — each gets its own pod
+test:
+ sel4v_test:
+ command: "cd firmware/prc/sel4v && make && make clean"
+ prc_test:
+ command: "cd firmware/prc && make && make checks && make clean && make Vclm CONFIG=marble_sim"
+ resonance_control_test:
+ command: "cd firmware/resonance_control && make && make checks && make clean"
+ resonance_control_cdc_test:
+ command: "cd firmware/resonance_control && make application_top_cdc.txt"
+ prc_soft_test:
+ command: "cd software/prc && make checks && make clean"
+ injector_test:
+ command: "cd firmware/injector && make && make clean"
+ prc_cdc_test:
+ command: "cd firmware/prc && make application_top_cdc.txt CONFIG=cdc_test"
+ flake8:
+ command: "flake8 firmware/prc/*.py firmware/resonance_control/*.py software/prc/*.py"
+
+# [Required] Build — multi-target with matrix expansion
+# Each target x matrix value gets its own build pod
+build:
+ prc:
+ command: "cd firmware/prc && PATH=$XILINX_VIVADO/bin:$PATH make CONFIG=$TARGET prc.bit"
+ matrix:
+ TARGET: [qf2_v07, cmoc_qf2_v07, fiber_qf2_v07, marble, fiber_marble]
+ artifacts:
+ - "firmware/prc/*.bit"
+ resonance_control:
+ command: "cd firmware/resonance_control && PATH=$XILINX_VIVADO/bin:$PATH make CONFIG=$TARGET resonance_control.bit"
+ matrix:
+ TARGET: [qf2_v07, fiber_qf2_v07, marble, fiber_marble, marblepip]
+ artifacts:
+ - "firmware/resonance_control/*.bit"
+ injector:
+ command: "cd firmware/injector && XILINX_VIVADO=/non-free/Xilinx/Vivado/2018.3 PATH=$XILINX_VIVADO/bin:$PATH make CONFIG=$TARGET injector.bit"
+ matrix:
+ TARGET: [qf2_v07]
+ artifacts:
+ - "firmware/injector/*.bit"
+
+# Hardware tests — triggered after ALL builds pass
+hardwareTest:
+ runner: llrf-test-bench
+ prc_qf2_v07_run:
+ needs: [prc]
+ command: "cd software/prc && PYTHON=python3 sh lcls2_ci.sh -b ../../firmware/prc/prc*.bit -s 8350"
+ artifacts:
+ - "software/prc/rf_controls_*"
+ prc_qf2_cmoc_v07_run:
+ needs: [prc]
+ command: "cd software/prc && PYTHON=python3 sh cmoc_ci.sh -b ../../firmware/prc/prc*.bit"
+ artifacts:
+ - "software/prc/rf_controls_*"
+ prc_qf2_pip2_v07_run:
+ needs: [prc]
+ command: "cd software/prc && PYTHON=python3 sh pip2_ci.sh -b ../../firmware/prc/prc*.bit"
+ artifacts:
+ - "software/prc/rf_controls_*"
+ prc_fiber_qf2_v07_run:
+ needs: [prc]
+ command: "cd software/prc && PYTHON=python3 sh lcls2_ci.sh -b ../../firmware/prc/prc*.bit -a 192.168.0.201:803 -k -s 12500 -p"
+ artifacts:
+ - "software/prc/rf_controls_*"
+ injector_qf2_v07_hires_run:
+ needs: [injector]
+ command: "cd software/injector && PYTHON=python3 BIT=../../firmware/injector/injector*.bit IP=192.168.1.30 CONFIG=b46_gun sh injector_ci.sh"
+ artifacts:
+ - "software/injector/injector_rf_*"
+ injector_qf2_v07_lcls2_run:
+ needs: [injector]
+ command: "cd software/injector && PYTHON=python3 BIT=../../firmware/injector/injector*.bit IP=192.168.1.40 CONFIG=lcls2_buncher sh injector_ci.sh"
+ artifacts:
+ - "software/injector/injector_rf_*"
+ prc_marble_run:
+ needs: [prc]
+ command: "cd software/prc && PYTHON=python3 sh lcls2_ci.sh -b ../../firmware/prc/prc_marble*.bit -m -ms 39"
+ artifacts:
+ - "software/prc/rf_controls_*"
+ prc_fiber_marble_run:
+ needs: [prc]
+ command: "cd software/prc && PYTHON=python3 sh lcls2_ci.sh -b ../../firmware/prc/prc_fiber_marble*.bit -m -ms 39 -k"
+ artifacts:
+ - "software/prc/rf_controls_*"
+ resonance_control_qf2_v07_run:
+ needs: [resonance_control]
+ command: "cd software/res_ctl && PYTHON=python3 sh lbnl_rack.sh -b ../../firmware/resonance_control/resonance_control*.bit"
+ artifacts:
+ - "software/res_ctl/piezo_chirp_check__*"
diff --git a/examples/hla_config.yaml b/examples/hla_config.yaml
new file mode 100644
index 0000000..e0882e8
--- /dev/null
+++ b/examples/hla_config.yaml
@@ -0,0 +1,9 @@
+# Example config.yaml for a HLA application
+repo: slac-devices
+organization: slaclab
+url: https://github.com/slaclab/slac-devices
+description: Repository for accelerator devices used in high level applications (HLA).
+
+# Deployment playbook
+deploy:
+ playbook: hla_module/hla_deploy.yml
diff --git a/examples/ioc_config.yaml b/examples/ioc_config.yaml
new file mode 100644
index 0000000..ef0d846
--- /dev/null
+++ b/examples/ioc_config.yaml
@@ -0,0 +1,25 @@
+# Example config.yaml for an IOC application
+repo: test-ioc
+organization: ad-build-test
+url: https://github.com/ad-build-test/test-ioc
+description: Test IOC application
+
+# Docker image for build pods
+image: ghcr.io/ad-build-test/rocky9-env:latest
+
+# Build command
+build:
+ command: |
+ source /afs/slac/g/lcls/tools/script/ENVS64.bash &&
+ make
+
+# Files/directories to package for deployment
+artifacts:
+ - bin
+ - db
+ - dbd
+ - iocBoot
+
+# Deployment playbook
+deploy:
+ playbook: ioc_module/ioc_deploy.yml
diff --git a/examples/ioc_multiple_build_config.yaml b/examples/ioc_multiple_build_config.yaml
new file mode 100644
index 0000000..ab81197
--- /dev/null
+++ b/examples/ioc_multiple_build_config.yaml
@@ -0,0 +1,26 @@
+# Example config.yaml for an IOC application with multiple build configurations (e.g. for different OSes)
+repo: test-ioc
+organization: ad-build-test
+url: https://github.com/ad-build-test/test-ioc
+description: Test IOC application
+
+build:
+ rhel7:
+ image: ghcr.io/ad-build-test/rhel7-env:latest
+ command: |
+ source /afs/slac/g/lcls/tools/script/ENVS64.bash &&
+ make
+ rocky9:
+ image: ghcr.io/ad-build-test/rocky9-env:latest
+ command: |
+ source /afs/slac/g/lcls/tools/script/ENVS64.bash &&
+ make
+
+artifacts:
+ - bin
+ - db
+ - dbd
+ - iocBoot
+
+deploy:
+ playbook: ioc_module/ioc_deploy.yml
\ No newline at end of file
diff --git a/examples/pydm_config.yaml b/examples/pydm_config.yaml
new file mode 100644
index 0000000..b9246ee
--- /dev/null
+++ b/examples/pydm_config.yaml
@@ -0,0 +1,9 @@
+# Example config.yaml for a PyDM display application
+repo: mc-displays
+organization: slaclab
+url: https://github.com/slaclab/mc-displays
+description: mc-displays
+
+# Deployment playbook
+deploy:
+ playbook: pydm_module/pydm_deploy.yml
diff --git a/examples/tools_config.yaml b/examples/tools_config.yaml
new file mode 100644
index 0000000..a3074fe
--- /dev/null
+++ b/examples/tools_config.yaml
@@ -0,0 +1,9 @@
+# Example config.yaml for a TOOLS application
+repo: mc-tools-motion
+organization: slaclab
+url: https://github.com/slaclab/mc-tools-motion
+description: mc-tools
+
+# Deployment playbook
+deploy:
+ playbook: tools_module/tools_deploy.yml