{ "continuation-key": "************************************************************************************************************************************************************************************************************************************************************************************************************************", "configuration": "version: 2.1\n\norbs:\n cloudsmith: cloudsmith/cloudsmith@1.0.5\n continuation: circleci/continuation@0.3.1\n jira: circleci/jira@1.3.1\n # path-filtering: circleci/path-filtering@0.1.3\n sign-packages: *******/sign-packages@2.3.0\n\ncommands:\n restore-maven-cache:\n description: \"Maven: Calculate cache key and restore cache\"\n steps:\n - run:\n name: Calculate cache key from pom files\n command: find . -type f -name \"pom.xml\" | grep -v /target/ | sort -u | xargs cat > maven-dependency-pom-cache.key\n - restore_cache:\n keys:\n - maven-dependencies-v6-{{ checksum \"pom-version-cache.key\" }}-{{ checksum \"maven-dependency-pom-cache.key\" }}\n - maven-dependencies-v6-{{ checksum \"pom-version-cache.key\" }}-\n save-maven-cache:\n description: \"Maven: Save cache\"\n steps:\n - run:\n name: clean up artifacts that should not be there\n command: .circleci/scripts/clean-m2.sh\n - save_cache:\n key: maven-dependencies-v6-{{ checksum \"pom-version-cache.key\" }}-{{ checksum \"maven-dependency-pom-cache.key\" }}\n paths:\n - ~/.m2\n setup-github:\n description: \"Set Up Github SSH Access\"\n steps:\n - run:\n command: |\n install -d -m 700 ~/.ssh\n ssh-keyscan github.com >> ~/.ssh/known_hosts\n ssh-keyscan -p 443 ssh.github.com >> ~/.ssh/known_hosts\n chmod 600 ~/.ssh/known_hosts\n shallow-clone:\n description: \"Quick shallow checkout\"\n steps:\n - setup-github\n - run:\n name: git clone\n command: |\n git clone --no-checkout --filter=tree:0 \"${CIRCLE_REPOSITORY_URL}\" .\n if [ -n \"${CIRCLE_TAG}\" ]; then\n git checkout --force \"${CIRCLE_TAG}\"\n git reset --hard \"${CIRCLE_SHA1}\"\n else\n git checkout --force 
-B \"${CIRCLE_BRANCH}\" \"${CIRCLE_SHA1}\"\n fi\n cached-checkout:\n description: \"Checkout with caching\"\n steps:\n - restore_cache:\n keys:\n - source-v4-{{ .Branch }}-{{ .Revision }}\n - source-v4-{{ .Branch }}-\n - setup-github\n - checkout\n - run:\n name: git config merge.renameLimit\n command: git config merge.renameLimit 999999\n - run:\n name: git fetch origin\n command: |\n git remote prune origin || :\n git fetch origin\n save-cached-checkout:\n description: \"Cache a checkout\"\n steps:\n - save_cache:\n key: source-v4-{{ .Branch }}-{{ .Revision }}\n paths:\n - \".git\"\n checkout-for-pushing:\n description: \"Configure a cached checkout that can push upstream\"\n steps:\n - setup-github\n #- add_ssh_keys:\n # fingerprints:\n # - \"6a:c7:42:e6:e3:26:76:95:f4:0a:7a:5b:9c:b3:19:0e\"\n # #- \"66:9a:2d:a8:ad:7b:cc:7c:d2:ee:55:94:01:72:ac:2a\"\n - checkout\n - run:\n name: Create git identity\n command: |\n git config user.email \"cicd-system@*******.com\"\n git config user.name \"CI/CD System\"\n cached-download:\n description: Download with wget or curl\n parameters:\n url:\n type: string\n file:\n type: string\n steps:\n - run:\n name: create checksum file\n command: echo \"<< parameters.url >>|||<< parameters.file >>\" > /tmp/download-parameters.txt\n - restore_cache:\n keys:\n - cached-download-v1-{{ checksum \"/tmp/download-parameters.txt\" }}\n - run:\n name: download (if necessary)\n command: |\n WGET=\"$(command -v wget || :)\"\n CURL=\"$(command -v curl || :)\"\n SUDO=\"$(command -v sudo || :)\"\n \n if [ ! 
-e \"<< parameters.file >>\" ]; then\n if [ -x \"${WGET}\" ]; then\n if [ -x \"${SUDO}\" ]; then\n \"${SUDO}\" wget --quiet \"<< parameters.url >>\" -O \"<< parameters.file >>\"\n else\n wget --quiet \"<< parameters.url >>\" -O \"<< parameters.file >>\"\n fi\n elif [ -x \"${CURL}\" ]; then\n if [ -x \"${SUDO}\" ]; then\n \"${SUDO}\" curl -sSf -L \"<< parameters.url >>\" -o \"<< parameters.file >>\"\n else\n curl -sSf -L \"<< parameters.url >>\" -o \"<< parameters.file >>\"\n fi\n else\n echo \"unable to locate wget or curl, make sure one is installed before using cached-download\"\n exit 1\n fi\n \n case \"<< parameters.file >>\" in\n */bin/*)\n if [ -x \"${SUDO}\" ]; then\n sudo chmod a+x \"<< parameters.file >>\"\n else\n chmod a+x \"<< parameters.file >>\"\n fi\n ;;\n esac\n fi\n - save_cache:\n key: cached-download-v1-{{ checksum \"/tmp/download-parameters.txt\" }}\n paths:\n - << parameters.file >>\n\n restore-nodejs-cache:\n description: \"NodeJS: Calculate cache key and restore cache\"\n steps:\n - run:\n name: Calculate cache key\n command: find core/web-assets -name package\\*.json -o -name bower.json | grep -v /target/ | sort -u | xargs cat > nodejs-dependency-json-cache.key\n - restore_cache:\n keys:\n - nodejs-dependencies-v4-{{ checksum \"pom-version-cache.key\" }}-{{ checksum \"nodejs-dependency-json-cache.key\" }}\n - nodejs-dependencies-v4-{{ checksum \"pom-version-cache.key\" }}-\n save-nodejs-cache:\n description: \"NodeJS: Save cache\"\n steps:\n - run:\n description: clean up cache tmp directory\n command: rm -rf ~/.npm/_cacache/tmp\n - save_cache:\n key: nodejs-dependencies-v4-{{ checksum \"pom-version-cache.key\" }}-{{ checksum \"nodejs-dependency-json-cache.key\" }}\n paths:\n - ~/.npm\n restore-sonar-cache:\n description: \"Sonar: Restore sonar cache\"\n steps:\n - restore_cache:\n keys:\n - sonar-cache-v5-{{ .Branch }}-{{ checksum \"pom-version-cache.key\" }}-{{ .Revision }}\n - sonar-cache-v5-{{ .Branch }}-{{ checksum 
\"pom-version-cache.key\" }}-\n - sonar-cache-v5-{{ .Branch }}-\n - sonar-cache-v5-\n save-sonar-cache:\n description: \"Sonar: Save sonar cache\"\n steps:\n - save_cache:\n key: sonar-cache-v5-{{ .Branch }}-{{ checksum \"pom-version-cache.key\" }}-{{ .Revision }}\n paths:\n - /tmp/sonar-cache\n cache-workflow-assets:\n parameters:\n cache_prefix:\n description: the cache prefix\n type: string\n source_path:\n description: the source directory to cache\n type: string\n steps:\n - run:\n name: Stowing Assets in << parameters.source_path >> to cache prefix << parameters.cache_prefix >>\n command: |\n TARGET_PATH=\"/tmp/<< parameters.cache_prefix >>\"\n mkdir -p \"${TARGET_PATH}\"\n rsync -ar \"$(echo \"<< parameters.source_path >>\" | sed -e 's,/*$,,')/\" \"${TARGET_PATH}/\"\n find \"${TARGET_PATH}\" -type d -print0 | xargs -0 chmod 775\n find \"${TARGET_PATH}\" ! -type d -print0 | xargs -0 chmod 664\n - save_cache:\n key: << parameters.cache_prefix >>-v4-{{ .Branch }}-{{ .Revision }}-{{ .Environment.CIRCLE_SHA1 }}\n paths:\n - \"/tmp/<< parameters.cache_prefix >>\"\n restore-workflow-assets:\n parameters:\n cache_prefix:\n description: the cache prefix\n type: string\n target_path:\n description: the target directory to restore into\n type: string\n default: \"\"\n steps:\n - restore_cache:\n keys:\n - << parameters.cache_prefix >>-v4-{{ .Branch }}-{{ .Revision }}-{{ .Environment.CIRCLE_SHA1 }}\n - when:\n condition: << parameters.target_path >>\n steps:\n - run:\n name: Restoring assets to << parameters.target_path >> from cached prefix << parameters.cache_prefix >>\n command: |\n SOURCE_PATH=\"/tmp/<< parameters.cache_prefix >>\"\n TARGET_PATH=\"$(echo \"<< parameters.target_path >>\" | sed -e 's,/*$,,')\"\n mkdir -p \"${TARGET_PATH}\"\n rsync -ar \"${SOURCE_PATH}/\" \"${TARGET_PATH}/\"\n\n save-artifacts:\n description: Save Artifacts and Store Artifact Metadata\n parameters:\n path:\n type: string\n location:\n type: string\n steps:\n - store_artifacts:\n path: 
\"<< parameters.path >>/\"\n destination: \"<< parameters.location >>\"\n - run:\n name: \"enumerate artifacts in << parameters.location >>\"\n command: |\n mkdir -p ~/.artifacts\n chmod 777 ~/.artifacts\n if [ ! -d << parameters.path >> ] || [ \"$(find << parameters.path >> -type f | wc -l)\" -eq 0 ]; then\n echo \"warning: no files found in << parameters.path >>\"\n exit 0\n fi\n if [ -e ~/.artifacts/<< parameters.location >>.txt ]; then\n echo \"artifact file ~/.artifacts/<< parameters.location >>.txt already exists -- location must be unique\"\n exit 1\n fi\n curl \\\n -sS \\\n -L \\\n -H \"Accept: application/json\" \\\n -H \"Content-Type: application/json\" \\\n -H \"Circle-Token: ${CIRCLE_TOKEN}\" \\\n -X GET \\\n \"https://circleci.com/api/v2/project/gh/${CIRCLE_PROJECT_USERNAME}/${CIRCLE_PROJECT_REPONAME}/${CIRCLE_BUILD_NUM}/artifacts\" \\\n | grep -o 'https://[^\"]*' \\\n | grep \"/<< parameters.location >>/\" \\\n > ~/.artifacts/<< parameters.location >>.txt\n cat ~/.artifacts/<< parameters.location >>.txt\n - persist_to_workspace:\n root: ~/\n paths:\n - .artifacts/<< parameters.location >>.txt\n\n # always use attach_workspace to root=~ first\n fetch-artifacts:\n description: Download Artifacts from a Previous Job\n parameters:\n path:\n type: string\n location:\n type: string\n steps:\n - run:\n name: \"download artifacts for << parameters.location >> to << parameters.path >>\"\n command: |\n mkdir -p << parameters.path >> ~/.artifacts\n chmod 777 << parameters.path >> ~/.artifacts\n cd << parameters.path >>\n if [ -e ~/.artifacts/<< parameters.location >>.txt ]; then\n while read URL; do\n echo \"- $URL\"\n curl \\\n -sS \\\n -L \\\n -O \\\n -H \"Circle-Token: ${CIRCLE_TOKEN}\" \\\n \"${URL}\"\n done \\\n < ~/.artifacts/<< parameters.location >>.txt\n else\n echo \"artifact file ~/.artifacts/<< parameters.location >>.txt does not exist\"\n exit 1\n fi\n cd -\n\n persist-project-changes:\n description: persist project changes to the workspace, skipping files that come 
from git\n steps:\n - fix-workspace-permissions:\n path: project\n - run:\n name: prep the working directory\n command: |\n WORKSPACE=\"/tmp/extracted-workspace\"\n mkdir -p \"${WORKSPACE}/project\"\n\n cd ~/project\n git clean -dx --dry-run | grep -v __pycache__ | sed -e 's,^Would remove ,,' > \"${WORKSPACE}.filelist\"\n tar -cf - --files-from \"${WORKSPACE}.filelist\" | tar -xf - -C \"${WORKSPACE}/project\"\n - persist_to_workspace:\n root: /tmp/extracted-workspace/\n paths:\n - project\n\n fix-workspace-permissions:\n description: fix permissions of a path to be compatible with the docker cimg `circleci` user\n parameters:\n path:\n description: the path that should be fixed, relative to `~`\n type: string\n steps:\n - run:\n name: change ownership to `circleci` user in << parameters.path >>\n command: |\n SUDO=\"$(command -v sudo || :)\"\n CIRCLE_USER=\"$(id -u -n circleci 2>/dev/null || echo 3434)\"\n CIRCLE_GROUP=\"$(id -g -n circleci 2>/dev/null || echo 3434)\"\n\n cd ~\n if [ ! -x \"<< parameters.path >>\" ]; then\n echo \"WARNING: << parameters.path >> does not exist, but we are trying to chown it. This is probably not great. Exiting.\"\n exit 0\n fi\n if [ -n \"$SUDO\" ]; then\n \"$SUDO\" chown -R \"${CIRCLE_USER}:${CIRCLE_GROUP}\" \"<< parameters.path >>\"\n \"$SUDO\" chmod -R ug+rw \"<< parameters.path >>\"\n \"$SUDO\" find \"<< parameters.path >>\" -type d -print0 | \"$SUDO\" xargs -0 chmod ug+xs\n else\n chown -R \"${CIRCLE_USER}:${CIRCLE_GROUP}\" \"<< parameters.path >>\"\n chmod -R ug+rw \"<< parameters.path >>\"\n find \"<< parameters.path >>\" -type d -print0 | xargs -0 chmod ug+xs\n fi\n dockerhub-login:\n description: \"Connect to DockerHub\"\n steps:\n - run:\n name: Login to DockerHub\n command: |\n if [ -n \"${DOCKERHUB_LOGIN}\" ]; then\n printf \"${DOCKERHUB_PASS}\" | docker login -u ${DOCKERHUB_LOGIN} --password-stdin\n else\n echo \"WARNING: dockerhub login not found. 
Assuming this is a PR or other external branch build.\"\n fi\n acr-login:\n description: \"Connect to Azure Container Registry\"\n steps:\n - run:\n name: Login to ACR\n command: |\n PUBACR=*******pubacr.azurecr.io\n if [ -n \"${AZURE_SP_PASSWORD}\" ]; then\n docker login -u ${AZURE_SP} -p ${AZURE_SP_PASSWORD} ${PUBACR}\n else\n echo \"WARNING: Azure credentials not found. Assuming this is a PR or other external branch build.\"\n fi\n prep-package-build:\n description: \"Prepare for package builds\"\n steps:\n - sign-packages/setup-env:\n skip_if_forked_pr: true\n gnupg_home: ~/tmp/gpg\n - cached-checkout\n - attach_workspace:\n at: ~/\n - extract-pom-version\n - run:\n name: Check for Releasability\n command: |\n export OPENNMS_VERSION=\"$(.circleci/scripts/pom2version.sh pom.xml)\"\n .circleci/scripts/release-lint.sh \"${OPENNMS_VERSION}\"\n - restore-maven-cache\n - restore-nodejs-cache\n run-smoke-tests:\n description: \"Run the smoke tests\"\n parameters:\n suite:\n default: core\n type: string\n steps:\n - run:\n name: Enable swap\n command: |\n sudo fallocate -l 8G /swapfile\n sudo chmod 600 /swapfile\n sudo mkswap /swapfile\n sudo swapon /swapfile\n sudo sysctl vm.swappiness=5\n cat /proc/sys/vm/swappiness\n - load-oci:\n match: amd64\n - restore-maven-cache\n - run:\n name: Smoke Tests\n no_output_timeout: 30m\n command: |\n .circleci/scripts/smoke.sh << parameters.suite >>\n - run:\n name: Gather system logs\n when: always\n command: |\n mkdir -p ~/test-results/system-logs\n (dmesg || :) > ~/test-results/system-logs/dmesg 2>&1\n (ps auxf || :) > ~/test-results/system-logs/ps 2>&1\n (free -m || :) > ~/test-results/system-logs/free 2>&1\n\n (date '+%T' || :) > ~/test-results/system-logs/docker_stats 2>&1\n (docker stats --all --no-stream || :) >> ~/test-results/system-logs/docker_stats 2>&1\n (docker ps --all || :) >> ~/test-results/system-logs/docker_stats 2>&1\n for CONTAINER in `docker ps --all --quiet`; do\n ((docker logs --timestamps \"$CONTAINER\" 
2>&1 | tail -n 20 ) || :) > ~/test-results/system-logs/\"docker-${CONTAINER}.log\" 2>&1\n done\n ls -alh ~/project/smoke-test/ || :\n - run:\n name: Gather test artifacts\n when: always\n command: |\n mkdir -p ~/test-results/junit\n find . -type f -name failsafe-summary.xml -exec rm -f {} \\;\n find . -type f -regex \".*/target/[^/]*-reports[^/]*/.*xml\" -exec cp {} ~/test-results/junit/ \\;\n find . -type f -regex \".*/target/[^/]*-reports[^/]*/.*dump.*\" -exec cp {} ~/test-results/junit/ \\; || :\n mkdir -p ~/test-artifacts/recordings\n cp -R ~/project/smoke-test/target/*.{flv,mp4} ~/test-artifacts/recordings || true\n cp -R ~/project/smoke-test/target/screenshots ~/test-artifacts/ || true\n cp -R ~/project/smoke-test/target/logs ~/test-artifacts/ || true\n - store_test_results:\n path: ~/test-results\n - store_artifacts:\n when: always\n path: ~/test-results\n destination: test-results\n - store_artifacts:\n when: always\n path: ~/test-artifacts\n destination: test-artifacts\n run-build:\n description: \"Run the main build\"\n parameters:\n number-vcpu:\n default: 8\n type: integer\n node-memory:\n default: echo \"NODE_OPTIONS Not Set\"\n type: string\n vaadin-javamaxmem:\n default: 2g\n type: string\n steps:\n - cached-checkout\n - save-cached-checkout\n - extract-pom-version\n - run:\n name: Check for Releasability\n command: |\n export OPENNMS_VERSION=\"$(.circleci/scripts/pom2version.sh pom.xml)\"\n .circleci/scripts/release-lint.sh \"${OPENNMS_VERSION}\"\n - restore-maven-cache\n - restore-nodejs-cache\n - run:\n name: Compile OpenNMS\n command: |\n ulimit -n 65536 || :\n mkdir -p ~/.m2\n echo \"${GPG_SECRET_KEY}\" | base64 -d > ~/.m2/sign-key.asc\n export SIGN_KEY_PASS=\"$(echo \"${GPG_PASSPHRASE}\" | base64 -d)\"\n export OPENNMS_VERSION=\"$(.circleci/scripts/pom2version.sh pom.xml)\"\n << parameters.node-memory >>\n export MAVEN_OPTS=\"-Xmx12g -XX:ReservedCodeCacheSize=1g -XX:+TieredCompilation\"\n MAVEN_ARGS=\"install\"\n mkdir -p target/artifacts\n 
case \"${CIRCLE_BRANCH}\" in\n \"master-\"*|\"release-\"*|\"develop\")\n # \"production\" build will also enable javadoc and such\n MAVEN_ARGS=\"-Dbuild.type=production $MAVEN_ARGS\"\n ;;\n esac\n case \"${CIRCLE_BRANCH}\" in\n \"master-\"*)\n # \"production\" build should build SBOM manifests\n MAVEN_ARGS=\"-Dbuild.sbom=false $MAVEN_ARGS\"\n ;;\n esac\n echo \"export OPENNMS_VERSION=\\\"$OPENNMS_VERSION\\\"\" >> $BASH_ENV\n ./compile.pl -DskipTests=true -Dbuild.skip.tarball=false \\\n -Daether.connector.resumeDownloads=false \\\n -Daether.connector.basic.threads=1 \\\n -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \\\n -DvaadinJavaMaxMemory=<< parameters.vaadin-javamaxmem >> \\\n -DmaxCpus=<< parameters.number-vcpu >> \\\n -Prun-expensive-tasks \\\n -Psmoke \\\n --batch-mode \\\n $MAVEN_ARGS || exit 1\n ./compile.pl -s .circleci/scripts/structure-settings.xml \\\n --batch-mode \\\n --fail-at-end \\\n -Prun-expensive-tasks \\\n -Pbuild-bamboo \\\n org.*******.maven.plugins:structure-maven-plugin:1.0:structure || exit 1\n - run:\n name: Check if we have generated apidocs\n command: |\n if [ -d target/site/apidocs ] && [ \"$(find target/site/apidocs -name \\*.html | wc -l)\" -gt 1 ]; then\n pushd target/site/apidocs\n tar -czf \"../../artifacts/*******-${OPENNMS_VERSION}-javadoc.tar.gz\" *\n popd\n fi\n - run:\n name: Remove Extra Maven Repository OpenNMS Files\n command: |\n # move these out of the way so they're not stored in the maven pre-cache\n cd ~/.m2/repository/org/*******\n mkdir /tmp/maven-keep\n mv $(ls -1 | grep -v -E '^(jicmp-api|jicmp6-api|jrrd-api|jrrd2-api|lib|maven)$') /tmp/maven-keep\n - persist-project-changes\n - save-maven-cache\n - run:\n name: Restore Extra Maven Repository OpenNMS Files\n command: |\n # now move them back so they end up in the workspace for builds further down the workflow\n mv /tmp/maven-keep/* ~/.m2/repository/org/*******/\n - save-nodejs-cache\n - store_artifacts:\n path: 
~/project/target/artifacts\n destination: artifacts\n - persist_to_workspace:\n root: ~/\n paths:\n - project/target/structure-graph.json\n - .m2/repository/org/*******\n # is this even necessary anymore?\n - .artifacts\n run-integration-tests:\n parameters:\n rerun-failtest-count:\n default: 0\n type: integer\n failure-option:\n default: --fail-fast\n type: string\n changes-only:\n default: true\n type: boolean\n steps:\n - restore-maven-cache\n - run:\n name: Integration Tests\n no_output_timeout: 15m\n command: |\n export CCI_RERUN_FAILTEST=<< parameters.rerun-failtest-count >>\n export CCI_FAILURE_OPTION=<< parameters.failure-option >>\n export CCI_CHANGES_ONLY=<< parameters.changes-only >>\n .circleci/scripts/itest.sh\n - run:\n name: Gather test results\n when: always\n command: |\n mkdir -p ~/test-results/junit\n find . -type f -name failsafe-summary.xml -exec rm -f {} \\;\n find . -type f -regex \".*/target/[^/]*-reports[^/]*/.*xml\" -exec cp {} ~/test-results/junit/ \\;\n find . 
-type f -regex \".*/target/[^/]*-reports[^/]*/.*dump.*\" -exec cp {} ~/test-results/junit/ \\; || :\n - run:\n name: Gather tests\n when: always\n command: |\n mkdir -p ~/generated-tests\n cp target/find-tests/* ~/generated-tests/ || :\n cp /tmp/this_node* ~/generated-tests/ || :\n - run:\n name: Save Code Coverage Data\n when: always\n command: |\n .circleci/scripts/codecoverage-save.sh integration-test\n - persist_to_workspace:\n root: ~/\n paths:\n - code-coverage\n - project/target/find-tests\n - project/target/structure-graph.json\n - run:\n name: Gather system logs\n when: always\n command: |\n mkdir -p ~/test-results/system-logs\n (dmesg || :) > ~/test-results/system-logs/dmesg 2>&1\n (ps auxf || :) > ~/test-results/system-logs/ps 2>&1\n (free -m || :) > ~/test-results/system-logs/free 2>&1\n\n (date '+%T' || :) > ~/test-results/system-logs/docker_stats 2>&1\n (docker stats --all --no-stream || :) >> ~/test-results/system-logs/docker_stats 2>&1\n (docker ps --all || :) >> ~/test-results/system-logs/docker_stats 2>&1\n for CONTAINER in `docker ps --all --quiet`; do\n ((docker logs --timestamps \"$CONTAINER\" 2>&1 | tail -n 20 ) || :) > ~/test-results/system-logs/\"docker-${CONTAINER}.log\" 2>&1\n done\n - store_test_results:\n path: ~/test-results\n - store_artifacts:\n when: always\n path: ~/test-results\n destination: test-results\n - store_artifacts:\n when: always\n path: ~/build-results\n destination: build-results\n - store_artifacts:\n when: always\n path: ~/generated-tests\n destination: generated-tests\n run-empty:\n description: \"Run a simple ls command\"\n steps:\n - run:\n name: ls\n command: |\n ls\n load-oci:\n parameters:\n match:\n description: regular expression match for OCI filename\n type: string\n steps:\n - download-download-artifacts\n - run:\n name: restore OCI files matching \"<< parameters.match >>\"\n command: |\n if [ -n \"<< parameters.match >>\" ]; then\n download-artifacts.pl --include-failed --ci 
--workflow=\"${CIRCLE_WORKFLOW_ID}\" --match=\"<< parameters.match >>\" oci \"${CIRCLE_BRANCH}\" /tmp/oci-artifacts\n else\n download-artifacts.pl --include-failed --ci --workflow=\"${CIRCLE_WORKFLOW_ID}\" oci \"${CIRCLE_BRANCH}\" /tmp/oci-artifacts\n fi\n\n cd /tmp/oci-artifacts\n if [ \"$(ls -1 *.oci | wc -l)\" -eq 0 ]; then\n echo \"ERROR: No OCI files to load. Something probably went wrong earlier.\"\n exit 1\n fi\n\n docker container prune -f\n for FILE in *.oci; do\n echo \"Loading ${FILE} into Docker...\"\n _tag=\"*******/$(printf \"${FILE}\" | sed -e 's,\\.oci$,,'):latest\"\n if [ -z \"$(docker image ls -q \"${_tag}\")\" ]; then\n echo \"Didn't find existing image '${_tag}' -- loading from ${FILE}\"\n docker image load -i \"${FILE}\"\n fi\n if [ -z \"$(docker image ls -q \"${_tag}\")\" ]; then\n echo \"ERROR: After loading ${FILE} we still don't have an image with tag '${_tag}'; see 'docker image ls' output following\"\n docker image ls\n exit 1\n fi\n _unprefixed=\"$(echo \"${_tag}\" | cut -d/ -f2 | sed -e 's,-linux-.*$,,')\"\n echo \"Tagging docker image ${_tag} as [*******/]${_unprefixed}:latest\"\n docker image tag \"${_tag}\" \"*******/${_unprefixed}:latest\"\n echo \"NO LONGER TAGGING docker image as ${_unprefixed}:latest WITHOUT *******/\"\n done\n\n build-image-single-arch:\n parameters:\n architecture:\n type: string\n container_name:\n type: string\n container_dir:\n type: string\n tarball_match:\n type: string\n tarball_path:\n type: string\n steps:\n - setup_remote_docker:\n docker_layer_caching: true\n - cached-checkout\n - download-download-artifacts\n - run:\n name: download tarball dependency to << parameters.tarball_path >>\n command: download-artifacts.pl --include-failed --ci --workflow=\"${CIRCLE_WORKFLOW_ID}\" --match=\"<< parameters.tarball_match >>\" tar.gz \"${CIRCLE_BRANCH}\" \"$(pwd)/<< parameters.tarball_path >>\"\n - run:\n name: build << parameters.container_name >>=<< parameters.architecture >> container image\n command: |\n # 
set up multi-arch\n docker container prune -f\n docker run --rm --privileged tonistiigi/binfmt:latest --install \"<< parameters.architecture >>\"\n\n # Shouldn't need it for Foundation-2024+; Adding this line to resolve an issue with libc-bin segfault (NMS-17379)\n # docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes\n\n cd *******-container/<< parameters.container_dir >>\n export ARCH=\"$(printf \"<< parameters.architecture >>\" | tr / -)\"\n export TAG=\"<< parameters.container_name >>-${ARCH}\"\n make DOCKER_ARCH=\"<< parameters.architecture >>\" \\\n DOCKER_OCI=\"images/${TAG}.oci\" \\\n DOCKER_TAG=\"*******/${TAG}\" \\\n BUILD_NUMBER=\"${CIRCLE_BUILD_NUM}\" \\\n BUILD_URL=\"${CIRCLE_BUILD_URL}\" \\\n BUILD_BRANCH=\"${CIRCLE_BRANCH}\" \\\n oci\n - store_artifacts:\n path: ~/project/*******-container/<< parameters.container_dir >>/images/\n destination: /\n\n generate-sbom:\n parameters:\n container_dir:\n type: string\n steps:\n - run:\n name: install Syft\n command: |\n set -o pipefail\n curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b \"${HOME}/bin\"\n - run:\n name: generate SBOM\n command: |\n cd ~/project/*******-container/\"<< parameters.container_dir >>\"/images/\n if [ -z \"$(ls *.oci 2>/dev/null)\" ]; then\n echo 'ERROR: no *.oci files found in *******-container/<< parameters.container_dir >>/images/'\n exit 1\n fi\n\n mkdir -p /tmp/sboms\n for IMAGE_FILE in *.oci; do\n IMAGE_ROOT=\"$(echo \"${IMAGE_FILE}\" | sed -e 's,\\.oci$,,')\"\n syft scan \"docker-archive:${IMAGE_FILE}\" -o cyclonedx --quiet >\"/tmp/sboms/${IMAGE_ROOT}-sbom.xml\"\n done\n - store_artifacts:\n path: /tmp/sboms/\n destination: /\n\n scan-image-trivy:\n parameters:\n architecture:\n type: string\n container_name:\n type: string\n container_dir:\n type: string\n tarball_match:\n type: string\n tarball_path:\n type: string\n steps:\n - setup_remote_docker:\n docker_layer_caching: false\n - cached-checkout\n - 
download-download-artifacts\n - run:\n name: download tarball dependency to << parameters.tarball_path >>\n command: download-artifacts.pl --include-failed --workflow=\"${CIRCLE_WORKFLOW_ID}\" --match=\"<< parameters.tarball_match >>\" tar.gz \"${CIRCLE_BRANCH}\" \"$(pwd)/<< parameters.tarball_path >>\"\n - run:\n name: build << parameters.container_name >>=<< parameters.architecture >> container image\n command: |\n # set up multi-arch\n docker container prune -f\n docker run --rm --privileged tonistiigi/binfmt:latest --install \"<< parameters.architecture >>\"\n\n # export DOCKER_CONTENT_TRUST=1\n cd *******-container/<< parameters.container_dir >>\n export ARCH=\"$(printf \"<< parameters.architecture >>\" | tr / -)\"\n export TAG=\"<< parameters.container_name >>-${ARCH}\"\n make DOCKER_ARCH=\"<< parameters.architecture >>\" \\\n DOCKER_OCI=\"images/${TAG}.oci\" \\\n DOCKER_TAG=\"*******/${TAG}\" \\\n BUILD_NUMBER=\"${CIRCLE_BUILD_NUM}\" \\\n BUILD_URL=\"${CIRCLE_BUILD_URL}\" \\\n BUILD_BRANCH=\"${CIRCLE_BRANCH}\" \\\n oci\n - run:\n name: Install trivy\n command: |\n curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /tmp/bin\n - run:\n name: Scan the local image with trivy\n command: |\n docker image load -i ~/project/*******-container/*/images/*.oci\n export ARCH=\"$(printf \"<< parameters.architecture >>\" | tr / -)\"\n export TAG=\"<< parameters.container_name >>-${ARCH}\"\n \n #Create artifacts directory in the workspace\n mkdir -p \"${HOME}/project/artifacts\"\n\n /tmp/bin/trivy image --severity HIGH,CRITICAL --pkg-types os,library --scanners vuln --ignorefile ~/project/.circleci/trivy-config/trivyignore --timeout 30m --format json -o /tmp/filtered_vulnerabilities.json --no-progress *******/${TAG}\n /tmp/bin/trivy image --pkg-types os,library --scanners vuln --ignorefile ~/project/.circleci/trivy-config/trivyignore --timeout 30m --format json -o /tmp/report.json --no-progress *******/${TAG}\n 
/tmp/bin/trivy image --ignorefile ~/project/.circleci/trivy-config/trivyignore --timeout 30m --exit-code 0 -o /tmp/fullreport.txt --no-progress *******/${TAG}\n\n # Clean report.txt by removing rows with 0 vulnerabilities\n awk 'BEGIN { FS=\"│\"; in_table=0; skip=0 }\n index($0,\"┌\")==1 { print; in_table=1; next }\n !in_table { print; next }\n index($0,\"├\")==1 {\n if (skip) { skip=0; next }\n print; next\n }\n index($0,\"└\")==1 {\n if (skip) { skip=0; print; in_table=0; next }\n print; in_table=0; next\n }\n skip { next }\n $4 ~ /^[[:space:]]*0[[:space:]]*$/ { skip=1; next }\n { print }' /tmp/fullreport.txt > /tmp/report_cleaned.txt && mv /tmp/report_cleaned.txt /tmp/report.txt\n \n # Copy to artifacts with job-specific names\n JOB_NAME_SANITIZED=$(echo \"${CIRCLE_JOB}\" | tr / -)\n cp /tmp/report.json ~/project/artifacts/${JOB_NAME_SANITIZED}_report.json\n cp /tmp/report.txt ~/project/artifacts/${JOB_NAME_SANITIZED}_report.txt\n cp /tmp/filtered_vulnerabilities.json ~/project/artifacts/${JOB_NAME_SANITIZED}_filtered_vulnerabilities.json\n\n - persist_to_workspace:\n root: ~/project\n paths:\n - artifacts/*\n\n - store_artifacts:\n path: ~/project/artifacts\n destination: trivy-reports\n\n trivy-analyze:\n steps:\n - download-download-artifacts\n - cached-checkout\n - attach_workspace:\n at: ~/project\n - run:\n name: Verify Python script exists\n command: |\n echo \"Workspace contents:\"\n ls -la /home/circleci/project/.circleci/pyscripts/\n find /home/circleci/project -name analyze_trivy_report.py || true\n find ~/project -name analyze_trivy_report.py || true\n\n if [[ ! 
-f ~/project/.circleci/pyscripts/analyze_trivy_report.py ]]; then\n echo \"Error: Python script not found at ~/project/.circleci/pyscripts/analyze_trivy_report.py\"\n echo \"Trying to locate script...\"\n PY_SCRIPT=$(find ~/project -name analyze_trivy_report.py | head -1)\n if [[ -n \"$PY_SCRIPT\" ]]; then\n echo \"Found script at: $PY_SCRIPT\"\n mkdir -p ~/project/.circleci/pyscripts/\n cp \"$PY_SCRIPT\" ~/project/.circleci/pyscripts/\n else\n echo \"Error: Could not find analyze_trivy_report.py anywhere in workspace!\"\n exit 1\n fi\n fi\n\n echo \"Script verification passed - proceeding with analysis\"\n - run:\n name: Prepare Trivy reports\n command: |\n # Remove '-analyze' from job name to match artifact\n SCAN_JOB_NAME=$(echo \"${CIRCLE_JOB}\" | sed 's/-analyze//g')\n SANITIZED_SCAN_JOB_NAME=$(echo \"${SCAN_JOB_NAME}\" | tr / -)\n\n # Create required symlinks\n ln -sf ~/project/artifacts/${SANITIZED_SCAN_JOB_NAME}_report.json /tmp/report.json\n ln -sf ~/project/artifacts/${SANITIZED_SCAN_JOB_NAME}_filtered_vulnerabilities.json /tmp/filtered_vulnerabilities.json\n\n - run:\n name: Analyze trivy scan\n command: |\n cd ~/project\n python3 .circleci/pyscripts/analyze_trivy_report.py /tmp/report.json \n - store_artifacts:\n path: report.txt\n destination: report.txt\n - store_artifacts:\n path: report.csv\n destination: report.csv\n \n - run:\n name: Analyze filtered vulnerabilities\n command: |\n cd ~/project\n python3 .circleci/pyscripts/analyze_trivy_report.py /tmp/filtered_vulnerabilities.json\n - store_artifacts:\n path: filtered_vulnerabilities.txt\n destination: filtered_vulnerabilities.txt\n - store_artifacts:\n path: filtered_vulnerabilities.csv\n destination: filtered_vulnerabilities.csv\n\n - run:\n name: Create Jira Issues\n context: Jira-secrets\n command: |\n if [[ ! -f filtered_vulnerabilities.txt ]]; then\n echo \"No filtered_vulnerabilities.txt found. 
Skipping Jira issue creation.\"\n exit 0\n fi\n \n echo \"Creating Jira Issues from filtered_vulnerabilities.txt...\"\n python3 .circleci/pyscripts/create_jira_issues.py\n extract-pom-version:\n description: \"Extracting Maven POM version\"\n steps:\n - run:\n name: Extract Maven POM version\n command: .circleci/scripts/pom2version.sh pom.xml > pom-version-cache.key\n download-download-artifacts:\n steps:\n - run:\n name: download the download-artifacts.pl script and its dependencies\n command: |\n do_sudo() {\n local _sudo=\"$(command -v sudo)\"\n if [ -x \"${_sudo}\" ]; then\n \"${_sudo}\" \"$@\"\n else\n \"$@\"\n fi\n }\n\n if [ ! -x /usr/local/bin/download-artifacts.pl ]; then\n do_sudo wget https://raw.githubusercontent.com/OpenNMS/*******-repo/master/script/download-artifacts.pl -O /usr/local/bin/download-artifacts.pl\n do_sudo chmod a+x /usr/local/bin/download-artifacts.pl\n fi\n\n do_sudo apt-get -y --allow-releaseinfo-change update && \\\n do_sudo apt-get -y -q --no-install-recommends install \\\n libdatetime-format-iso8601-perl \\\n libjson-pp-perl \\\n libwww-perl \\\n liblwp-protocol-https-perl \\\n liblwp-useragent-determined-perl\n\nworkflows:\n combined-builds:\n jobs:\n - build:\n context:\n - CircleCI\n - gpg-signing\n filters:\n branches:\n ignore:\n - /^from-foundation.*/\n post-steps:\n - integration-test:\n context:\n - CircleCI\n - docker-publish-account\n requires:\n - build\n post-steps:\n - build-debian:\n context:\n - CircleCI\n - gpg-signing\n requires:\n - build\n post-steps:\n - build-rpm:\n context:\n - CircleCI\n - gpg-signing\n requires:\n - build\n post-steps:\n - horizon-image-single-arch:\n context:\n - CircleCI\n - docker-publish-account\n matrix:\n parameters:\n architecture: ['linux/arm64']\n requires:\n - smoke-test-core\n - smoke-test-minion\n - smoke-test-sentinel\n post-steps:\n - horizon-image-single-arch-linux-amd64:\n context:\n - CircleCI\n - docker-publish-account\n requires:\n - tarball-assembly-only\n post-steps:\n - 
minion-image-single-arch:\n context:\n - CircleCI\n - docker-publish-account\n matrix:\n parameters:\n architecture: ['linux/arm64']\n requires:\n - smoke-test-core\n - smoke-test-minion\n - smoke-test-sentinel\n post-steps:\n - minion-image-single-arch-linux-amd64:\n context:\n - CircleCI\n - docker-publish-account\n requires:\n - tarball-assembly-only\n post-steps:\n - sentinel-image-single-arch:\n context:\n - CircleCI\n - docker-publish-account\n matrix:\n parameters:\n architecture: ['linux/arm64']\n requires:\n - smoke-test-core\n - smoke-test-minion\n - smoke-test-sentinel\n post-steps:\n - sentinel-image-single-arch-linux-amd64:\n context:\n - CircleCI\n - docker-publish-account\n requires:\n - tarball-assembly-only\n post-steps:\n - smoke-test-core:\n context:\n - CircleCI\n - docker-publish-account\n requires:\n - horizon-image-single-arch-linux-amd64\n - sentinel-image-single-arch-linux-amd64\n - minion-image-single-arch-linux-amd64\n post-steps:\n - smoke-test-minion:\n context:\n - CircleCI\n - docker-publish-account\n requires:\n - horizon-image-single-arch-linux-amd64\n - minion-image-single-arch-linux-amd64\n - sentinel-image-single-arch-linux-amd64\n post-steps:\n - smoke-test-sentinel:\n context:\n - CircleCI\n - docker-publish-account\n requires:\n - horizon-image-single-arch-linux-amd64\n - minion-image-single-arch-linux-amd64\n - sentinel-image-single-arch-linux-amd64\n post-steps:\n - tarball-assembly-only:\n context:\n - CircleCI\n requires:\n - build\n post-steps:\n\njobs:\n build:\n executor: build-executor\n # Building currently requires the xlarge containers in order for the webpack compilation\n # in the core/web-assets module to complete reliably\n resource_class: xlarge\n steps:\n - run-build:\n number-vcpu: 8\n build-debian:\n executor: build-executor\n resource_class: large\n steps:\n - prep-package-build\n - run:\n name: Build Debian Packages\n command: |\n export NODE_OPTIONS=--max_old_space_size=1024\n export CCI_MAXCPU=2\n export 
MAVEN_OPTS=\"-Xmx5g -XX:ReservedCodeCacheSize=1g -XX:+TieredCompilation\"\n export DEBEMAIL=\"*******@*******.org\"\n export DEBFULLNAME=\"OpenNMS Build Account\"\n .circleci/scripts/makedeb.sh *******\n .circleci/scripts/makedeb.sh minion\n .circleci/scripts/makedeb.sh sentinel\n - sign-packages/sign-debs:\n skip_if_forked_pr: true\n gnupg_home: ~/tmp/gpg\n gnupg_key: *******@*******.org\n packages: target/debs/*.deb\n - run:\n name: Gather system logs\n when: always\n command: |\n mkdir -p ~/build-results/system-logs\n (dmesg || :) > ~/build-results/system-logs/dmesg 2>&1\n (ps auxf || :) > ~/build-results/system-logs/ps 2>&1\n (free -m || :) > ~/build-results/system-logs/free 2>&1\n (docker stats --no-stream || :) > ~/build-results/system-logs/docker_stats 2>&1\n - store_artifacts:\n when: always\n path: ~/build-results\n destination: build-results\n - store_artifacts:\n path: ~/project/target/debs\n destination: debs\n - cache-workflow-assets:\n cache_prefix: deb\n source_path: target/debs\n build-rpm:\n executor: build-executor\n resource_class: large\n steps:\n - prep-package-build\n - run:\n name: Build RPMs\n command: |\n export NODE_OPTIONS=--max_old_space_size=1024\n export CCI_MAXCPU=2\n export MAVEN_OPTS=\"-Xmx5g -XX:ReservedCodeCacheSize=1g -XX:+TieredCompilation\"\n .circleci/scripts/makerpm.sh tools/packages/*******/*******.spec\n .circleci/scripts/makerpm.sh tools/packages/minion/minion.spec\n .circleci/scripts/makerpm.sh tools/packages/sentinel/sentinel.spec\n - sign-packages/sign-rpms:\n skip_if_forked_pr: true\n gnupg_home: ~/tmp/gpg\n gnupg_key: *******@*******.org\n packages: target/rpm/RPMS/noarch/*.rpm\n - store_artifacts:\n path: ~/project/target/rpm/RPMS/noarch\n destination: rpms\n - cache-workflow-assets:\n cache_prefix: rpm\n source_path: target/rpm/RPMS/noarch\n empty:\n executor: build-executor\n # Building currently requires the xlarge containers in order for the webpack compilation\n # in the core/web-assets module to complete 
reliably\n resource_class: small\n steps:\n - run-empty\n build-ui:\n executor: ui-executor\n steps:\n - checkout\n - run:\n name: Prebuild\n command: |\n yarn global add node-gyp\n - run:\n name: Build\n command: |\n cd ui && yarn install && yarn build && yarn test\n build-docs:\n executor: docs-executor\n steps:\n - cached-checkout\n - run:\n name: Validate Xrefs in docs\n command: |\n NODE_PATH=\"$(npm -g root)\" antora --generator @antora/xref-validator antora-playbook-local.yml\n - run:\n name: Build docs with Antora\n command: | \n DOCSEARCH_ENABLED=true DOCSEARCH_ENGINE=lunr NODE_PATH=\"$(npm -g root)\" antora --stacktrace generate antora-playbook-local.yml\n - run:\n name: Rename Site zip file to include the version\n command: | \n export OPENNMS_VERSION=\"$(.circleci/scripts/pom2version.sh pom.xml)\"\n mkdir ~/artifacts\n cp ~/project/build/site.zip ~/artifacts/site-$OPENNMS_VERSION.zip\n - store_artifacts:\n path: ~/artifacts\n destination: /\n \n tarball-assembly-only:\n executor: build-executor\n resource_class: large\n parameters:\n number-vcpu:\n default: 6\n type: integer\n vaadin-javamaxmem:\n default: 1g\n type: string\n steps:\n - cached-checkout\n - attach_workspace:\n at: ~/\n - restore-maven-cache\n - run:\n name: Assemble tarballs and related artifacts\n command: |\n ulimit -n 65536 || :\n export MAVEN_OPTS=\"-Xmx8g -XX:ReservedCodeCacheSize=1g -XX:+TieredCompilation\"\n export MAVEN_ARGS=\"install\"\n export MAVEN_ARGS=\"install\"\n # release branches should enable extra \"production\" stuff like license indexing\n case \"${CIRCLE_BRANCH}\" in\n \"master-\"*|\"release-\"*|\"develop\")\n MAVEN_ARGS=\"-Dbuild.type=production $MAVEN_ARGS\"\n ;;\n esac\n case \"${CIRCLE_BRANCH}\" in\n \"master-\"*)\n MAVEN_ARGS=\"-Dbuild.sbom=false $MAVEN_ARGS\"\n ;;\n esac\n ulimit -n 65536 || :\n ./assemble.pl -DskipTests=true -Dbuild.skip.tarball=false \\\n -Daether.connector.resumeDownloads=false \\\n -Daether.connector.basic.threads=1 \\\n 
-Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \\\n -DvaadinJavaMaxMemory=<< parameters.vaadin-javamaxmem >> \\\n -DmaxCpus=<< parameters.number-vcpu >> \\\n -Denable.license=true \\\n -Pbuild-bamboo \\\n -Prun-expensive-tasks \\\n -D*******.home=/opt/******* \\\n --batch-mode \\\n $MAVEN_ARGS || exit 1\n - run:\n name: Collect Artifacts\n command: |\n mkdir -p target/{artifacts,config-schema,tarballs}\n OPENNMS_VERSION=\"$(.circleci/scripts/pom2version.sh pom.xml)\"\n find ./target -name \"*.tar.gz\" -type f -not -iname '*source*' -exec cp {} \"./target/tarballs/*******-${OPENNMS_VERSION}.tar.gz\" \\;\n find ./*******-full-assembly/target -name \"*-core.tar.gz\" -type f -not -iname '*source*' -exec cp {} \"./target/tarballs/*******-${OPENNMS_VERSION}-core.tar.gz\" \\;\n find ./*******-assemblies/minion/target -name \"*.tar.gz\" -type f -not -iname '*source*' -exec cp {} \"./target/tarballs/minion-${OPENNMS_VERSION}.tar.gz\" \\;\n find ./*******-assemblies/sentinel/target -name \"*.tar.gz\" -type f -not -iname '*source*' -exec cp {} \"./target/tarballs/sentinel-${OPENNMS_VERSION}.tar.gz\" \\;\n cp ./*******-assemblies/xsds/target/*-xsds.tar.gz \"./target/artifacts/*******-${OPENNMS_VERSION}-xsds.tar.gz\"\n cp target/*-source.tar.gz ./target/artifacts/\n cp *******-full-assembly/target/generated-sources/license/THIRD-PARTY.txt ./target/artifacts/\n - store_artifacts:\n when: always\n path: ~/project/target/artifacts\n destination: artifacts\n - store_artifacts:\n when: always\n path: ~/project/target/tarballs\n destination: tarballs\n horizon-image-single-arch:\n executor: base-executor\n resource_class: medium\n parameters:\n architecture:\n type: string\n steps:\n - build-image-single-arch:\n architecture: << parameters.architecture >>\n container_name: horizon\n container_dir: core\n tarball_match: -core\n tarball_path: *******-full-assembly/target/\n - generate-sbom:\n container_dir: core\n 
sentinel-image-single-arch:\n executor: base-executor\n resource_class: medium\n parameters:\n architecture:\n type: string\n steps:\n - build-image-single-arch:\n architecture: << parameters.architecture >>\n container_name: sentinel\n container_dir: sentinel\n tarball_match: sentinel\n tarball_path: /*******-assemblies/sentinel/target\n - generate-sbom:\n container_dir: sentinel\n minion-image-single-arch:\n executor: base-executor\n resource_class: medium\n parameters:\n architecture:\n type: string\n steps:\n - build-image-single-arch:\n architecture: << parameters.architecture >>\n container_name: minion\n container_dir: minion\n tarball_match: minion\n tarball_path: /*******-assemblies/minion/target\n - generate-sbom:\n container_dir: minion\n minion-image-single-arch-linux-amd64:\n executor: base-executor\n resource_class: medium\n steps:\n - build-image-single-arch:\n architecture: linux/amd64\n container_name: minion\n container_dir: minion\n tarball_match: minion\n tarball_path: /*******-assemblies/minion/target\n - run:\n name: copy minion config schema for archiving\n command: |\n cd ~/project\n mkdir -p target/config-schema\n cp *******-container/minion/minion-config-schema.yml target/config-schema/\n - save-artifacts:\n path: target/config-schema/\n location: minion-config-schema\n - generate-sbom:\n container_dir: minion\n minion-image-single-arch-linux-amd64-trivy:\n executor: base-executor\n resource_class: medium\n steps:\n - scan-image-trivy:\n architecture: linux/amd64\n container_name: minion\n container_dir: minion\n tarball_match: minion\n tarball_path: /*******-assemblies/minion/target\n - trivy-analyze\n minion-image-single-arch-linux-amd64-trivy-analyze:\n executor: base-executor\n resource_class: medium\n steps:\n - trivy-analyze\n horizon-image-single-arch-linux-amd64:\n executor: base-executor\n resource_class: medium\n steps:\n - build-image-single-arch:\n architecture: linux/amd64\n container_name: horizon\n container_dir: core\n 
tarball_match: -core\n tarball_path: *******-full-assembly/target/\n - generate-sbom:\n container_dir: core\n horizon-image-single-arch-linux-amd64-trivy:\n executor: base-executor\n resource_class: medium\n steps:\n - scan-image-trivy:\n architecture: linux/amd64\n container_name: horizon\n container_dir: core\n tarball_match: -core\n tarball_path: *******-full-assembly/target/\n - trivy-analyze\n horizon-image-single-arch-linux-amd64-trivy-analyze:\n executor: base-executor\n resource_class: medium\n steps:\n - trivy-analyze\n sentinel-image-single-arch-linux-amd64:\n executor: base-executor\n resource_class: medium\n steps:\n - build-image-single-arch:\n architecture: linux/amd64\n container_name: sentinel\n container_dir: sentinel\n tarball_match: sentinel\n tarball_path: /*******-assemblies/sentinel/target\n - generate-sbom:\n container_dir: sentinel\n sentinel-image-single-arch-linux-amd64-trivy:\n executor: base-executor\n resource_class: medium\n steps:\n - scan-image-trivy:\n architecture: linux/amd64\n container_name: sentinel\n container_dir: sentinel\n tarball_match: sentinel\n tarball_path: /*******-assemblies/sentinel/target\n - trivy-analyze\n sentinel-image-single-arch-linux-amd64-trivy-analyze:\n executor: base-executor\n resource_class: medium\n steps:\n - trivy-analyze\n integration-test:\n executor: integration-test-executor\n parallelism: 8\n resource_class: xlarge\n steps:\n - cached-checkout\n - attach_workspace:\n at: ~/\n - run-integration-tests:\n rerun-failtest-count: 1\n integration-test-with-coverage:\n executor: integration-test-executor\n parallelism: 10\n resource_class: xlarge\n steps:\n - cached-checkout\n - attach_workspace:\n at: ~/\n - run-integration-tests:\n rerun-failtest-count: 0\n failure-option: --fail-never\n changes-only: false\n code-coverage:\n executor: coverage-executor\n resource_class: xlarge\n steps:\n - cached-checkout\n - attach_workspace:\n at: ~/\n - cached-download:\n url: 
https://repo1.maven.org/maven2/org/jacoco/org.jacoco.cli/0.8.11/org.jacoco.cli-0.8.11-nodeps.jar\n file: /tmp/jacoco-cli.jar\n - cached-download:\n url: https://binaries.sonarsource.com/Distribution/sonar-scanner-cli/sonar-scanner-cli-5.0.1.3006.zip\n file: /tmp/sonar-scanner-cli.zip\n - extract-pom-version\n - restore-maven-cache\n - restore-sonar-cache\n - run:\n name: Restore Target Directories (Code Coverage)\n when: always\n command: |\n .circleci/scripts/codecoverage-restore.sh\n - run:\n name: Run SonarQube Code Analysis\n when: always\n command: |\n export MAVEN_OPTS=\"-Xmx12g -XX:ReservedCodeCacheSize=2g -XX:+TieredCompilation -XX:+UseShenandoahGC\"\n .circleci/scripts/sonar.sh\n - save-sonar-cache\n smoke-test-core:\n executor: smoke-test-executor\n parallelism: 10\n resource_class: medium+\n steps:\n - cached-checkout\n - attach_workspace:\n at: ~/\n - run-smoke-tests:\n suite: core\n smoke-test-minion:\n executor: smoke-test-executor\n parallelism: 4\n resource_class: medium+\n steps:\n - cached-checkout\n - attach_workspace:\n at: ~/\n - run-smoke-tests:\n suite: minion\n smoke-test-sentinel:\n executor: smoke-test-executor\n parallelism: 4\n resource_class: medium+\n steps:\n - cached-checkout\n - attach_workspace:\n at: ~/\n - run-smoke-tests:\n suite: sentinel\n smoke-test-minimal:\n executor: smoke-test-executor\n steps:\n - cached-checkout\n - attach_workspace:\n at: ~/\n - run-smoke-tests:\n suite: minimal\n create-merge-foundation-branch:\n executor: base-executor\n steps:\n #- add_ssh_keys:\n # fingerprints:\n # - \"6a:c7:42:e6:e3:26:76:95:f4:0a:7a:5b:9c:b3:19:0e\"\n # #- \"66:9a:2d:a8:ad:7b:cc:7c:d2:ee:55:94:01:72:ac:2a\"\n - run:\n name: \"Branch Merge Parameters\"\n command: |\n echo \"previous: << pipeline.parameters.previous_branch >>, main: << pipeline.parameters.main_branch >>, next: << pipeline.parameters.next_branch >>\"\n - when:\n condition: << pipeline.parameters.next_branch >>\n steps:\n - checkout-for-pushing\n - run:\n name: 
Checkout target branch and merge from source\n command: |\n export GIT_MERGE_AUTOEDIT=no\n git fetch --all\n git checkout << pipeline.parameters.next_branch >>\n git reset --hard origin/<< pipeline.parameters.next_branch >>\n git merge origin/<< pipeline.parameters.main_branch >>\n - run:\n name: Push to github\n command: git push -f origin << pipeline.parameters.next_branch >>:merge-foundation/<< pipeline.parameters.main_branch_label >>-to-<< pipeline.parameters.next_branch_label >>\n create-merge-omega-branch:\n executor: base-executor\n steps:\n - when:\n condition: << pipeline.parameters.main_branch >>\n steps:\n - restore_cache:\n keys:\n - omega-v2-{{ .Branch }}-{{ .Revision }}\n - omega-v2-{{ .Branch }}-\n - omega-v2-\n - checkout-for-pushing\n - run:\n name: Add Omega remote if necessary\n command: |\n REMOTE_OMEGA=\"$(git remote | grep -c -E '^omega$' || :)\"\n if [ \"$REMOTE_OMEGA\" -eq 0 ]; then\n git remote add omega git@github.com:OpenNMS/*******-omega.git\n fi\n - run:\n name: git fetch omega\n command: |\n git fetch omega\n - save_cache:\n key: omega-v2-{{ .Branch }}-{{ .Revision }}\n paths:\n - \".git\"\n - run:\n name: Checkout target branch and merge from source\n command: |\n if [ \"$CIRCLE_PROJECT_REPONAME\" == \"*******-omega\" ]; then\n exit 0\n fi\n\n export GIT_MERGE_AUTOEDIT=no\n if git rev-parse << pipeline.parameters.main_branch >> >/dev/null 2>&1; then\n git checkout << pipeline.parameters.main_branch >>\n else\n git checkout -b << pipeline.parameters.main_branch >> omega/<< pipeline.parameters.main_branch >>\n fi\n git reset --hard omega/<< pipeline.parameters.main_branch >>\n git merge origin/<< pipeline.parameters.main_branch >>\n - run:\n name: Push to Omega github\n command: git push -f omega << pipeline.parameters.main_branch >>:<< pipeline.parameters.main_branch >>\n # note, this is always run as part of the _next_ branch\n # for example, if main_branch is `foundation-2016` and next_branch is `foundation-2017`,\n # it will include 
the contents of the `foundation-2017` branch, thus we need to actually\n # look _backwards_ to the previous_branch and main_branch to merge the correct bits.\n merge-foundation-branch:\n executor: base-executor\n steps:\n #- add_ssh_keys:\n # fingerprints:\n # - \"6a:c7:42:e6:e3:26:76:95:f4:0a:7a:5b:9c:b3:19:0e\"\n # #- \"66:9a:2d:a8:ad:7b:cc:7c:d2:ee:55:94:01:72:ac:2a\"\n - run:\n name: \"Branch Merge Parameters\"\n command: |\n echo \"previous: << pipeline.parameters.previous_branch >>, main: << pipeline.parameters.main_branch >>, next: << pipeline.parameters.next_branch >>\"\n - when:\n condition: << pipeline.parameters.previous_branch >>\n steps:\n - checkout-for-pushing\n - run:\n name: Checkout target and merge with merge branch\n command: |\n export GIT_MERGE_AUTOEDIT=no\n git fetch --all\n git checkout << pipeline.parameters.main_branch >>\n git reset --hard origin/<< pipeline.parameters.main_branch >>\n git merge origin/merge-foundation/<< pipeline.parameters.previous_branch_label >>-to-<< pipeline.parameters.main_branch_label >>\n - run:\n name: Push to github\n command: git push origin << pipeline.parameters.main_branch >>:<< pipeline.parameters.main_branch >>\n create-merge-meridian-branch:\n executor: base-executor\n steps:\n - when:\n condition: << pipeline.parameters.main_branch >>\n steps:\n #- add_ssh_keys:\n # fingerprints:\n # - \"6a:c7:42:e6:e3:26:76:95:f4:0a:7a:5b:9c:b3:19:0e\"\n # #- \"4a:0a:cb:11:a3:33:b1:14:e9:cb:db:41:76:fa:a3:bf\"\n - restore_cache:\n keys:\n - meridian-v2-{{ .Branch }}-{{ .Revision }}\n - meridian-v2-{{ .Branch }}-\n - meridian-v2-\n - checkout-for-pushing\n - run:\n name: Add Meridian remote if necessary\n command: |\n REMOTE_MERIDIAN=\"$(git remote | grep -c -E '^meridian$' || :)\"\n if [ \"$REMOTE_MERIDIAN\" -eq 0 ]; then\n git remote add meridian git@github.com:OpenNMS/*******-prime.git\n fi\n - run:\n name: git fetch meridian\n command: |\n git fetch meridian\n - save_cache:\n key: meridian-v2-{{ .Branch }}-{{ 
.Revision }}\n paths:\n - \".git\"\n - run:\n name: Checkout target branch and merge from source\n command: |\n export GIT_MERGE_AUTOEDIT=no\n if git rev-parse from-<< pipeline.parameters.main_branch >> >/dev/null 2>&1; then\n git checkout from-<< pipeline.parameters.main_branch >>\n else\n git checkout -b from-<< pipeline.parameters.main_branch >> meridian/from-<< pipeline.parameters.main_branch >>\n fi\n git reset --hard meridian/from-<< pipeline.parameters.main_branch >>\n git merge origin/<< pipeline.parameters.main_branch >>\n - run:\n name: Push to Meridian github\n command: git push -f meridian from-<< pipeline.parameters.main_branch >>:from-<< pipeline.parameters.main_branch >>\n merge-poweredby-branch:\n executor: base-executor\n steps:\n - when:\n condition: << pipeline.parameters.main_branch >>\n steps:\n #- add_ssh_keys:\n # fingerprints:\n # - \"6a:c7:42:e6:e3:26:76:95:f4:0a:7a:5b:9c:b3:19:0e\"\n # #- \"aa:e8:af:11:e4:78:e7:75:b7:a1:69:d0:c8:17:0c:7a\"\n - restore_cache:\n keys:\n - poweredby-v2-{{ .Branch }}-{{ .Revision }}\n - poweredby-v2-{{ .Branch }}-\n - poweredby-v2-\n - checkout-for-pushing\n - run:\n name: Merge Foundation to PoweredBy\n command: .circleci/scripts/merge-poweredby.sh\n - save_cache:\n key: poweredby-v2-{{ .Branch }}-{{ .Revision }}\n paths:\n - \".git\"\n publish:\n docker:\n - image: cimg/python:3.10\n resource_class: small\n environment:\n DOCKER_CLI_EXPERIMENTAL: enabled\n steps:\n - shallow-clone\n - setup_remote_docker\n - cloudsmith/ensure-api-key\n - cloudsmith/install-cli\n - cached-download:\n url: https://github.com/theupdateframework/notary/releases/download/v0.6.1/notary-Linux-amd64\n file: /usr/local/bin/notary\n - download-download-artifacts\n - run:\n name: download resources from parent jobs\n command: |\n for TYPE in oci rpm deb yml; do\n download-artifacts.pl \\\n --vault-layout \\\n --include-failed \\\n --ci \\\n --workflow=\"${CIRCLE_WORKFLOW_ID}\" \\\n \"${TYPE}\" \\\n \"${CIRCLE_BRANCH}\" \\\n 
/tmp/artifacts\n download-artifacts.pl \\\n --vault-layout \\\n --include-failed \\\n --ci \\\n --workflow=\"${CIRCLE_WORKFLOW_ID}\" \\\n --match=\"sbom\" \\\n xml \\\n \"${CIRCLE_BRANCH}\" \\\n /tmp/artifacts\n done\n - run:\n name: Import OCI Files\n command: |\n find /tmp/artifacts/oci -name \\*.oci | while read -r OCI; do\n docker image load --quiet -i \"${OCI}\"\n done\n - run:\n name: Publish to Docker Hub\n command: .circleci/scripts/publish-dockerhub.sh\n - run:\n name: Publish to Cloudsmith\n command: .circleci/scripts/publish-cloudsmith.sh\n# - run:\n# name: Publish to Azure\n# command: .circleci/scripts/publish-azure.sh\n publish-maven:\n executor: build-executor\n resource_class: xlarge\n steps:\n - cached-checkout\n - attach_workspace:\n at: ~/\n - restore-maven-cache\n - run:\n name: Create Repository Bundle\n no_output_timeout: 80m\n command: |\n ulimit -n 65536 || :\n mkdir -p ~/.m2\n echo \"${GPG_SECRET_KEY}\" | base64 -d > ~/.m2/sign-key.asc\n export SIGN_KEY_PASS=\"$(echo \"${GPG_PASSPHRASE}\" | base64 -d)\"\n export MAVEN_OPTS=\"-Xmx8g -XX:ReservedCodeCacheSize=1g -XX:+TieredCompilation\"\n # release branches should enable extra \"production\" stuff like license indexing\n case \"${CIRCLE_BRANCH}\" in\n \"master-\"*|\"release-\"*|\"develop\")\n # allow these branches to pass through, everything else should skip\n ;;\n *)\n echo \"deployment should only happen on major release branches\"\n exit 0\n ;;\n esac\n cd deploy\n\n # Use Maven Central Plugin to build the repository layout\n ../compile.pl \\\n -s settings.xml \\\n -DskipTests=true \\\n -DskipITs=true \\\n -DskipSurefire=true \\\n -DskipFailsafe=true \\\n -Dbuild.sbom=false \\\n -Dbuild.skip.tarball=true \\\n -Daether.connector.resumeDownloads=false \\\n -Daether.connector.basic.threads=1 \\\n -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \\\n -DvaadinJavaMaxMemory=2g \\\n -DmaxCpus=8 \\\n -Prun-expensive-tasks \\\n -Pproduction \\\n -Prelease 
\\\n --batch-mode \\\n -DnexusUrl=\"https://central.sonatype.com/\" \\\n -DpublishingServerId=\"central-portal\" \\\n -DskipPublishing=true \\\n install \\\n org.sonatype.central:central-publishing-maven-plugin:publish \\\n || exit 1\n\n sudo apt update\n sudo apt install -y python3 python3-pip python3-venv\n \n if [[ $(command -v cloudsmith) == \"\" ]]; then\n cd /tmp\n python3 -m venv venv\n export PATH=/tmp/venv/bin:$PATH\n /tmp/venv/bin/pip3 install cloudsmith-cli\n else\n echo \"Cloudsmith CLI is already installed.\"\n fi\n\n ROOT_VERSION=$(~/project/.circleci/scripts/pom2version.sh ~/project/pom.xml || echo \"0.0.0\")\n\n if [[ \"$ROOT_VERSION\" == *\"-SNAPSHOT\" ]]; then\n ln -s ~/project/deploy/target/central-deferred ~/project/deploy/target/central-publishing\n cd ~/project/deploy/target/central-deferred\n zip -r \"central-bundle.zip\" org\n rm -r org\n fi\n ~/project/.circleci/scripts/publish-maven-cloudsmith.sh\n \n \n\nexecutors:\n base-executor:\n docker:\n - image: cimg/base:stable-20.04\n build-executor:\n docker:\n - image: *******/build-env:circleci-ubuntu-jdk17\n coverage-executor:\n docker:\n - image: *******/build-env:circleci-ubuntu-jdk17\n docs-executor:\n docker:\n - image: *******/antora:3.1.4-b10433\n integration-test-executor:\n machine:\n image: ubuntu-2204:current\n smoke-test-executor:\n machine:\n image: ubuntu-2204:current\n ui-executor:\n docker:\n - image: cimg/node:18.20.2\n\n# NOTE: the \"_label\" versions of these are for the case when your source or target\n# branches have slashes in them, that way the merge branch gets created properly\nparameters:\n minimal:\n description: whether to do a minimal (build-and-merge only) build\n type: boolean\n default: false\n previous_branch:\n description: the previous branch, if any\n type: string\n default: release-34.x\n previous_branch_label:\n description: the previous branch, if any (escaped, no slashes)\n type: string\n default: release-34.x\n main_branch:\n description: the auto-merge 
main branch\n type: string\n default: develop\n main_branch_label:\n description: the auto-merge main branch (escaped, no slashes)\n type: string\n default: develop\n next_branch:\n description: the auto-merge target branch\n type: string\n default: \"\"\n next_branch_label:\n description: the auto-merge target branch (escaped, no slashes)\n type: string\n default: \"\"\n\n ### sub-tree \"module\" build setup ###\n trigger-docs:\n description: whether to trigger the documentation build\n type: boolean\n default: false\n trigger-build:\n description: whether to trigger the main build\n type: boolean\n default: false\n trigger-ui:\n description: whether to trigger the featherds UI build\n type: boolean\n default: false\n trigger-coverage:\n description: whether to trigger a code coverage build\n type: boolean\n default: false\n\n ### config.yml parameters for when triggers are used (not used in main.yml, but needed defined here for trigger executions) ###\n trigger-prebuild:\n type: boolean\n default: false\n trigger-coverage-api:\n type: boolean\n default: false\n", "parameters": { "trigger-build": true } } % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 72841 100 16 100 72825 114 507k --:--:-- --:--:-- --:--:-- 508k {"message":"OK"}