diff --git a/.github/actions/get-ocp-range/action.yaml b/.github/actions/get-ocp-range/action.yaml
new file mode 100644
index 0000000000..1d75e6a776
--- /dev/null
+++ b/.github/actions/get-ocp-range/action.yaml
@@ -0,0 +1,35 @@
+name: 'Get OCP range'
+description: 'Get the range of OCP versions corresponding to the provided range of Kubernetes versions'
+inputs:
+  kube-version-range:
+    description: 'Range of Kubernetes versions'
+    required: true
+outputs:
+  ocp-version-range:
+    description: "Corresponding range of OCP versions"
+    value: ${{ steps.run-get-ocp-range.outputs.ocp-version-range }}
+runs:
+  using: "composite"
+  steps:
+    - name: Setup Go
+      uses: actions/setup-go@v4
+      with:
+        go-version: '>=1.20'
+
+    - name: Install get-ocp-range
+      shell: bash
+      run: go install github.com/opdev/getocprange/cmd/get-ocp-range@latest
+
+    - name: Run get-ocp-range
+      id: run-get-ocp-range
+      shell: bash
+      run: |
+        echo "::debug::Received kubeVersionRange to translate '${{ inputs.kube-version-range }}'"
+        OCP_VERSION_RANGE=$(get-ocp-range '${{ inputs.kube-version-range }}')
+        echo "ocp-version-range=$OCP_VERSION_RANGE" >> $GITHUB_OUTPUT
+        echo "::debug::Successfully translated kubeVersionRange to OCPVersionRange $OCP_VERSION_RANGE"
+
+    - name: Display error message if get-ocp-range failed
+      if: ${{ failure() && steps.run-get-ocp-range.outcome == 'failure' }}
+      shell: bash
+      run: echo "::error file=.github/actions/get-ocp-range/action.yaml::Error running get-ocp-range"
\ No newline at end of file
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 3205bf6cd7..874a34829b 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -5,9 +5,14 @@ on:
     types: [opened, synchronize, reopened, edited, ready_for_review, labeled]
 
 jobs:
-  chart-certification:
-    name: Chart Certification
+  setup:
+    name: Setup CI
     runs-on: ubuntu-22.04
+    outputs:
+      run_build: ${{ steps.check_build_required.outputs.run-build }}
+      verifier-action-image: ${{ steps.set-env.outputs.verifier-action-image }}
+      insecure_skip_tls_verify: ${{ steps.set-env.outputs.insecure_skip_tls_verify }}
+
     if: |
       github.event.pull_request.draft == false &&
       (github.event.action != 'labeled' || github.event.label.name == 'force-publish') &&
@@ -17,7 +22,7 @@ jobs:
         uses: actions/checkout@v3
 
       - name: Set up Python 3.x Part 1
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v4
         with:
           python-version: "3.9"
 
@@ -25,8 +30,10 @@ jobs:
         run: |
          # set up python
           python3 -m venv ve1
-          cd scripts && ../ve1/bin/pip3 install -r requirements.txt && cd ..
-          cd scripts && ../ve1/bin/python3 setup.py install && cd ..
+          cd scripts
+          ../ve1/bin/pip3 install -r requirements.txt
+          ../ve1/bin/python3 setup.py install
+          cd ..
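The composite action above shells out to the `get-ocp-range` CLI from github.com/opdev/getocprange to translate a Kubernetes `kubeVersion` range into the matching OCP version range; this replaces the in-repo `indexannotations.getOCPVersions` lookup that is removed later in this diff. As a rough sketch of the idea only (the version map and function name here are illustrative samples, not the tool's actual table or API):

```python
# Illustration only: the real mapping ships with github.com/opdev/getocprange
# and the values below are sample entries, not the authoritative table.
import semantic_version

SAMPLE_KUBE_TO_OCP = {"1.24": "4.11", "1.25": "4.12", "1.26": "4.13"}


def kube_range_to_ocp_range(kube_version_range):
    """Return an OCP version range matching a Kubernetes semver range."""
    spec = semantic_version.NpmSpec(kube_version_range)
    matched = sorted(
        (ocp for kube, ocp in SAMPLE_KUBE_TO_OCP.items()
         if semantic_version.Version.coerce(kube) in spec),
        key=semantic_version.Version.coerce,
    )
    if not matched:
        return "N/A"
    if len(matched) == 1:
        return matched[0]
    return f"{matched[0]} - {matched[-1]}"


print(kube_range_to_ocp_range(">=1.24.0 <=1.26.0"))  # -> "4.11 - 4.13"
```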
- name: Check for CI changes id: check_ci_changes @@ -89,33 +96,65 @@ jobs: fi echo "insecure_skip_tls_verify=true" >> $GITHUB_OUTPUT + + chart-verifier: + name: Run chart-verifier + runs-on: ubuntu-22.04 + needs: [setup] + + outputs: + report_content: ${{ steps.check_report.outputs.report_content }} + redhat_to_community: ${{ steps.check_report.outputs.redhat_to_community }} + message_file: ${{ steps.pr_comment.outputs.message-file }} + web_catalog_only: ${{ steps.check_pr_content.outputs.web_catalog_only }} + chart_entry_name: ${{ steps.check_pr_content.outputs.chart-entry-name }} + release_tag: ${{ steps.check_pr_content.outputs.release_tag }} + + steps: - name: Checkout - if: ${{ steps.check_build_required.outputs.run-build == 'true' }} + uses: actions/checkout@v3 + + - name: Checkout PR Branch + if: ${{ needs.setup.outputs.run_build == 'true' }} uses: actions/checkout@v3 with: ref: ${{ github.event.pull_request.head.ref }} repository: ${{ github.event.pull_request.head.repo.full_name }} path: "pr-branch" + - name: Set up Python 3.x Part 1 + uses: actions/setup-python@v4 + with: + python-version: "3.9" + + - name: Set up Python 3.x Part 2 + run: | + # set up python + python3 -m venv ve1 + cd scripts + ../ve1/bin/pip3 install -r requirements.txt + ../ve1/bin/python3 setup.py install + cd .. + - name: Check PR Content id: check_pr_content - if: ${{ steps.check_build_required.outputs.run-build == 'true' }} + if: ${{ needs.setup.outputs.run_build == 'true' }} continue-on-error: true env: GITHUB_REF: ${{ github.ref }} BOT_TOKEN: ${{ secrets.BOT_TOKEN }} run: | INDEX_BRANCH=$(if [ "${GITHUB_REF}" = "refs/heads/main" ]; then echo "refs/heads/gh-pages"; else echo "${GITHUB_REF}-gh-pages"; fi) - ./ve1/bin/check-pr-content --index-branch=${INDEX_BRANCH} --repository=${{ github.repository }} --api-url=${{ github.event.pull_request._links.self.href }} + ./ve1/bin/check-pr-content --index-branch=${INDEX_BRANCH} --repository=${{ github.repository }} --api-url=${{ github.event.pull_request._links.self.href }} - name: Add 'content-ok' label - uses: actions/github-script@v3 + uses: actions/github-script@v6 if: ${{ steps.check_pr_content.outcome == 'success'}} continue-on-error: true with: github-token: ${{secrets.GITHUB_TOKEN}} script: | - github.issues.addLabels({ + github.rest.issues.addLabels({ issue_number: context.issue.number, owner: context.repo.owner, repo: context.repo.repo, @@ -123,13 +162,13 @@ jobs: }) - name: Remove 'content-ok' label - uses: actions/github-script@v3 + uses: actions/github-script@v6 if: ${{ steps.check_pr_content.outcome == 'failure' && contains( github.event.pull_request.labels.*.name, 'content-ok') }} continue-on-error: true with: github-token: ${{secrets.GITHUB_TOKEN}} script: | - github.issues.removeLabel({ + github.rest.issues.removeLabel({ issue_number: context.issue.number, owner: context.repo.owner, repo: context.repo.repo, @@ -143,14 +182,14 @@ jobs: exit 1 - name: Remove 'authorized-request' label from PR - uses: actions/github-script@v3 - if: ${{ steps.check_build_required.outputs.run-build == 'true' && contains( github.event.pull_request.labels.*.name, 'authorized-request') }} + uses: actions/github-script@v6 + if: ${{ needs.setup.outputs.run_build == 'true' && contains( github.event.pull_request.labels.*.name, 'authorized-request') }} continue-on-error: true with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | var issue_number = ${{ github.event.number }}; - github.issues.removeLabel({ + github.rest.issues.removeLabel({ owner: context.repo.owner, repo: 
context.repo.repo, issue_number: Number(issue_number), @@ -162,10 +201,10 @@ jobs: with: source: github skip_cache: true - chart-verifier: ${{ steps.set-env.outputs.verifier-action-image }} + chart-verifier: "${{ needs.setup.outputs.verifier-action-image }}" - name: determine verify requirements - if: ${{ steps.check_build_required.outputs.run-build == 'true' }} + if: ${{ needs.setup.outputs.run_build == 'true' }} id: verify_requires env: BOT_TOKEN: ${{ secrets.BOT_TOKEN }} @@ -193,7 +232,7 @@ jobs: with: openshift_server_url: ${{ steps.login-params.outputs.API_SERVER }} openshift_token: ${{ secrets.CLUSTER_TOKEN }} - insecure_skip_tls_verify: ${{ steps.set-env.outputs.insecure_skip_tls_verify }} + insecure_skip_tls_verify: ${{ needs.setup.outputs.insecure_skip_tls_verify }} - name: create service account id: create_service_account @@ -206,7 +245,7 @@ jobs: echo "delete_namespace=true" >> $GITHUB_OUTPUT echo $KUBECONFIG - - uses: redhat-actions/chart-verifier@v1.1 + - uses: redhat-actions/chart-verifier@v1 id: run-verifier if: ${{ steps.verify_requires.outputs.report_needed == 'true' }} with: @@ -222,20 +261,53 @@ jobs: error_message="The chart verifier returned an error when trying to obtain a verification report for the chart." echo "verifier_error_message=$error_message" >> $GITHUB_OUTPUT + - name: Get profile version set in report provided by the user + id: get-profile-version + if: ${{ needs.setup.outputs.run_build == 'true' && steps.verify_requires.outputs.report_provided == 'true' }} + uses: mikefarah/yq@v4.35.1 + with: + cmd: yq '.metadata.tool.profile.version' ${{ format('./pr-branch/{0}', steps.verify_requires.outputs.provided_report_relative_path) }} + + - name: Get the range of Kubernetes versions set in the report provided by the user + id: get-kube-range + if: ${{ needs.setup.outputs.run_build == 'true' && steps.verify_requires.outputs.report_provided == 'true' }} + continue-on-error: true + uses: mikefarah/yq@v4.35.1 + with: + cmd: yq '.metadata.chart.kubeversion' ${{ format('./pr-branch/{0}', steps.verify_requires.outputs.provided_report_relative_path) }} + + - name: Get the corresponding range of OCP versions + id: get-ocp-range + if: ${{ needs.setup.outputs.run_build == 'true' && steps.verify_requires.outputs.report_provided == 'true' }} + continue-on-error: true + uses: ./.github/actions/get-ocp-range + with: + kube-version-range: ${{ steps.get-kube-range.outputs.result }} + + - name: Only ignore errors in get-ocp-range for profile in version v1.0 + if: ${{ (steps.get-kube-range.outcome == 'failure' || steps.get-ocp-range.outcome == 'failure') && steps.get-profile-version.outputs.result != 'v1.0' }} + run: | + echo "::error file=.github/workflows/build.yaml::Failure in get-ocp-range, mandatory for profile version ${{ steps.get-profile-version.outputs.result }}" + exit 1 + - name: Check Report id: check_report - if: ${{ steps.check_build_required.outputs.run-build == 'true' }} + if: ${{ needs.setup.outputs.run_build == 'true' }} env: BOT_TOKEN: ${{ secrets.BOT_TOKEN }} VENDOR_TYPE: ${{ steps.check_pr_content.outputs.category }} - WEB_CATALOG_ONLY: ${{ steps.check_pr_content.outputs.webCatalogOnly }} + WEB_CATALOG_ONLY: ${{ steps.check_pr_content.outputs.web_catalog_only }} REPORT_GENERATED: ${{ steps.verify_requires.outputs.report_needed }} GENERATED_REPORT_PATH: ${{ steps.run-verifier.outputs.report_file }} REPORT_SUMMARY_PATH: ${{ steps.run-verifier.outputs.report_info_file }} WORKFLOW_WORKING_DIRECTORY: "../pr" + OCP_VERSION_RANGE: ${{ 
steps.get-ocp-range.outputs.ocp-version-range }} run: | cd pr-branch - ../ve1/bin/chart-pr-review --directory=../pr --verify-user=${{ github.event.pull_request.user.login }} --api-url=${{ github.event.pull_request._links.self.href }} + ../ve1/bin/chart-pr-review \ + --directory=../pr \ + --verify-user=${{ github.event.pull_request.user.login }} \ + --api-url=${{ github.event.pull_request._links.self.href }} cd .. - name: Delete Namespace @@ -244,19 +316,19 @@ jobs: KUBECONFIG: /tmp/ci-kubeconfig run: | API_SERVER=$( echo -n ${{ secrets.API_SERVER }} | base64 -d) - oc login --token=${{ secrets.CLUSTER_TOKEN }} --server=${API_SERVER} --insecure-skip-tls-verify=${{ steps.set-env.outputs.insecure_skip_tls_verify }} + oc login --token=${{ secrets.CLUSTER_TOKEN }} --server=${API_SERVER} --insecure-skip-tls-verify=${{ needs.setup.outputs.insecure_skip_tls_verify }} ve1/bin/sa-for-chart-testing --delete charts-${{ github.event.number }} - name: Save PR artifact env: BOT_TOKEN: ${{ secrets.BOT_TOKEN }} - if: ${{ always() && steps.check_build_required.outputs.run-build == 'true' }} + if: ${{ always() && needs.setup.outputs.run_build == 'true' }} run: | ve1/bin/pr-artifact --directory=./pr --pr-number=${{ github.event.number }} --api-url=${{ github.event.pull_request._links.self.href }} - name: Prepare PR comment id: pr_comment - if: ${{ always() && steps.check_build_required.outputs.run-build == 'true' }} + if: ${{ always() && needs.setup.outputs.run_build == 'true' }} env: BOT_TOKEN: ${{ secrets.BOT_TOKEN }} PR_CONTENT_ERROR_MESSAGE: ${{ steps.check_pr_content.outputs.pr-content-error-message }} @@ -268,15 +340,15 @@ jobs: ve1/bin/pr-comment ${{ steps.check_pr_content.outcome }} ${{ steps.run-verifier.outcome }} ${{ steps.check_report.conclusion }} - name: Comment on PR - if: ${{ always() && steps.check_build_required.outputs.run-build == 'true' }} - uses: actions/github-script@v3 + if: ${{ always() && needs.setup.outputs.run_build == 'true' }} + uses: actions/github-script@v6 with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | var fs = require('fs'); var issue_number = ${{ github.event.number }}; var comment = fs.readFileSync('./pr/comment', {encoding:'utf8', flag:'r'}); - github.issues.createComment({ + github.rest.issues.createComment({ owner: context.repo.owner, repo: context.repo.repo, issue_number: Number(issue_number), @@ -284,8 +356,8 @@ jobs: }); - name: Add 'authorized-request' label to PR - if: ${{ always() && steps.check_pr_content.outcome == 'success' && steps.run-verifier.outcome != 'failure' && steps.check_build_required.outputs.run-build == 'true' }} - uses: actions/github-script@v3 + if: ${{ always() && steps.check_pr_content.outcome == 'success' && steps.run-verifier.outcome != 'failure' && needs.setup.outputs.run_build == 'true' }} + uses: actions/github-script@v6 with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | @@ -294,7 +366,7 @@ jobs: var vendor_label = fs.readFileSync('./pr/vendor'); var chart_name = fs.readFileSync('./pr/chart'); if (vendor_label.toString() !== "" && chart_name.toString() !== "") { - github.issues.addLabels({ + github.rest.issues.addLabels({ issue_number: Number(issue_number), owner: context.repo.owner, repo: context.repo.repo, @@ -321,35 +393,74 @@ jobs: MERGE_LABELS: "" - name: Check for PR merge - if: ${{ steps.check_build_required.outputs.run-build == 'true' }} + if: ${{ needs.setup.outputs.run_build == 'true' }} env: BOT_TOKEN: ${{ secrets.BOT_TOKEN }} run: | ./ve1/bin/check-auto-merge --api-url=${{ 
github.event.pull_request._links.self.href }} + + release: + name: Release Chart + runs-on: ubuntu-22.04 + needs: [setup, chart-verifier] + + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Checkout PR Branch + if: ${{ needs.setup.outputs.run_build == 'true' }} + uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.ref }} + repository: ${{ github.event.pull_request.head.repo.full_name }} + path: "pr-branch" + + - name: Set up Python 3.x Part 1 + uses: actions/setup-python@v4 + with: + python-version: "3.9" + + - name: Set up Python 3.x Part 2 + run: | + # set up python + python3 -m venv ve1 + cd scripts + ../ve1/bin/pip3 install -r requirements.txt + ../ve1/bin/python3 setup.py install + cd .. + + - name: install chart verifier for action + uses: redhat-actions/openshift-tools-installer@v1 + with: + source: github + skip_cache: true + chart-verifier: ${{ needs.setup.outputs.verifier-action-image }} + - name: Block until there is no running workflow - if: ${{ steps.check_build_required.outputs.run-build == 'true' }} + if: ${{ needs.setup.outputs.run_build == 'true' }} uses: softprops/turnstyle@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Configure Git - if: ${{ steps.check_build_required.outputs.run-build == 'true' }} + if: ${{ needs.setup.outputs.run_build == 'true' }} run: | git config --global user.name "github-actions[bot]" git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com" - name: Release Charts - if: ${{ steps.check_build_required.outputs.run-build == 'true' }} + if: ${{ needs.setup.outputs.run_build == 'true' }} env: BOT_TOKEN: ${{ secrets.BOT_TOKEN }} GITHUB_REF: ${{ github.ref }} - GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} - REPORT_CONTENT: ${{steps.check_report.outputs.report_content}} - CHART_ENTRY_NAME: ${{ steps.check_pr_content.outputs.chart-entry-name }} - CHART_NAME_WITH_VERSION: ${{ steps.check_pr_content.outputs.chart-name-with-version }} - REDHAT_TO_COMMUNITY: ${{ steps.check_report.outputs.redhat_to_community }} - WEB_CATALOG_ONLY: ${{ steps.check_pr_content.outputs.webCatalogOnly }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + REPORT_CONTENT: ${{ needs.chart-verifier.outputs.report_content }} + CHART_ENTRY_NAME: ${{ needs.chart-verifier.outputs.chart_entry_name }} + REDHAT_TO_COMMUNITY: ${{ needs.chart-verifier.outputs.redhat_to_community }} + WEB_CATALOG_ONLY: ${{ needs.chart-verifier.outputs.web_catalog_only }} + OCP_VERSION_RANGE: ${{ steps.get-ocp-range.outputs.ocp-version-range }} id: release-charts run: | tar zxvf ./scripts/dependencies/helm-chart-releaser/chart-releaser_1.2.0_linux_amd64.tar.gz @@ -357,15 +468,19 @@ jobs: INDEX_BRANCH=$(if [ "${GITHUB_REF}" = "refs/heads/main" ]; then echo "refs/heads/gh-pages"; else echo "${GITHUB_REF}-gh-pages"; fi) CWD=`pwd` cd pr-branch - ../ve1/bin/chart-repo-manager --repository=${{ github.repository }} --index-branch=${INDEX_BRANCH} --api-url=${{ github.event.pull_request._links.self.href }} --pr-number=${{ github.event.number }} + ../ve1/bin/chart-repo-manager \ + --repository=${{ github.repository }} \ + --index-branch=${INDEX_BRANCH} \ + --api-url=${{ github.event.pull_request._links.self.href }} \ + --pr-number=${{ github.event.number }} cd ${CWD} - name: Release - if: ${{ steps.release-charts.outputs.tag != '' }} + if: ${{ needs.chart-verifier.outputs.web_catalog_only == 'False' }} uses: softprops/action-gh-release@v0.1.12 continue-on-error: true with: - tag_name: ${{ steps.release-charts.outputs.tag }} + tag_name: ${{ 
needs.chart-verifier.outputs.release_tag }} files: | ${{ steps.release-charts.outputs.report_file }} ${{ steps.release-charts.outputs.public_key_file }} @@ -374,7 +489,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Add metrics - if: ${{ always() && steps.check_build_required.outputs.run-build == 'true' && env.GITHUB_REPOSITORY != 'openshift-helm-charts/sandbox' }} + if: ${{ always() && needs.setup.outputs.run_build == 'true' && env.GITHUB_REPOSITORY != 'openshift-helm-charts/sandbox' }} env: BOT_TOKEN: ${{ secrets.BOT_TOKEN }} run: | @@ -392,7 +507,7 @@ jobs: echo "add PR run metric" ve1/bin/metrics --write-key="${WRITE_KEY}" \ --metric-type="pull_request" \ - --message-file="${{ steps.pr_comment.outputs.message-file }}" \ + --message-file="${{ needs.chart-verifier.outputs.message_file }}" \ --pr-number="${{ github.event.number }}" \ --pr-action="${{ github.event.action }}" \ --repository="${GITHUB_REPOSITORY}" \ diff --git a/.github/workflows/python-style.yml b/.github/workflows/python-style.yml new file mode 100644 index 0000000000..4a7c34b252 --- /dev/null +++ b/.github/workflows/python-style.yml @@ -0,0 +1,29 @@ +name: Python Style + +on: + pull_request: + paths: + # Only trigger on core script changes + - 'scripts/**.py' + +jobs: + enforce: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + - name: Set up Python 3.x Part 1 + uses: actions/setup-python@v4 + with: + python-version: "3.9" + - name: Install style tooling + working-directory: scripts + run: make venv.codestyle + - name: Run formatter + working-directory: scripts + run: make ci.format + # Temporarily auto-pass linting until we are able to manually review and + # address. + - name: Run linter + working-directory: scripts + run: make lint || true \ No newline at end of file diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 03744250c5..198e2b304b 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -104,7 +104,7 @@ jobs: PR_NUMBER: ${{ github.event.pull_request.number }} PR_BODY: "Test triggered by ${{ github.event.pull_request.html_url }}." run: | - echo "Full test in pr : {{steps.check_request.outputs.full_tests_in_pr }}" + echo "Full test in pr : ${{ steps.check_request.outputs.full_tests_in_pr }}" if ${{steps.check_if_release_pr.outputs.charts_release_branch == 'true' || steps.check_request.outputs.full_tests_in_pr == 'true' }} ; then echo "Release PR from dev to charts, oer PR with new full test, so running full tests" ve1/bin/behave tests/functional/behave_features/ --tags=full --logging-level=WARNING --no-capture --no-color diff --git a/docs/README.md b/docs/README.md index 9f0831c0db..b3b704a210 100644 --- a/docs/README.md +++ b/docs/README.md @@ -23,36 +23,32 @@ submit a chart and the report together. 
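The new Python Style workflow above defers to the `scripts/Makefile` introduced later in this diff: `make ci.format` re-runs the formatter and then fails if `git diff --exit-code` reports changes. A minimal Python sketch of that same gate, for illustration only (the helper name is made up; the real check is the Make target):

```python
# Sketch of the "format, then fail if anything changed" gate used by ci.format.
# Assumes black is available via `make format` and the tree is a git checkout.
import subprocess
import sys


def enforce_formatting(path="scripts"):
    # Reformat the tracked Python sources in place.
    subprocess.run(["make", "format"], cwd=path, check=True)
    # A non-empty diff means the formatter had to fix something: fail the job.
    result = subprocess.run(["git", "diff", "--exit-code"], cwd=path)
    if result.returncode != 0:
        sys.exit("Python files are not formatted; run 'make format' and commit the result.")


if __name__ == "__main__":
    enforce_formatting()
```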
**Table of Contents:** -- [OpenShift Helm Charts Repository](#openshift-helm-charts-repository) - - [Submitting Chart Related Changes](#submitting-chart-related-changes) - - [Preparation](#preparation) - - [Submitting a Chart without Chart Verification Report](#submitting-a-chart-without-chart-verification-report) - - [Submitting a Chart Verification Report without the Chart](#submitting-a-chart-verification-report-without-the-chart) - - [Signed Report](#signed-report) - - [Report for a Signed chart](#report-for-a-signed-chart) - - [Submitting a Chart Verification Report with the Chart](#submitting-a-chart-verification-report-with-the-chart) - - [Signed Report](#signed-report-1) - - [Signed Chart](#signed-chart) - - [Post Submission Manual Review](#post-submission-manual-review) - - [Troubleshooting Pull Request Failures](#troubleshooting-pull-request-failures) - - [Error when submitting files not part of any chart](#error-when-submitting-files-not-part-of-any-chart) - - [Missing OWNERS file in the chart directory](#missing-owners-file-in-the-chart-directory) - - [Pull request author is not part of OWNERS file](#pull-request-author-is-not-part-of-owners-file) - - [Vendor label mismatch with the directory structure](#vendor-label-mismatch-with-the-directory-structure) - - [Chart name mismatch with the directory structure](#chart-name-mismatch-with-the-directory-structure) - - [Error when submitting both chart source and tarball](#error-when-submitting-both-chart-source-and-tarball) - - [Error when submitting files not related to any chart](#error-when-submitting-files-not-related-to-any-chart) - - [Error when digest in the report is not matching with the chart digest](#error-when-digest-in-the-report-is-not-matching-with-the-chart-digest) - - [Error with the chart URL when submitting report](#error-with-the-chart-url-when-submitting-report) - - [Chart name and version mismatch errors](#chart-name-and-version-mismatch-errors) - - [Report failures](#report-failures) - - [Signed chart failures](#signed-chart-failures) - - [Web catalog only delivery](#web-catalog-only-delivery) - - [Frequently Asked Questions](#frequently-asked-questions) - - [Can I test the pull request in my fork before submitting?](#can-i-test-the-pull-request-in-my-fork-before-submitting) - - [Can I use any command-line interface to create pull request?](#can-i-use-any-command-line-interface-to-create-pull-request) - - [How to update OWNERS file?](#how-to-update-owners-file) - - [Support](#support) +* [OpenShift Helm Charts Repository](#openshift-helm-charts-repository) + * [Submitting Chart Related Changes](#submitting-chart-related-changes) + * [Preparation](#preparation) + * [Submitting a Chart without Chart Verification Report](#submitting-a-chart-without-chart-verification-report) + * [Submitting a Chart Verification Report without the Chart](#submitting-a-chart-verification-report-without-the-chart) + * [Submitting a Chart Verification Report with the Chart](#submitting-a-chart-verification-report-with-the-chart) + * [Post Submission Manual Review](#post-submission-manual-review) + * [Troubleshooting Pull Request Failures](#troubleshooting-pull-request-failures) + * [Error when submitting files not part of any chart](#error-when-submitting-files-not-part-of-any-chart) + * [Missing OWNERS file in the chart directory](#missing-owners-file-in-the-chart-directory) + * [Pull request author is not part of OWNERS file](#pull-request-author-is-not-part-of-owners-file) + * [Vendor label mismatch with the directory 
structure](#vendor-label-mismatch-with-the-directory-structure) + * [Chart name mismatch with the directory structure](#chart-name-mismatch-with-the-directory-structure) + * [Error when submitting both chart source and tarball](#error-when-submitting-both-chart-source-and-tarball) + * [Error when submitting files not related to any chart](#error-when-submitting-files-not-related-to-any-chart) + * [Error when digest in the report is not matching with the chart digest](#error-when-digest-in-the-report-is-not-matching-with-the-chart-digest) + * [Error with the chart URL when submitting report](#error-with-the-chart-url-when-submitting-report) + * [Chart name and version mismatch errors](#chart-name-and-version-mismatch-errors) + * [Report failures](#report-failures) + * [Signed chart failures](#signed-chart-failures) + * [Web catalog only delivery](#web-catalog-only-delivery) + * [Frequently Asked Questions](#frequently-asked-questions) + * [Can I test the pull request in my fork before submitting?](#can-i-test-the-pull-request-in-my-fork-before-submitting) + * [Can I use any command-line interface to create pull request?](#can-i-use-any-command-line-interface-to-create-pull-request) + * [How to update OWNERS file?](#how-to-update-owners-file) + * [Support](#support) --- @@ -361,7 +357,7 @@ If the vendor label in the `OWNERS` file is wrong, you can follow the [partner connect documentation][partners] to update the `OWNERS` file. If the directory structure (organization name) is wrong, please reach out to -[Technology Partner Success Desk][partner-acceleration-desk]. +[Technology Partner Success Desk][partner-success-desk]. ### Chart name mismatch with the directory structure @@ -376,7 +372,7 @@ If the chart name in the `OWNERS` file is wrong, you can follow the [partner connect documentation][partners] to update the `OWNERS` file. If the directory structure (chart name) is wrong, please reach out to -[Partner Acceleration Desk][partner-acceleration-desk]. +[Technology Partner Success Desk][partner-success-desk]. ### Error when submitting both chart source and tarball @@ -462,7 +458,7 @@ of [chart-verifier][chart-verifier] tool. ``` If the error is still persisting after upgrading to latest chart-verifier, -please contact [Partner Acceleration Desk][partner-acceleration-desk]. +please contact [Technology Partner Success Desk][partner-success-desk]. If the report has some failure, it will be displayed like this: @@ -519,7 +515,7 @@ There are three methods of distribution for certified helm charts. - Web catalog only - This submission should be report only using a private chart URL. -For more information on the different Helm Chart Distribution methods, see: [Creating a Helm Chart Certification Project](https://access.redhat.com/documentation/en-us/red_hat_software_certification/8.67/html/red_hat_software_certification_workflow_guide/proc_creating-a-helm-chart-project_openshift-sw-cert-workflow-validating-helm-charts-for-certification) +For more information on the different Helm Chart Distribution methods, see: [Creating a Helm Chart Certification Project](https://redhat-connect.gitbook.io/partner-guide-for-red-hat-openshift-and-container/helm-chart-certification/creating-a-helm-chart-certification-project) ## Frequently Asked Questions @@ -539,27 +535,26 @@ Yes, you can use the [GitHub CLI to create pull request][gh-cli-pr]. ### How to update OWNERS file? -Partners can refer to the [partner documentation for Submitting your Helm chart for certification][owner-file]. 
+Partners can refer to the [partner documentation][partners]. For Red Hat and Community charts, submit a PR towards `main` branch with an _OWNERS_ file under your chart directory within your orgranization directory. ## Support You can use the issue tracker in this repository to report bugs. If you are a -partner, please refer to the [Partner Acceleration Desk -documentation][partner-acceleration-desk]. +partner, please refer to the [Technology Partner Success Desk +documentation][partner-success-desk]. --- [^Top](#openshift-helm-charts-repository) -[partners]: https://access.redhat.com/documentation/en-us/red_hat_software_certification/ -[owner-file]: https://access.redhat.com/documentation/en-us/red_hat_software_certification/8.67/html/red_hat_software_certification_workflow_guide/submitting-your-helm-chart-for-certification_openshift-sw-cert-workflow-complete-pre-certification-checklist-for-helmcharts +[partners]: https://redhat-connect.gitbook.io/certification-guides/ [chart-verifier]: https://github.com/redhat-certification/chart-verifier [index-url]: https://charts.openshift.io [pat]: https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token [encyrpted-secret]: https://docs.github.com/en/actions/reference/encrypted-secrets [gh-cli-pr]: https://cli.github.com/manual/gh_pr_create -[partner-acceleration-desk]: https://access.redhat.com/articles/6463941 +[partner-success-desk]: https://redhat-connect.gitbook.io/red-hat-partner-connect-general-guide/managing-your-account/getting-help/technology-partner-success-desk [new-issue]: https://github.com/openshift-helm-charts/repo/issues/new/choose [ascii-armor]: https://www.redhat.com/sysadmin/creating-gpg-keypairs diff --git a/scripts/Makefile b/scripts/Makefile new file mode 100644 index 0000000000..eed6497c86 --- /dev/null +++ b/scripts/Makefile @@ -0,0 +1,56 @@ +PY_BIN ?= python3 + +# The virtualenv containing code style tools. +VENV_CODESTYLE = venv.codestyle +VENV_CODESTYLE_BIN = $(VENV_CODESTYLE)/bin + +# The virtualenv containing our CI scripts +VENV_TOOLS = venv.tools +VENV_TOOLS_BIN = $(VENV_TOOLS)/bin + +# This is what we pass to git ls-files. +LS_FILES_INPUT_STR ?= 'src/*.py' + +.PHONY: default +default: format lint + +# The same as format, but will throw a non-zero exit code +# if the formatter had to make changes. +.PHONY: ci.format +ci.format: format + git diff --exit-code + +venv.codestyle: + $(MAKE) venv.codestyle.always-reinstall + +# This target will always install the codestyle venv. +# Useful for development cases. +.PHONY: venv.codestyle.always-reinstall +venv.codestyle.always-reinstall: + $(PY_BIN) -m venv $(VENV_CODESTYLE) + ./$(VENV_CODESTYLE_BIN)/pip install --upgrade \ + black \ + ruff + +.PHONY: format +format: venv.codestyle + ./$(VENV_CODESTYLE_BIN)/black \ + --verbose \ + $$(git ls-files $(LS_FILES_INPUT_STR)) + +.PHONY: lint +lint: venv.codestyle + ./$(VENV_CODESTYLE_BIN)/ruff \ + check \ + $$(git ls-files $(LS_FILES_INPUT_STR)) + +venv.tools: + $(MAKE) venv.tools.always-reinstall + +# This target will always install the tools at the venv. +# Useful for development cases. 
+.PHONY: venv.tools.always-reinstall +venv.tools.always-reinstall: + $(PY_BIN) -m venv $(VENV_TOOLS) + ./$(VENV_TOOLS_BIN)/pip install -r requirements.txt + ./$(VENV_TOOLS_BIN)/python setup.py install diff --git a/scripts/ruff.toml b/scripts/ruff.toml new file mode 100644 index 0000000000..af87f9db59 --- /dev/null +++ b/scripts/ruff.toml @@ -0,0 +1,4 @@ +ignore = [ + "E402", # import ordering (komish): import ordering isn't handled by Black so we need to handle this manually. + "E501", # line length (komish): line length is not enforced by Black so we need to handle these manually. +] \ No newline at end of file diff --git a/scripts/src/chartprreview/chartprreview.py b/scripts/src/chartprreview/chartprreview.py index e705edeb31..4d0f7d5dbc 100644 --- a/scripts/src/chartprreview/chartprreview.py +++ b/scripts/src/chartprreview/chartprreview.py @@ -23,6 +23,7 @@ from report import verifier_report from signedchart import signedchart from pullrequest import prartifact +from reporegex import matchers from tools import gitutils @@ -45,9 +46,22 @@ def get_vendor_type(directory): def get_modified_charts(directory, api_url): + """Get the category, organization, chart name, and new version corresponding to + the chart being added. + + Args: + directory (str): Local directory in which to write the error logs + api_url (str): URL of the GitHub PR + + Returns: + (str, str, str, str): category, organization, chart, and version (e.g. partner, + hashicorp, vault, 1.4.0) + """ print("[INFO] Get modified charts. %s" % directory) files = prartifact.get_modified_files(api_url) - pattern = re.compile(r"charts/(\w+)/([\w-]+)/([\w-]+)/([\w\.-]+)/.*") + pattern = re.compile( + matchers.submission_path_matcher(strict_categories=False) + r"/.*" + ) for file_path in files: m = pattern.match(file_path) if m: @@ -60,6 +74,15 @@ def get_modified_charts(directory, api_url): def verify_user(directory, username, category, organization, chart): + """Check that the user that submitted the PR is in the OWNERS file for this chart. + + Args: + directory (str): Local directory in which to write the error logs + username (str): Github username that submitted the PR. + category (str): Type of profile (community, partners, or redhat) + organization (str): Name of the organization (ex: hashicorp) + chart (str): Name of the chart (ex: vault) + """ print( "[INFO] Verify user. %s, %s, %s, %s" % (username, category, organization, chart) ) @@ -78,8 +101,21 @@ def verify_user(directory, username, category, organization, chart): def check_owners_file_against_directory_structure( - directory, username, category, organization, chart + directory, category, organization, chart ): + """Check that the content of the OWNERS file correspond to the directory structure + the chart is under. + + Following assertion must be true: + - the chart.name key must correspond to the name of the chart directory + - the vendor.label key must correspond to the organization directory + + Args: + directory (str): Local directory in which to write the error logs + category (str): Type of profile (community, partners, or redhat) + organization (str): Name of the organization (ex: hashicorp) + chart (str): Name of the chart (ex: vault) + """ print( "[INFO] Check owners file against directory structure. 
%s, %s, %s" % (category, organization, chart) @@ -105,7 +141,15 @@ def check_owners_file_against_directory_structure( sys.exit(1) -def verify_signature(directory, category, organization, chart, version): +def verify_signature(category, organization, chart, version): + """Verify that the PGP signature (report.yaml.asc) can decrypt report.yaml + + Args: + category (str): Type of profile (community, partners, or redhat) + organization (str): Name of the organization (ex: hashicorp) + chart (str): Name of the chart (ex: vault) + version (str): The version of the chart (ex: 1.4.0) + """ print("[INFO] Verify signature. %s, %s, %s" % (organization, chart, version)) sign = os.path.join( "charts", category, organization, chart, version, "report.yaml.asc" @@ -136,6 +180,18 @@ def verify_signature(directory, category, organization, chart, version): def match_checksum( directory, generated_report_info_path, category, organization, chart, version ): + """Check that the provided report and the generated report have the same chart + digest + + Args: + directory (str): Local directory in which to write the error logs + generated_report_info_path (str): Path to the processed JSON report generated + in the pipeline + category (str): Type of profile (community, partners, or redhat) + organization (str): Name of the organization (ex: hashicorp) + chart (str): Name of the chart (ex: vault) + version (str): The version of the chart (ex: 1.4.0) + """ print("[INFO] Check digests match. %s, %s, %s" % (organization, chart, version)) submitted_report_path = os.path.join( "charts", category, organization, chart, version, "report.yaml" @@ -157,6 +213,13 @@ def match_checksum( def check_url(directory, report_path): + """Check that the chart URL provided in report.yaml is valid and that the chart + digest matches the one provided in report.yaml + + Args: + directory (str): Local directory in which to write the error logs + report_path (str): Path to report.yaml + """ print("[INFO] Check chart_url is a valid url. %s" % report_path) chart_url = report_info.get_report_chart_url(report_path=report_path) @@ -195,6 +258,17 @@ def check_url(directory, report_path): def match_name_and_version( directory, category, organization, chart, version, generated_report_path ): + """Check that the chart name and version in the provided report.yaml and in the + report generated in the pipeline match the underlying directory structure. + + Args: + directory (str): Local directory in which to write the error logs + category (str): Type of profile (community, partners, or redhat) + organization (str): Name of the organization (ex: hashicorp) + chart (str): Name of the chart (ex: vault) + version (str): The version of the chart (ex: 1.4.0) + generated_report_path (str): Path to the report generated in the pipeline + """ print( "[INFO] Check chart has same name and version as directory structure. %s, %s, %s" % (organization, chart, version) @@ -253,6 +327,24 @@ def match_name_and_version( def check_report_success(directory, api_url, report_path, report_info_path, version): + """Check the content of report.yaml + + * Check that the version in the report matches with the directory structure. + * Check that the vendor type in the report matches with the directory structure. + * Check the presence of the required annotations. + * Check that the report doesn't contains failed checks. + * Check that the testedOpenShiftVersion and certifiedOpenShiftVersions labels + contain SemVer compatible versions. 
+ + Also adds the content of report.yaml to the GITHUB_OUTPUT. + + Args: + directory (str): Local directory in which to write the error logs + api_url (str): URL of the GitHub PR + report_path (str): Path to report.yaml + report_info_path (str): Path to processed JSON report + version (str): The version of the chart (ex: 1.4.0) + """ print("[INFO] Check report success. %s" % report_path) data = open(report_path).read() print("[INFO] Full report: ") @@ -413,19 +505,29 @@ def main(): help="API URL for the pull request", ) args = parser.parse_args() + category, organization, chart, version = get_modified_charts( args.directory, args.api_url ) verify_user(args.directory, args.username, category, organization, chart) check_owners_file_against_directory_structure( - args.directory, args.username, category, organization, chart + args.directory, category, organization, chart ) + + report_generated = os.environ.get("REPORT_GENERATED") + generated_report_path = os.environ.get("GENERATED_REPORT_PATH") + generated_report_info_path = os.environ.get("REPORT_SUMMARY_PATH") + env = Env() + web_catalog_only = env.bool("WEB_CATALOG_ONLY", False) + submitted_report_path = os.path.join( "charts", category, organization, chart, version, "report.yaml" ) - if os.path.exists(submitted_report_path): - report_valid, message = verifier_report.validate(submitted_report_path) + ocp_version_range = os.environ.get("OCP_VERSION_RANGE") + report_valid, message = verifier_report.validate( + submitted_report_path, ocp_version_range + ) if not report_valid: msg = f"Submitted report is not valid: {message}" print(f"[ERROR] {msg}") @@ -449,15 +551,8 @@ def main(): "[INFO] PGP key in OWNERS file matches with key digest in report." ) - report_generated = os.environ.get("REPORT_GENERATED") - generated_report_path = os.environ.get("GENERATED_REPORT_PATH") - generated_report_info_path = os.environ.get("REPORT_SUMMARY_PATH") - env = Env() - web_catalog_only = env.bool("WEB_CATALOG_ONLY", False) - - if os.path.exists(submitted_report_path): print("[INFO] Report exists: ", submitted_report_path) - verify_signature(args.directory, category, organization, chart, version) + verify_signature(category, organization, chart, version) report_path = submitted_report_path report_info_path = "" if report_generated and report_generated == "True": diff --git a/scripts/src/chartprreview/chartprreview_test.py b/scripts/src/chartprreview/chartprreview_test.py index e37de1ed81..e641aa5565 100644 --- a/scripts/src/chartprreview/chartprreview_test.py +++ b/scripts/src/chartprreview/chartprreview_test.py @@ -54,7 +54,6 @@ def test_verify_user(): def test_check_owners_file_against_directory_structure(tmpdir): - original_cwd = os.getcwd() p = ( tmpdir.mkdir("charts") .mkdir("partners") @@ -68,17 +67,15 @@ def test_check_owners_file_against_directory_structure(tmpdir): print("new_cwd", new_cwd) with pytest.raises(SystemExit): check_owners_file_against_directory_structure( - "baijum", "partners", "test-org", "test-chart" + "partners", "test-org", "test-chart" ) p.write(owners_with_wrong_chart_name) with pytest.raises(SystemExit): check_owners_file_against_directory_structure( - "baijum", "partners", "test-org", "test-chart" + "partners", "test-org", "test-chart" ) p.write(owners_with_correct_values) - check_owners_file_against_directory_structure( - "baijum", "partners", "test-org", "test-chart" - ) + check_owners_file_against_directory_structure("partners", "test-org", "test-chart") def test_write_error_log(tmpdir): diff --git 
a/scripts/src/chartrepomanager/chartrepomanager.py b/scripts/src/chartrepomanager/chartrepomanager.py index 4300bb6974..28569708b4 100644 --- a/scripts/src/chartrepomanager/chartrepomanager.py +++ b/scripts/src/chartrepomanager/chartrepomanager.py @@ -24,12 +24,15 @@ from chartrepomanager import indexannotations from signedchart import signedchart from pullrequest import prartifact +from reporegex import matchers from tools import gitutils def get_modified_charts(api_url): files = prartifact.get_modified_files(api_url) - pattern = re.compile(r"charts/(\w+)/([\w-]+)/([\w-]+)/([\w\.-]+)/.*") + pattern = re.compile( + matchers.submission_path_matcher(strict_categories=False) + r"/.*" + ) for file_path in files: m = pattern.match(file_path) if m: @@ -243,10 +246,13 @@ def create_index_from_chart( return crt -def create_index_from_report(category, report_path): - print("[INFO] create index from report. %s, %s" % (category, report_path)) +def create_index_from_report(category, ocp_version_range, report_path): + print( + "[INFO] create index from report. %s, %s, %s" + % (category, ocp_version_range, report_path) + ) - annotations = indexannotations.getIndexAnnotations(report_path) + annotations = indexannotations.getIndexAnnotations(ocp_version_range, report_path) print("category:", category) redhat_to_community = bool(os.environ.get("REDHAT_TO_COMMUNITY")) @@ -454,15 +460,15 @@ def update_index_and_push( def update_chart_annotation( - category, organization, chart_file_name, chart, report_path + category, organization, chart_file_name, chart, ocp_version_range, report_path ): print( - "[INFO] Update chart annotation. %s, %s, %s, %s" - % (category, organization, chart_file_name, chart) + "[INFO] Update chart annotation. %s, %s, %s, %s, %s" + % (category, organization, chart_file_name, chart, ocp_version_range) ) dr = tempfile.mkdtemp(prefix="annotations-") - annotations = indexannotations.getIndexAnnotations(report_path) + annotations = indexannotations.getIndexAnnotations(ocp_version_range, report_path) print("category:", category) redhat_to_community = bool(os.environ.get("REDHAT_TO_COMMUNITY")) @@ -570,6 +576,7 @@ def main(): env = Env() web_catalog_only = env.bool("WEB_CATALOG_ONLY", False) + ocp_version_range = os.environ.get("OCP_VERSION_RANGE", "N/A") print(f"[INFO] webCatalogOnly/providerDelivery is {web_catalog_only}") @@ -607,7 +614,12 @@ def main(): print("[INFO] Updating chart annotation") update_chart_annotation( - category, organization, chart_file_name, chart, report_path + category, + organization, + chart_file_name, + chart, + ocp_version_range, + report_path, ) chart_url = f"https://github.com/{args.repository}/releases/download/{organization}-{chart}-{version}/{chart_file_name}" print("[INFO] Helm package was released at %s" % chart_url) @@ -631,20 +643,17 @@ def main(): if signedchart.check_report_for_signed_chart(report_path): public_key_file = get_key_file(category, organization, chart, version) print("[INFO] Creating index from report") - chart_entry, chart_url = create_index_from_report(category, report_path) + chart_entry, chart_url = create_index_from_report( + category, ocp_version_range, report_path + ) if not web_catalog_only: - tag = os.environ.get("CHART_NAME_WITH_VERSION") - if not tag: - print("[ERROR] Internal error: missing chart name with version (tag)") - sys.exit(1) - gitutils.add_output("tag", tag) - current_dir = os.getcwd() gitutils.add_output("report_file", f"{current_dir}/report.yaml") if public_key_file: print(f"[INFO] Add key file for release : 
{current_dir}/{public_key_file}") gitutils.add_output("public_key_file", f"{current_dir}/{public_key_file}") + print("Sleeping for 10 seconds") time.sleep(10) update_index_and_push( diff --git a/scripts/src/chartrepomanager/indexannotations.py b/scripts/src/chartrepomanager/indexannotations.py index 3916e0c5df..9533be960f 100644 --- a/scripts/src/chartrepomanager/indexannotations.py +++ b/scripts/src/chartrepomanager/indexannotations.py @@ -1,110 +1,26 @@ import sys import semantic_version -import requests -import yaml sys.path.append("../") from report import report_info -kubeOpenShiftVersionMap = {} +def getIndexAnnotations(ocp_version_range, report_path): + """Get the annotations set in the report file. -def getKubVersionMap(): - if not kubeOpenShiftVersionMap: - content = requests.get( - "https://github.com/redhat-certification/chart-verifier/blob/main/internal/tool/kubeOpenShiftVersionMap.yaml?raw=true" - ) + This function replaces the certifiedOpenShiftVersions annotation with the + testedOpenShiftVersion annotation. It also adds the + supportedOpenShiftVersions in the case it is not already set. - version_data = yaml.safe_load(content.text) - for kubeVersion in version_data["versions"]: - kubeOpenShiftVersionMap[kubeVersion["kube-version"]] = kubeVersion[ - "ocp-version" - ] + It leaves all other annotations untouched. - return kubeOpenShiftVersionMap + Args: + ocp_version_range (str): Range of supported OCP versions + report_path (str): Path to the report.yaml file - -def getOCPVersions(kubeVersion): - if kubeVersion == "": - return "N/A" - - checkKubeVersion = kubeVersion - - try: - semantic_version.NpmSpec(kubeVersion) - except ValueError: - print( - f"Value error with kubeVersion - NpmSpec : {kubeVersion}, see if it fixable" - ) - - try: - # Kubversion is bad, see if we can fix it - separator = checkKubeVersion.find(" - ") - if separator != -1: - lowVersion = checkKubeVersion[:separator].strip() - highVersion = checkKubeVersion[separator + 3 :].strip() - checkKubeVersion = f"{semantic_version.Version.coerce(lowVersion)} - {semantic_version.Version.coerce(highVersion)}" - else: - firstDigit = -1 - for i, c in enumerate(checkKubeVersion): - if c.isdigit(): - firstDigit = i - break - if firstDigit != -1: - versionInRange = checkKubeVersion[firstDigit:].strip() - preVersion = checkKubeVersion[:firstDigit].strip() - checkKubeVersion = ( - f"{preVersion}{semantic_version.Version.coerce(versionInRange)}" - ) - - # see if the updates have helped - semantic_version.NpmSpec(checkKubeVersion) - print(f"Fixed value error in kubeVersion : {checkKubeVersion}") - - except ValueError: - print(f"Unable to fix value error in kubeVersion : {kubeVersion}") - return "N/A" - - minOCP = "" - maxOCP = "" - getKubVersionMap() - for kubeVersionKey in kubeOpenShiftVersionMap: - # print(f"\n Map entry : {kubeVersionKey}: {kubeOpenShiftVersionMap[kubeVersionKey]}") - # print(f" MinOCP : {minOCP}, maxOCP: {maxOCP}") - coercedKubeVersionKey = semantic_version.Version.coerce(kubeVersionKey) - if coercedKubeVersionKey in semantic_version.NpmSpec(checkKubeVersion): - coercedOCPVersionValue = semantic_version.Version.coerce( - kubeOpenShiftVersionMap[kubeVersionKey] - ) - if ( - minOCP == "" - or semantic_version.Version.coerce(minOCP) > coercedOCPVersionValue - ): - minOCP = kubeOpenShiftVersionMap[kubeVersionKey] - # print(f" Found new min : {checkKubeVersion}: {minOCP}") - if ( - maxOCP == "" - or semantic_version.Version.coerce(maxOCP) < coercedOCPVersionValue - ): - maxOCP = 
kubeOpenShiftVersionMap[kubeVersionKey] - # print(f" Found new Max : {checkKubeVersion}: {maxOCP}") - - # check if minOCP is open ended - if minOCP != "" and semantic_version.Version( - "1.999.999" - ) in semantic_version.NpmSpec(checkKubeVersion): - ocp_versions = f">={minOCP}" - elif minOCP == "": - ocp_versions = "N/A" - elif maxOCP == "" or maxOCP == minOCP: - ocp_versions = minOCP - else: - ocp_versions = f"{minOCP} - {maxOCP}" - - return ocp_versions - - -def getIndexAnnotations(report_path): + Returns: + dict: mapping of annotations names to their values + """ annotations = report_info.get_report_annotations(report_path) set_annotations = {} @@ -127,11 +43,8 @@ def getIndexAnnotations(report_path): set_annotations[annotation] = annotations[annotation] if not OCPSupportedSet: - chart = report_info.get_report_chart(report_path) - OCPVersions = "N/A" - if "kubeVersion" in chart and chart["kubeVersion"]: - kubeVersion = chart["kubeVersion"] - OCPVersions = getOCPVersions(kubeVersion) - set_annotations["charts.openshift.io/supportedOpenShiftVersions"] = OCPVersions + set_annotations[ + "charts.openshift.io/supportedOpenShiftVersions" + ] = ocp_version_range return set_annotations diff --git a/scripts/src/checkprcontent/checkpr.py b/scripts/src/checkprcontent/checkpr.py index 79a9de267a..4ecc69bbf0 100644 --- a/scripts/src/checkprcontent/checkpr.py +++ b/scripts/src/checkprcontent/checkpr.py @@ -8,6 +8,8 @@ import semver import yaml +from reporegex import matchers + try: from yaml import CLoader as Loader except ImportError: @@ -20,7 +22,6 @@ from tools import gitutils ALLOW_CI_CHANGES = "allow/ci-changes" -TYPE_MATCH_EXPRESSION = "(partners|redhat|community)" def check_web_catalog_only(report_in_pr, num_files_in_pr, report_file_match): @@ -90,9 +91,9 @@ def check_web_catalog_only(report_in_pr, num_files_in_pr, report_file_match): if web_catalog_only: print("[INFO] webCatalogOnly/providerDelivery is a go") - gitutils.add_output("webCatalogOnly", "True") + gitutils.add_output("web_catalog_only", "True") else: - gitutils.add_output("webCatalogOnly", "False") + gitutils.add_output("web_catalog_only", "False") print("[INFO] webCatalogOnly/providerDelivery is a no-go") @@ -109,15 +110,11 @@ def get_file_match_compiled_patterns(): charts/partners/hashicorp/vault/0.20.0//report.yaml """ - pattern = re.compile( - r"charts/" + TYPE_MATCH_EXPRESSION + "/([\w-]+)/([\w-]+)/([\w\.-]+)/.*" - ) - reportpattern = re.compile( - r"charts/" + TYPE_MATCH_EXPRESSION + "/([\w-]+)/([\w-]+)/([\w\.-]+)/report.yaml" - ) - tarballpattern = re.compile( - r"charts/(partners|redhat|community)/([\w-]+)/([\w-]+)/([\w\.-]+)/(.*\.tgz$)" - ) + base = matchers.submission_path_matcher() + + pattern = re.compile(base + r"/.*") + reportpattern = re.compile(base + r"/report.yaml") + tarballpattern = re.compile(base + r"/(.*\.tgz$)") return pattern, reportpattern, tarballpattern @@ -151,7 +148,7 @@ def ensure_only_chart_is_modified(api_url, repository, branch): _, _, chart_name, chart_version, tar_name = tar_match.groups() expected_tar_name = f"{chart_name}-{chart_version}.tgz" if tar_name != expected_tar_name: - msg = f"[ERROR] the tgz file is named incorrectly. Expected: {expected_tar_name}" + msg = f"[ERROR] the tgz file is named incorrectly. Expected: {expected_tar_name}. 
Got: {tar_name}" print(msg) gitutils.add_output("pr-content-error-message", msg) exit(1) @@ -232,7 +229,7 @@ def ensure_only_chart_is_modified(api_url, repository, branch): sys.exit(1) tag_name = f"{organization}-{chart}-{version}" - gitutils.add_output("chart-name-with-version", tag_name) + gitutils.add_output("release_tag", tag_name) tag_api = f"https://api.github.com/repos/{repository}/git/ref/tags/{tag_name}" headers = { "Accept": "application/vnd.github.v3+json", diff --git a/scripts/src/indexfile/index.py b/scripts/src/indexfile/index.py index 85e7ffa990..b45201ddde 100644 --- a/scripts/src/indexfile/index.py +++ b/scripts/src/indexfile/index.py @@ -5,7 +5,6 @@ import sys sys.path.append("../") -from chartrepomanager import indexannotations INDEX_FILE = "https://charts.openshift.io/index.yaml" @@ -110,39 +109,3 @@ def get_latest_charts(): latest_charts.append(chart_in_process) return latest_charts - - -if __name__ == "__main__": - get_chart_info("redhat-dotnet-0.0.1") - - chart_list = get_latest_charts() - - for chart in chart_list: - print(f'[INFO] found latest chart : {chart["name"]} {chart["version"]}') - - OCP_VERSION = semantic_version.Version.coerce("4.11") - - for chart in chart_list: - if ( - "supportedOCP" in chart - and chart["supportedOCP"] != "N/A" - and chart["supportedOCP"] != "" - ): - if OCP_VERSION in semantic_version.NpmSpec(chart["supportedOCP"]): - print( - f'PASS: Chart supported OCP version {chart["supportedOCP"]} includes: {OCP_VERSION}' - ) - else: - print( - f' ERROR: Chart supported OCP version {chart["supportedOCP"]} does not include {OCP_VERSION}' - ) - elif "kubeVersion" in chart and chart["kubeVersion"] != "": - supportedOCPVersion = indexannotations.getOCPVersions(chart["kubeVersion"]) - if OCP_VERSION in semantic_version.NpmSpec(supportedOCPVersion): - print( - f'PASS: Chart kubeVersion {chart["kubeVersion"]} (OCP: {supportedOCPVersion}) includes OCP version: {OCP_VERSION}' - ) - else: - print( - f' ERROR: Chart kubeVersion {chart["kubeVersion"]} (OCP: {supportedOCPVersion}) does not include {OCP_VERSION}' - ) diff --git a/scripts/src/metrics/metrics.py b/scripts/src/metrics/metrics.py index c2a289d844..00e96aa834 100644 --- a/scripts/src/metrics/metrics.py +++ b/scripts/src/metrics/metrics.py @@ -11,8 +11,11 @@ from indexfile import index from pullrequest import prepare_pr_comment as pr_comment from collections import OrderedDict +from reporegex import matchers -file_pattern = re.compile(r"charts/([\w-]+)/([\w-]+)/([\w\.-]+)/([\w\.-]+)/.*") +file_pattern = re.compile( + matchers.submission_path_matcher(strict_categories=False) + r"/.*" +) chart_downloads_event = "Chart Downloads v1.0" ignore_users = [ "zonggen", diff --git a/scripts/src/owners/owners_file.py b/scripts/src/owners/owners_file.py index addcdcbff3..7cbeac1aab 100644 --- a/scripts/src/owners/owners_file.py +++ b/scripts/src/owners/owners_file.py @@ -66,9 +66,4 @@ def get_users_included(owner_data): def get_pgp_public_key(owner_data): - pgp_public_key = "null" - try: - pgp_public_key = owner_data["publicPgpKey"] - except Exception: - pass - return pgp_public_key + return owner_data.get("publicPgpKey", "") diff --git a/scripts/src/pullrequest/prartifact.py b/scripts/src/pullrequest/prartifact.py index 8f994bf8cd..a0f74f7ba1 100644 --- a/scripts/src/pullrequest/prartifact.py +++ b/scripts/src/pullrequest/prartifact.py @@ -30,6 +30,14 @@ def get_modified_charts(api_url): def get_modified_files(api_url): + """Populates and returns the list of files modified by this the PR + + Args: + api_url 
(str): URL of the GitHub PR + + Returns: + list[str]: List of modified files + """ if not pr_files: page_number = 1 max_page_size, page_size = 100, 100 diff --git a/scripts/src/release/releasechecker.py b/scripts/src/release/releasechecker.py index 8cf557ebec..135c9b78aa 100644 --- a/scripts/src/release/releasechecker.py +++ b/scripts/src/release/releasechecker.py @@ -31,6 +31,7 @@ import sys from release import release_info from release import releaser +from reporegex import matchers sys.path.append("../") from owners import checkuser @@ -38,7 +39,6 @@ from pullrequest import prartifact VERSION_FILE = "release/release_info.json" -TYPE_MATCH_EXPRESSION = "(partners|redhat|community)" CHARTS_PR_BASE_REPO = gitutils.CHARTS_REPO CHARTS_PR_HEAD_REPO = gitutils.CHARTS_REPO DEV_PR_BASE_REPO = gitutils.DEVELOPMENT_REPO @@ -69,7 +69,7 @@ def check_file_in_pr(api_url, pattern, error_value): def check_if_only_charts_are_included(api_url): print("[INFO] check if only chart files are included") chart_pattern = re.compile( - r"charts/" + TYPE_MATCH_EXPRESSION + "/([\w-]+)/([\w-]+)/.*" + matchers.submission_path_matcher(include_version_matcher=False) + r"./*" ) return check_file_in_pr(api_url, chart_pattern, ERROR_IF_MATCH_NOT_FOUND) @@ -77,7 +77,7 @@ def check_if_only_charts_are_included(api_url): def check_if_no_charts_are_included(api_url): print("[INFO] check if no chart files are included") chart_pattern = re.compile( - r"charts/" + TYPE_MATCH_EXPRESSION + "/([\w-]+)/([\w-]+)/.*" + matchers.submission_path_matcher(include_version_matcher=False) + r"./*" ) return check_file_in_pr(api_url, chart_pattern, ERROR_IF_MATCH_FOUND) diff --git a/scripts/src/reporegex/__init__.py b/scripts/src/reporegex/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/scripts/src/reporegex/matchers.py b/scripts/src/reporegex/matchers.py new file mode 100644 index 0000000000..2519a720f6 --- /dev/null +++ b/scripts/src/reporegex/matchers.py @@ -0,0 +1,41 @@ +def submission_path_matcher( + base_dir="charts", strict_categories=True, include_version_matcher=True +): + """Returns a regex string with various submission-related groupings. + + The groupings returned (in order) are: category, organization, chart name, + and optionally, version. + + Callers should append any relevant path matching to the end of the string + returned from this function. E.g. "/.*" + + Args: + base_dir: The base path of the expression statement. Should + not be empty. + strict_categories: Whether the category matcher should match only the + relevant categories, or any word at all. + include_version_matcher: Whether or not the version matcher should be + appended. In some cases, the caller of this regex doesn't care about the + versioning detail. + + Returns: + A regular expression-compatible string with the mentioned groupings. 
+ """ + + relaxedCategoryMatcher = "\w+" + strictCategoryMatcher = "partners|redhat|community" + + categoryMatcher = ( + strictCategoryMatcher if strict_categories else relaxedCategoryMatcher + ) + organizationMatcher = "[\w-]+" + chartMatcher = "[\w-]+" + versionMatcher = "[\w\.\-+]+" + + matcher = ( + rf"{base_dir}/({categoryMatcher})/({organizationMatcher})/({chartMatcher})" + ) + if include_version_matcher: + matcher += rf"/({versionMatcher})" + + return matcher diff --git a/scripts/src/report/get_verify_params.py b/scripts/src/report/get_verify_params.py index 9dbf9d35c7..58152d8f2b 100644 --- a/scripts/src/report/get_verify_params.py +++ b/scripts/src/report/get_verify_params.py @@ -8,14 +8,22 @@ from tools import gitutils +def get_report_full_path(category, organization, chart, version): + return os.path.join( + os.getcwd(), get_report_relative_path(category, organization, chart, version) + ) + + +def get_report_relative_path(category, organization, chart, version): + return os.path.join("charts", category, organization, chart, version, "report.yaml") + + def generate_verify_options(directory, category, organization, chart, version): print("[INFO] Generate verify options. %s, %s, %s" % (organization, chart, version)) src = os.path.join( os.getcwd(), "charts", category, organization, chart, version, "src" ) - report_path = os.path.join( - os.getcwd(), "charts", category, organization, chart, version, "report.yaml" - ) + report_path = get_report_full_path(category, organization, chart, version) tar = os.path.join( os.getcwd(), "charts", @@ -32,14 +40,16 @@ def generate_verify_options(directory, category, organization, chart, version): flags = f"--set profile.vendortype={category}" cluster_needed = True + report_provided = False if os.path.exists(report_path): print("[INFO] report is included") flags = f"{flags} -e has-readme" cluster_needed = False + report_provided = True if os.path.exists(src) and not os.path.exists(tar): print("[INFO] chart src included") - return flags, src, True, cluster_needed + return flags, src, True, cluster_needed, report_provided elif os.path.exists(tar) and not os.path.exists(src): print("[INFO] tarball included") if not os.path.exists(report_path): @@ -50,14 +60,14 @@ def generate_verify_options(directory, category, organization, chart, version): if signed_flags: print(f"[INFO] include flags for signed chart: {signed_flags}") flags = f"{flags} {signed_flags}" - return flags, tar, True, cluster_needed + return flags, tar, True, cluster_needed, report_provided elif os.path.exists(tar) and os.path.exists(src): msg = "[ERROR] Both chart source directory and tarball should not exist" chartprreview.write_error_log(directory, msg) sys.exit(1) else: print("[INFO] report only") - return "", "", False, False + return "", "", False, False, report_provided def main(): @@ -85,8 +95,17 @@ def main(): args.directory, args.api_url ) - flags, chart_uri, report_needed, cluster_needed = generate_verify_options( - args.directory, category, organization, chart, version + ( + flags, + chart_uri, + report_needed, + cluster_needed, + report_provided, + ) = generate_verify_options(args.directory, category, organization, chart, version) + gitutils.add_output("report_provided", report_provided) + gitutils.add_output( + "provided_report_relative_path", + get_report_relative_path(category, organization, chart, version), ) gitutils.add_output("report_needed", report_needed) gitutils.add_output("cluster_needed", cluster_needed) diff --git a/scripts/src/report/verifier_report.py 
index 24da56e2bc..e168f2a3c5 100644
--- a/scripts/src/report/verifier_report.py
+++ b/scripts/src/report/verifier_report.py
@@ -34,7 +34,6 @@ from yaml import Loader

 sys.path.append("../")
-from chartrepomanager import indexannotations
 from report import report_info

 MIN_SUPPORTED_OPENSHIFT_VERSION = semantic_version.SimpleSpec(">=4.1.0")
@@ -45,6 +44,15 @@ def get_report_data(report_path):
+    """Loads and returns the report data contained in report.yaml
+
+    Args:
+        report_path (str): Path to the report.yaml file.
+
+    Returns:
+        (bool, dict): A boolean indicating if the loading was successful and the
+        content of the report.yaml file.
+    """
     try:
         with open(report_path) as report_data:
             report_content = yaml.load(report_data, Loader=Loader)
@@ -55,6 +63,16 @@ def get_result(report_data, check_name):
+    """Parse the report.yaml content for the result of a given check.
+
+    Args:
+        report_data (dict): The content of the report.yaml file.
+        check_name (str): The name of the check to get the result for.
+
+    Returns:
+        (bool, str): A boolean set to True if the test passed, False otherwise,
+        and the corresponding "reason" field.
+    """
     outcome = False
     reason = "Not Found"
     for result in report_data["results"]:
@@ -117,6 +135,14 @@ def get_package_digest(report_data):


 def get_public_key_digest(report_data):
+    """Get the public key digest from report.yaml
+
+    Args:
+        report_data (dict): The report.yaml content
+
+    Returns:
+        str: The public key digest from report.yaml. Set to None if not found.
+    """
     public_key_digest = None
     try:
         digests = report_data["metadata"]["tool"]["digests"]
@@ -129,6 +155,14 @@ def report_is_valid(report_data):
+    """Check that the report.yaml contains the expected YAML structure
+
+    Args:
+        report_data (dict): The content of report.yaml
+
+    Returns:
+        bool: Set to True if the report contains the correct structure, False otherwise.
+    """
     outcome = True

     if "kind" not in report_data or report_data["kind"] != "verify-report":
@@ -152,7 +186,25 @@
     return outcome


-def validate(report_path):
+def validate(report_path, ocp_version_range):
+    """Validate report.yaml by running a series of checks.
+
+    * Checks that the report.yaml contains valid YAML.
+    * Checks that the report.yaml contains the correct structure.
+    * Checks that the Chart has been successfully tested (result of /chart-testing).
+    * Checks that the profile version used is valid SemVer.
+    * Checks that the expected annotation is present.
+    * Checks that the reported versions of OCP and Kubernetes are valid and coherent.
+
+    Args:
+        report_path (str): Path to the report.yaml file
+        ocp_version_range (str): Range of supported OCP versions
+
+    Returns:
+        (bool, str): if the checks all passed, this returns a bool set to True and an
+                     empty str. Otherwise, this returns a bool set to False and the
+                     corresponding error message.
+ """ is_valid_yaml, report_data = get_report_data(report_path) if not is_valid_yaml: @@ -171,7 +223,7 @@ def validate(report_path): v1_0_profile = False if profile_version.major == 1 and profile_version.minor == 0: v1_0_profile = True - except Exception: + except ValueError: message = f"Invalid profile version in report : {profile_version_string}" print(message) return False, message @@ -203,36 +255,16 @@ def validate(report_path): has_kubeversion_outcome, _ = get_chart_testing_result(report_data) if has_kubeversion_outcome: - chart = report_info.get_report_chart(report_path) - if KUBE_VERSION_ATTRIBUTE in chart: - kube_supported_ocp_versions_string = indexannotations.getOCPVersions( - chart[KUBE_VERSION_ATTRIBUTE] - ) - try: - kube_supported_versions = semantic_version.NpmSpec( - kube_supported_ocp_versions_string - ) - except ValueError: - if v1_0_profile: - return True, "" - else: - return ( - False, - f"Kube Version {chart[KUBE_VERSION_ATTRIBUTE]} translates to an invalid OCP version range {kube_supported_ocp_versions_string}", - ) - else: - if v1_0_profile: - return True, "" - else: - return False, f"{KUBE_VERSION_ATTRIBUTE} missing from chart!" + if not v1_0_profile: + chart = report_info.get_report_chart(report_path) + kube_supported_versions = semantic_version.NpmSpec(ocp_version_range) - if tested_version not in kube_supported_versions: - return ( - False, - f"Tested OpenShift version {str(tested_version)} not within specified kube-versions : {kube_supported_ocp_versions_string}", - ) + if tested_version not in kube_supported_versions: + return ( + False, + f"Tested OpenShift version {str(tested_version)} not within specified kube-versions : {ocp_version_range}", + ) - if not v1_0_profile: if SUPPORTED_VERSIONS_ANNOTATION in annotations: supported_versions_string = annotations[ SUPPORTED_VERSIONS_ANNOTATION diff --git a/scripts/src/saforcertadmin/create_sa.sh b/scripts/src/saforcertadmin/create_sa.sh index 9831c50cfe..dc59c7f2a2 100755 --- a/scripts/src/saforcertadmin/create_sa.sh +++ b/scripts/src/saforcertadmin/create_sa.sh @@ -2,7 +2,7 @@ user_name='rh-cert-user' oc create sa $user_name -token_secret=$(oc get sa $user_name -o json | jq -r '.secrets[].name | select( index("token") )') +token_secret=$(oc get secrets --field-selector=type=kubernetes.io/service-account-token -o=jsonpath="{.items[?(@.metadata.annotations.kubernetes\.io/service-account\.name=='"$user_name"')].metadata.name}") token=$(oc get secret $token_secret -o json | jq -r .data.token | base64 -d) oc apply -f cluster_role_binding.yaml diff --git a/scripts/src/signedchart/signedchart.py b/scripts/src/signedchart/signedchart.py index 11958ff346..df84c2c8ee 100644 --- a/scripts/src/signedchart/signedchart.py +++ b/scripts/src/signedchart/signedchart.py @@ -9,6 +9,7 @@ from report import verifier_report from owners import owners_file from pullrequest import prartifact +from reporegex import matchers def check_and_prepare_signed_chart(api_url, report_path, owner_path, key_file_path): @@ -41,10 +42,12 @@ def get_verifier_flags(tar_file, owners_file, temp_dir): def is_chart_signed(api_url, report_path): if api_url: files = prartifact.get_modified_files(api_url) - tgz_pattern = re.compile(r"charts/(\w+)/([\w-]+)/([\w-]+)/([\w\.-]+)/.*.tgz") + tgz_pattern = re.compile( + matchers.submission_path_matcher(strict_categories=False) + r".*.tgz" + ) tgz_found = False prov_pattern = re.compile( - r"charts/(\w+)/([\w-]+)/([\w-]+)/([\w\.-]+)/.*.tgz.prov" + matchers.submission_path_matcher(strict_categories=False) + r".*.tgz.prov" 
         )
         prov_found = False
@@ -73,27 +76,47 @@ def get_pgp_key_from_owners(owner_path):
     found, owner_data = owners_file.get_owner_data_from_file(owner_path)
     if found:
-        pgp_key = owners_file.get_pgp_public_key(owner_data)
-        if pgp_key == "null":
-            pgp_key = ""
-        return pgp_key
+        return owners_file.get_pgp_public_key(owner_data)
     return ""


 def check_report_for_signed_chart(report_path):
+    """Check that the report has passed the "signature-is-valid" test
+
+    Args:
+        report_path (str): Path to the report.yaml file
+
+    Returns:
+        bool: Set to True if the report has passed the "signature-is-valid" test,
+        False otherwise
+    """
     found, report_data = verifier_report.get_report_data(report_path)
     if found:
-        outcome, reason = verifier_report.get_signature_is_valid_result(report_data)
+        _, reason = verifier_report.get_signature_is_valid_result(report_data)
         if "Chart is signed" in reason:
             return True

     return False


 def check_pgp_public_key(owner_pgp_key, report_path):
-    # return True if one of:
-    # - report not found
-    # - report is not for a signed chart
-    # - digests match
+    """Check if the PGP key in the OWNERS file matches the one from report.yaml
+
+    This check passes if one of the following conditions is met:
+    - The PGP keys match.
+    - The report is not for a signed chart
+    - The report is not found
+
+    Consequently, the check fails if the report is found and one of the following is true:
+    - The PGP keys do not match
+    - The report is for a signed chart but no PGP key is provided in report.yaml
+
+    Args:
+        owner_pgp_key (str): The PGP key present in the OWNERS file.
+        report_path (str): Path to the report.yaml file.
+
+    Returns:
+        bool: Set to True if the check passes, False otherwise.
+    """
     found, report_data = verifier_report.get_report_data(report_path)
     if found:
         pgp_public_key_digest_owners = subprocess.getoutput(
diff --git a/scripts/src/workflowtesting/checkprforci.py b/scripts/src/workflowtesting/checkprforci.py
index 0e7f487a7b..7e862045d4 100644
--- a/scripts/src/workflowtesting/checkprforci.py
+++ b/scripts/src/workflowtesting/checkprforci.py
@@ -19,7 +19,7 @@ def check_if_ci_only_is_modified(api_url):
     files = prartifact.get_modified_files(api_url)

     workflow_files = [
-        re.compile(r".github/workflows/.*"),
+        re.compile(r".github/(workflows|actions)/.*"),
         re.compile(r"scripts/.*"),
         re.compile(r"tests/.*"),
     ]
@@ -33,19 +33,29 @@
         re.compile(r"docs/([\w-]+)\.md"),
     ]

+    print(f"[INFO] The following files were modified in this PR: {files}")
+
     workflow_found = False
     others_found = False
     tests_included = False

     for filename in files:
         if any([pattern.match(filename) for pattern in workflow_files]):
+            print(f"[DEBUG] Modified file {filename} is a workflow file.")
             workflow_found = True
+            # Tests are considered workflow files AND test files to inform other actions
+            # so we detect both separately.
             if any([pattern.match(filename) for pattern in test_files]):
+                print(f"[DEBUG] Modified file {filename} is also a test file.")
                 tests_included = True
         elif any([pattern.match(filename) for pattern in skip_build_files]):
+            print(f"[DEBUG] Modified file {filename} is a skippable file.")
             others_found = True
         else:
-            return False
+            print(
+                f"[DEBUG] Modified file {filename} did not match any file paths of interest. Ignoring."
+            )
+            continue

     if others_found and not workflow_found:
         gitutils.add_output("do-not-build", "true")
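For reference, a minimal usage sketch of the new reporegex.matchers helper introduced in this patch. This snippet is not part of the diff; the sample file path, the unpacking of the captured groups, and the assumption that scripts/src is on the import path are illustrative only.

import re

from reporegex import matchers

# Build the shared submission-path pattern; any trailing path after the version segment is accepted.
pattern = re.compile(matchers.submission_path_matcher(strict_categories=False) + r"/.*")

# Hypothetical modified-file path, e.g. one entry returned by prartifact.get_modified_files().
match = pattern.match("charts/partners/acme/sample-chart/1.2.3/report.yaml")
if match:
    category, organization, chart, version = match.groups()
    print(category, organization, chart, version)  # partners acme sample-chart 1.2.3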