name: "e2e" description: "e2e" inputs: e2e-test-id: description: "Test id" required: true e2e-artifact-id: description: "Artifact id" required: true e2e-test-folder: description: "Test folder" required: true e2e-test-provider: description: "Test provider" required: true e2e-test-auth: description: "Test auth" required: true output: description: "Output path" required: true check-cs-env: required: true description: check cs env default: "false" check-ps-env: required: true description: check ps env default: "false" check-external-cs-env: required: true description: check external cs env default: "false" check-ps-cloud-env: required: true description: check ps cloud env default: "false" e2e-tar-name: # description: tarball name required: false default: e2e.tar.gz apa-proxy: # description: "proxy host" required: true deps: description: "Library dependencies" required: false default: "" runs: using: "composite" steps: - name: Determine if affected shell: bash id: determine-affected run: | isAffected=false affectedLibs=$(npx nx print-affected --type=lib --select=projects ${NX_CALCULATION_FLAGS} --plain) if [[ $affectedLibs =~ "${{ inputs.e2e-test-id }}" ]]; then isAffected=true fi; echo "Determine if ${{ inputs.e2e-test-id }} is affected: $isAffected"; echo "isAffected=$isAffected" >> $GITHUB_OUTPUT - name: print value shell: bash run: | echo "value: ${{ steps.determine-affected.outputs.isAffected }}" - name: use APA as PROXY host if apa-proxy is set shell: bash run: | if [[ -n "${{ inputs.apa-proxy }}" ]]; then echo "APA proxy set." echo "PROXY_HOST_BPM=${E2E_HOST_APA}" >> $GITHUB_ENV echo "PROXY_HOST_ECM=${E2E_IDENTITY_HOST_APA}" >> $GITHUB_ENV echo "HOST_SSO=${E2E_IDENTITY_HOST_APA}" >> $GITHUB_ENV fi - name: install aws cli shell: bash run: pip install awscli - name: download smartrunner test results from s3 bucket if they exist shell: bash env: REMOTE_PATH: smart-runner/${{ github.run_id}}/${{ inputs.e2e-test-folder }}-${{ inputs.e2e-artifact-id}}/e2e.tar.gz run: | set -u; mkdir -p "${SMART_RUNNER_PATH}" if [[ $(aws s3 ls "s3://${S3_BUILD_BUCKET_SHORT_NAME}/adf/${REMOTE_PATH}" > /dev/null; echo $?) 
-eq 0 ]]; then echo "downloading test files" aws s3 cp "s3://${S3_BUILD_BUCKET_SHORT_NAME}/adf/${REMOTE_PATH}" .; tar xzf ${{ inputs.e2e-tar-name }}; else echo "nothing to download"; fi - name: check EXTERNAL-CS is UP shell: bash if: ${{ inputs.check-external-cs-env == 'true' && steps.determine-affected.outputs.isAffected == 'true' }} run: | echo "running: check EXTERNAL-CS is UP" set -u; ./node_modules/@alfresco/adf-cli/bin/adf-cli \ check-cs-env \ --host "$EXTERNAL_ACS_HOST" \ -u "$E2E_USERNAME" \ -p "$E2E_PASSWORD" || exit 1 - name: Check CS is UP shell: bash if: ${{ inputs.check-cs-env == 'true' && steps.determine-affected.outputs.isAffected == 'true' }} run: | echo "Running: Check CS is UP" set -u; ./node_modules/@alfresco/adf-cli/bin/adf-cli \ check-cs-env \ --host "$E2E_HOST" \ -u "$E2E_USERNAME" \ -p "$E2E_PASSWORD" || exit 1 - name: check PS is UP shell: bash if: ${{ inputs.check-ps-env == 'true' && steps.determine-affected.outputs.isAffected == 'true' }} run: | echo "Running: check PS is UP" set -u; ./node_modules/@alfresco/adf-cli/bin/adf-cli init-aps-env \ --host "$E2E_HOST" \ -u "$E2E_USERNAME" \ -p "$E2E_PASSWORD" \ --license "$AWS_S3_BUCKET_ACTIVITI_LICENSE" || exit 1 - name: check PS-CLOUD is UP shell: bash if: ${{ inputs.check-ps-cloud-env == 'true' && steps.determine-affected.outputs.isAffected == 'true' }} run: | echo "running: check PS-CLOUD is UP" set -u; ./node_modules/@alfresco/adf-cli/bin/adf-cli init-aae-env \ --oauth "$E2E_IDENTITY_HOST_APA" \ --host "$E2E_HOST_APA" \ --modelerUsername "$E2E_MODELER_USERNAME" \ --modelerPassword "$E2E_MODELER_PASSWORD" \ --devopsUsername "$E2E_DEVOPS_USERNAME" \ --devopsPassword "$E2E_DEVOPS_PASSWORD" \ --clientId 'alfresco' || exit 1 - name: variables sanitization env: FOLDER: "${{ inputs.e2e-test-folder }}" PROVIDER: "${{ inputs.e2e-test-provider }}" AUTH_TYPE: "${{ inputs.e2e-test-auth }}" E2E_TEST_ID: "${{ inputs.e2e-test-id }}" DEPS: "${{ inputs.deps }}" shell: bash run: | set -u; echo $PROXY_HOST_BPM echo "GIT_HASH=$GIT_HASH" >> $GITHUB_ENV - name: run test id: e2e_run if: ${{ steps.determine-affected.outputs.isAffected == 'true' }} env: FOLDER: "${{ inputs.e2e-test-folder }}" PROVIDER: "${{ inputs.e2e-test-provider }}" AUTH_TYPE: "${{ inputs.e2e-test-auth }}" E2E_TEST_ID: "${{ inputs.e2e-test-id }}" DEPS: "${{ inputs.deps }}" shell: bash run: | set -u; if [[ ${{ inputs.e2e-test-folder }} == 'content-services/upload' ]]; then export DISPLAY=:99 chromedriver --url-base=/wd/hub & sudo Xvfb -ac :99 -screen 0 1280x1024x24 > /dev/null 2>&1 & # optional bash ./scripts/github/e2e/e2e.sh "$E2E_TEST_ID" "$DEPS" "browser" || exit 1 else bash ./scripts/github/e2e/e2e.sh "$E2E_TEST_ID" "$DEPS" || exit 1 fi - name: Trace failing e2e if: ${{ steps.determine-affected.outputs.isAffected == 'true' && github.event_name == 'schedule' && failure() }} uses: ./.github/actions/artifact-append with: artifact-name: "global-e2e-result-${{ inputs.e2e-test-id }}" file-name: e2e-failures.txt content: "${{ inputs.e2e-test-id }}" - name: upload artifacts on gh id: upload_gh if: ${{ steps.determine-affected.outputs.isAffected == 'true' }} uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 with: name: e2e-artifact-output-${{inputs.e2e-artifact-id}} path: /home/runner/work/alfresco-ng2-components/alfresco-ng2-components/e2e-output-* - name: upload smart-runner tests results on s3 to cache tests shell: bash if: always() env: REMOTE_PATH: "smart-runner/${{ github.run_id}}/${{ inputs.e2e-test-folder 
}}-${{inputs.e2e-artifact-id}}/e2e.tar.gz" # description: always upload newer results run: | tar czf "${{ inputs.e2e-tar-name }}" "${SMART_RUNNER_PATH}" aws s3 cp "${{ inputs.e2e-tar-name }}" "s3://${S3_BUILD_BUCKET_SHORT_NAME}/adf/${REMOTE_PATH}"