Mirror of https://github.com/Alfresco/alfresco-ng2-components.git, synced 2025-07-31 17:38:48 +00:00
[AAE-7986] Migrate ADF from Travis to Github actions (#8050)
* rebased * [ci:force]
.github/actions/before-install/action.yml (vendored, new file, 13 lines)
@@ -0,0 +1,13 @@
name: "before-install"
description: "launch before_install.sh and then parse env vars"
runs:
  using: "composite"
  steps:
    - name: Before install
      shell: bash
      run: |
        bash -xv ./scripts/ci/job_hooks/before_install.sh "/tmp/github_vars.env"
        while read ENVVAR; do
          echo "$ENVVAR" >> $GITHUB_ENV
        done < "/tmp/github_vars.env"
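For context, a minimal sketch of how a caller workflow might invoke this composite action; the job name, runner label, and checkout step are illustrative and not part of this commit:

jobs:
  setup-env:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: run before_install hook and export its variables
        uses: ./.github/actions/before-install

The action appends every variable emitted by before_install.sh to $GITHUB_ENV, so later steps in the same job can read them.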
.github/actions/download-build-artifacts/action.yml (vendored, new file, 53 lines)
@@ -0,0 +1,53 @@
name: "Download build artifacts"
description: "Download build artifacts"
runs:
  using: "composite"
  steps:
    ## NXCACHE
    - name: update NX cache folder
      uses: actions/download-artifact@v3
      with:
        path: .
        name: nxcache
    ## DIST
    - name: update Dist folder
      uses: actions/download-artifact@v3
      with:
        path: .
        name: dist
    ## ADF CLI
    - name: update Dist folder
      uses: actions/download-artifact@v3
      with:
        path: .
        name: adf-cli
    ## ADF TESTING
    - name: update Dist folder
      uses: actions/download-artifact@v3
      with:
        path: .
        name: adf-testing
    - name: extract nxcache
      run: |
        tar xzf nxcache.tar.gz
      shell: bash
    - name: extract dist
      run: |
        tar xzf dist.tar.gz
      shell: bash
    - name: extract adf cli
      run: |
        tar xzf adf-cli.tar.gz
        tar xzf adf-testing.tar.gz
      shell: bash
    - name: show files
      shell: bash
      run: |
        pwd
        ls -lha
        echo "====DIST===="
        find dist -maxdepth 1 -type d
        echo "====NXCACHE===="
        find nxcache -maxdepth 1 -type d
        echo "====ADF===="
        find node_modules/@alfresco/ -maxdepth 1 -type d
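A hedged sketch of a downstream job consuming these artifacts; the job names and the dependency on a "build" job are assumptions, only the local action path comes from this commit:

jobs:
  e2e:
    runs-on: ubuntu-latest
    needs: build
    steps:
      - uses: actions/checkout@v3
      - name: restore dist, nxcache and ADF packages from the build job
        uses: ./.github/actions/download-build-artifacts

The artifact names (nxcache, dist, adf-cli, adf-testing) and archive names match what the upload-build-artifacts action further down produces, so this only works in a run where that upload has already happened.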
.github/actions/e2e/action.yml (vendored, new file, 161 lines)
@@ -0,0 +1,161 @@
name: "e2e"
description: "e2e"
inputs:
  e2e-test-id:
    description: "Test id"
    required: true
  e2e-test-folder:
    description: "Test folder"
    required: true
  e2e-test-provider:
    description: "Test provider"
    required: true
  e2e-test-auth:
    description: "Test auth"
    required: true
  output:
    description: "Output path"
    required: true
  check-cs-env:
    required: true
    description: check cs env
    default: "false"
  check-ps-env:
    required: true
    description: check ps env
    default: "false"
  check-external-cs-env:
    required: true
    description: check external cs env
    default: "false"
  check-ps-cloud-env:
    required: true
    description: check ps cloud env
    default: "false"
  e2e-tar-name: #
    required: false
    default: e2e.tar.gz
  apa-proxy: #
    description: "proxy host"
    required: true

runs:
  using: "composite"
  steps:
    - name: use APA as PROXY host if apa-proxy is set
      shell: bash
      run: |
        if [[ -n "${{ inputs.apa-proxy }}" ]]; then
          echo "APA proxy set."
          echo "PROXY_HOST_BPM=${E2E_HOST_APA}" >> $GITHUB_ENV
        fi

    - name: install aws cli
      shell: bash
      run: pip install awscli
    - name: download smartrunner test results from s3 bucket if they exist
      shell: bash
      env:
        REMOTE_PATH: smart-runner/${{ github.run_id }}/${{ inputs.e2e-test-folder }}-${{ inputs.e2e-test-id }}/e2e.tar.gz
      run: |
        set -u;
        mkdir -p "${SMART_RUNNER_PATH}"
        if [[ $(aws s3 ls "${S3_BUILD_BUCKET}/adf/${REMOTE_PATH}" > /dev/null; echo $?) -eq 0 ]]; then
          echo "downloading test files"
          aws s3 cp "${S3_BUILD_BUCKET}/adf/${REMOTE_PATH}" .;
          tar xzf ${{ inputs.e2e-tar-name }};
        else
          echo "nothing to download";
        fi

    - name: check EXTERNAL-CS is UP
      shell: bash
      if: ${{ inputs.check-external-cs-env == 'true' }}
      run: |
        set -u;
        ./node_modules/@alfresco/adf-cli/bin/adf-cli \
          check-cs-env \
          --host "$EXTERNAL_ACS_HOST" \
          -u "$E2E_USERNAME" \
          -p "$E2E_PASSWORD" || exit 1

    - name: Check CS is UP
      shell: bash
      if: ${{ inputs.check-cs-env == 'true' }}
      run: |
        set -u;
        ./node_modules/@alfresco/adf-cli/bin/adf-cli \
          check-cs-env \
          --host "$E2E_HOST" \
          -u "$E2E_USERNAME" \
          -p "$E2E_PASSWORD" || exit 1

    - name: check PS is UP
      shell: bash
      if: ${{ inputs.check-ps-env == 'true' }}
      run: |
        set -u;
        ./node_modules/@alfresco/adf-cli/bin/adf-cli init-aps-env \
          --host "$E2E_HOST" \
          -u "$E2E_USERNAME" \
          -p "$E2E_PASSWORD" \
          --license "$AWS_S3_BUCKET_ACTIVITI_LICENSE" || exit 1

    - name: check PS-CLOUD is UP
      shell: bash
      if: ${{ inputs.check-ps-cloud-env == 'true' }}
      run: |
        set -u;
        ./node_modules/@alfresco/adf-cli/bin/adf-cli init-aae-env \
          --oauth "$E2E_HOST" \
          --host "$E2E_HOST_APA" \
          --modelerUsername "$E2E_MODELER_USERNAME" \
          --modelerPassword "$E2E_MODELER_PASSWORD" \
          --devopsUsername "$E2E_DEVOPS_USERNAME" \
          --devopsPassword "$E2E_DEVOPS_PASSWORD" \
          --clientId 'alfresco' || exit 1

    - name: variables sanification
      env:
        FOLDER: "${{ inputs.e2e-test-folder }}"
        PROVIDER: "${{ inputs.e2e-test-provider }}"
        AUTH_TYPE: "${{ inputs.e2e-test-auth }}"
        E2E_TEST_ID: "${{ inputs.e2e-test-id }}"
      shell: bash
      run: |
        set -u;
        echo $PROXY_HOST_BPM
        NX_CALCULATION_FLAGS=$(echo $NX_CALCULATION_FLAGS | sed "s@'@@g")
        echo "NX_CALCULATION_FLAGS=$NX_CALCULATION_FLAGS" >> $GITHUB_ENV
        GIT_HASH=$(echo $GIT_HASH | sed "s@'@@g")
        echo "GIT_HASH=$GIT_HASH" >> $GITHUB_ENV
    - name: run test with retries
      id: retry_run
      env:
        FOLDER: "${{ inputs.e2e-test-folder }}"
        PROVIDER: "${{ inputs.e2e-test-provider }}"
        AUTH_TYPE: "${{ inputs.e2e-test-auth }}"
        E2E_TEST_ID: "${{ inputs.e2e-test-id }}"
      uses: nick-fields/retry@v2.8.2
      with:
        timeout_minutes: 40
        max_attempts: 2
        retry_wait_seconds: 30
        shell: bash
        command: |
          set -u;
          export GH_ACTION_RETRY_COUNT=$(cat ${GITHUB_OUTPUT} | grep -E '^[0-9]{1,2}$' | tail -n1)
          echo "RETRY GH_ACTION_RETRY_COUNT = <$GH_ACTION_RETRY_COUNT>"
          bash ./scripts/travis/e2e/e2e.sh "$E2E_TEST_ID" || exit 1

    - name: upload smartrunner tests results on s3 to cache tests
      shell: bash
      if: always()
      env:
        REMOTE_PATH: "smart-runner/${{ github.run_id }}/${{ inputs.e2e-test-folder }}-${{ inputs.e2e-test-id }}/e2e.tar.gz"
      # description: always upload newer results
      run: |
        tar czf "${{ inputs.e2e-tar-name }}" "${SMART_RUNNER_PATH}"
        aws s3 cp "${{ inputs.e2e-tar-name }}" "${S3_BUILD_BUCKET}/adf/$REMOTE_PATH"
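A sketch of calling the e2e action from within a job's steps; every input value shown (test id, folder, provider, auth type, output path) is a placeholder rather than a value taken from this commit, and the remaining check-* inputs fall back to their "false" defaults:

      - name: run e2e suite
        uses: ./.github/actions/e2e
        with:
          e2e-test-id: "20"
          e2e-test-folder: core
          e2e-test-provider: ECM
          e2e-test-auth: BASIC
          output: ./e2e-output
          check-cs-env: "true"
          apa-proxy: ""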
.github/actions/get-latest-tag-sha/action.yml (vendored, new file, 19 lines)
@@ -0,0 +1,19 @@
name: 'Get tag sha'
description: 'get the sha of the latest or a specific tag'

outputs:
  tag_sha:
    description: "long sha of the tag"
    value: ${{ steps.sha_out.outputs.tag_sha }}

runs:
  using: "composite"

  steps:
    - name: get tag short sha
      shell: bash
      id: sha_out
      run: |
        tag_sha=$(git rev-list -n 1 $(git tag --sort=-creatordate | head -n 1))
        echo "tag_sha=$tag_sha" >> $GITHUB_OUTPUT
        echo $tag_sha
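This action shells out to git, so the repository must be checked out with its tags available; a sketch of a caller, where the fetch-depth value is an assumption about how tags are made visible and is not part of this commit:

      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: resolve the latest tag
        id: tag-sha
        uses: ./.github/actions/get-latest-tag-sha
      - name: use the resolved sha
        shell: bash
        run: echo "latest tag points at ${{ steps.tag-sha.outputs.tag_sha }}"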
.github/actions/setup/action.yml (vendored, new file, 64 lines)
@@ -0,0 +1,64 @@
name: 'Setup'
description: 'Initialize cache, env var load'
inputs:
  enable-cache:
    description: 'enable caching'
    required: false
    type: boolean
    default: 'true'

runs:
  using: "composite"
  steps:
    - name: install NPM
      uses: actions/setup-node@v3
      with:
        node-version: 14
        cache-dependency-path: package-lock.json
    - name: get latest tag sha
      id: tag-sha
      uses: ./.github/actions/get-latest-tag-sha
    # CACHE
    - name: Node cache
      id: node-cache
      if: ${{ inputs.enable-cache == 'true' }}
      uses: actions/cache@v3
      env:
        cache-name: node-cache
      with:
        path: |
          node_modules
          ~/.npm
          nxcache
          dist
        key: .npm-${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }}-${{ steps.tag-sha.outputs.tag_sha }}
        restore-keys: |
          node-${{ runner.os }}-build-${{ env.cache-name }}-
          node-${{ runner.os }}-build-
          node-${{ runner.os }}-
    - name: pip cache
      uses: actions/cache@v3
      if: ${{ inputs.enable-cache == 'true' }}
      with:
        path: ~/.cache/pip
        key: ${{ runner.os }}-pip-
        restore-keys: |
          ${{ runner.os }}
    # ENV LOAD
    - name: load .travis.yml env.global variables
      uses: Alfresco/alfresco-build-tools/.github/actions/travis-env-load@v1.17.0
      with:
        yml_path: .travis.yml
    - name: load "TRAVIS_EVENT_TYPE"
      uses: ./.github/actions/travis-env-var-facade
    - name: before install script
      uses: ./.github/actions/before-install
    # sanitize nx calculation flags string
    - name: input validation for NX CALCULATION FLAGS
      shell: bash
      run: |
        export NX_CALCULATION_FLAGS=$(echo $NX_CALCULATION_FLAGS | sed "s@'@@g")
        echo "NX_CALCULATION_FLAGS=$NX_CALCULATION_FLAGS" >> $GITHUB_ENV
    - name: link nx executable
      run: sudo ln -s $(npm bin)/nx /usr/bin/nx
      shell: bash
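A sketch of a job that runs this setup action before building; enable-cache is the only declared input, and the checkout options are assumptions (the nested get-latest-tag-sha step needs tags to be present in the clone):

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: prepare node, caches and Travis-style env vars
        uses: ./.github/actions/setup
        with:
          enable-cache: 'true'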
.github/actions/travis-env-var-facade/action.yml (vendored, new file, 34 lines)
@@ -0,0 +1,34 @@
# TRAVIS_EVENT_TYPE= Indicates how the build was triggered. One of push, pull_request, api, cron
name: "travis-event-type-env-var"
description: "Mimic loading of a TRAVIS_EVENT_TYPE env var"

inputs:
  event_name:
    description: "override github.event_name"
    required: false
    default: ${{ github.event_name }}
runs:
  using: "composite"
  steps:
    - name: Parse env global
      shell: bash
      run: |
        EVENT_TYPE=""
        TRAVIS_PULL_REQUEST_BRANCH=""
        TRAVIS_BRANCH=""
        REGEX="(repository|workflow)_dispatch"
        if [[ "${{ inputs.event_name }}" == "schedule" ]]; then
          EVENT_TYPE="cron";
        elif [[ "${{ inputs.event_name }}" == "pull_request" ]]; then
          EVENT_TYPE="pull_request";
          TRAVIS_BRANCH=${GITHUB_BASE_REF}
          TRAVIS_PULL_REQUEST_BRANCH=${GITHUB_BASE_REF}
        elif [[ "${{ inputs.event_name }}" == "push" ]]; then
          EVENT_TYPE="push";
          TRAVIS_BRANCH=${GITHUB_REF##*/}
        elif [[ "${{ inputs.event_name }}" =~ $REGEX ]]; then
          EVENT_TYPE="api";
        fi
        echo "TRAVIS_EVENT_TYPE=${EVENT_TYPE}" >> $GITHUB_ENV
        echo "TRAVIS_PULL_REQUEST_BRANCH=${TRAVIS_PULL_REQUEST_BRANCH}" >> $GITHUB_ENV
        echo "TRAVIS_BRANCH=${TRAVIS_BRANCH}" >> $GITHUB_ENV
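A sketch of using the facade inside a job's steps and reading the three variables it exports into $GITHUB_ENV; the echo step is purely illustrative:

      - name: map github.event_name onto TRAVIS_EVENT_TYPE
        uses: ./.github/actions/travis-env-var-facade
      - name: inspect the mapped variables
        shell: bash
        run: |
          echo "TRAVIS_EVENT_TYPE=$TRAVIS_EVENT_TYPE"
          echo "TRAVIS_BRANCH=$TRAVIS_BRANCH"
          echo "TRAVIS_PULL_REQUEST_BRANCH=$TRAVIS_PULL_REQUEST_BRANCH"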
.github/actions/upload-build-artifacts/action.yml (vendored, new file, 41 lines)
@@ -0,0 +1,41 @@
name: "Upload build artifacts"
description: "Upload build artifacts"

runs:
  using: "composite"
  steps:
    ## NXCACHE
    - name: tar artifacts
      run: |
        tar czf dist.tar.gz dist
        tar czf nxcache.tar.gz nxcache
        tar czf adf-cli.tar.gz node_modules/@alfresco/adf-cli
        tar czf adf-testing.tar.gz node_modules/@alfresco/adf-testing
      shell: bash
    - name: update NX cache folder
      uses: actions/upload-artifact@v3
      continue-on-error: true
      with:
        path: nxcache.tar.gz
        name: nxcache
    ## DIST
    - name: update Dist folder
      uses: actions/upload-artifact@v3
      continue-on-error: true
      with:
        path: dist.tar.gz
        name: dist
    ## ADF CLI
    - name: save ADF cli
      uses: actions/upload-artifact@v3
      continue-on-error: true
      with:
        path: adf-cli.tar.gz
        name: adf-cli
    ## ADF TESTING
    - name: save ADF testing
      uses: actions/upload-artifact@v3
      continue-on-error: true
      with:
        path: adf-testing.tar.gz
        name: adf-testing
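A sketch of the producing side: a build job that creates dist, nxcache and the node_modules/@alfresco packages, then persists them for later jobs via this action. The build command shown is a placeholder, not taken from this commit:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: build the libraries   # placeholder for the project's real build steps
        run: npx nx run-many --target=build --all
      - name: persist build outputs for downstream jobs
        uses: ./.github/actions/upload-build-artifacts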