Mirror of https://github.com/Alfresco/alfresco-ng2-components.git (synced 2025-05-12 17:04:57 +00:00)

[AAE-7986] Migrate ADF from Travis to Github actions (#8050)

* rebased * [ci:force]

This commit is contained in:
parent 812352ecdc
commit 70e6d642f3
.editorconfig

@@ -13,6 +13,14 @@ trim_trailing_whitespace = true
 indent_style = space
 indent_size = 2

+[*.yaml]
+indent_style = space
+indent_size = 2
+
+[*.yml]
+indent_style = space
+indent_size = 2
+
 [*.md]
 max_line_length = off
 trim_trailing_whitespace = false
.github/actions/before-install/action.yml (new file, 13 lines, vendored)

name: "before-install"
description: "launch before_install.sh and then parse env vars"
runs:
  using: "composite"
  steps:
    - name: Before install
      shell: bash
      run: |
        bash -xv ./scripts/ci/job_hooks/before_install.sh "/tmp/github_vars.env"
        while read ENVVAR; do
          echo "$ENVVAR" >> $GITHUB_ENV
        done < "/tmp/github_vars.env"
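A minimal sketch of how a job could consume this composite action; the job and step names are illustrative, and GIT_HASH is one of the variables before_install.sh exports to the env file:

jobs:
  demo:
    runs-on: ubuntu-22.04
    steps:
      - uses: actions/checkout@v3
      - uses: ./.github/actions/before-install
      # every VAR=value line written to /tmp/github_vars.env is now in the job env
      - name: read an exported variable
        shell: bash
        run: echo "GIT_HASH=$GIT_HASH"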
.github/actions/download-build-artifacts/action.yml (new file, 53 lines, vendored)

name: "Download build artifacts"
description: "Download build artifacts"
runs:
  using: "composite"
  steps:
    ## NXCACHE
    - name: update NX cache folder
      uses: actions/download-artifact@v3
      with:
        path: .
        name: nxcache
    ## DIST
    - name: update Dist folder
      uses: actions/download-artifact@v3
      with:
        path: .
        name: dist
    ## ADF CLI
    - name: update Dist folder
      uses: actions/download-artifact@v3
      with:
        path: .
        name: adf-cli
    ## ADF TESTING
    - name: update Dist folder
      uses: actions/download-artifact@v3
      with:
        path: .
        name: adf-testing
    - name: extract nxcache
      run: |
        tar xzf nxcache.tar.gz
      shell: bash
    - name: extract dist
      run: |
        tar xzf dist.tar.gz
      shell: bash
    - name: extract adf cli
      run: |
        tar xzf adf-cli.tar.gz
        tar xzf adf-testing.tar.gz
      shell: bash
    - name: show files
      shell: bash
      run: |
        pwd
        ls -lha
        echo "====DIST===="
        find dist -maxdepth 1 -type d
        echo "====NXCACHE===="
        find nxcache -maxdepth 1 -type d
        echo "====ADF===="
        find node_modules/@alfresco/ -maxdepth 1 -type d
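A sketch of the intended pairing with the upload twin of this action, which appears further down: a producer job publishes the four tarballs as artifacts, and a dependent job restores and untars them into its workspace (job names here are illustrative):

jobs:
  build:
    runs-on: ubuntu-22.04
    steps:
      - uses: actions/checkout@v3
      # ... build steps ...
      - uses: ./.github/actions/upload-build-artifacts   # creates nxcache/dist/adf-cli/adf-testing artifacts
  consume:
    needs: [build]
    runs-on: ubuntu-22.04
    steps:
      - uses: actions/checkout@v3
      - uses: ./.github/actions/download-build-artifacts # restores and extracts the tarballs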
.github/actions/e2e/action.yml (new file, 161 lines, vendored)

name: "e2e"
description: "e2e"
inputs:
  e2e-test-id:
    description: "Test id"
    required: true
  e2e-test-folder:
    description: "Test folder"
    required: true
  e2e-test-provider:
    description: "Test provider"
    required: true
  e2e-test-auth:
    description: "Test auth"
    required: true
  output:
    description: "Output path"
    required: true
  check-cs-env:
    required: true
    description: check cs env
    default: "false"
  check-ps-env:
    required: true
    description: check ps env
    default: "false"
  check-external-cs-env:
    required: true
    description: check external cs env
    default: "false"
  check-ps-cloud-env:
    required: true
    description: check ps cloud env
    default: "false"
  e2e-tar-name:
    required: false
    default: e2e.tar.gz
  apa-proxy:
    description: "proxy host"
    required: true

runs:
  using: "composite"
  steps:
    - name: use APA as PROXY host if apa-proxy is set
      shell: bash
      run: |
        if [[ -n "${{ inputs.apa-proxy }}" ]]; then
          echo "APA proxy set."
          echo "PROXY_HOST_BPM=${E2E_HOST_APA}" >> $GITHUB_ENV
        fi

    - name: install aws cli
      shell: bash
      run: pip install awscli
    - name: download smartrunner test results from s3 bucket if they exist
      shell: bash
      env:
        REMOTE_PATH: smart-runner/${{ github.run_id }}/${{ inputs.e2e-test-folder }}-${{ inputs.e2e-test-id }}/e2e.tar.gz
      run: |
        set -u;
        mkdir -p "${SMART_RUNNER_PATH}"
        if [[ $(aws s3 ls "${S3_BUILD_BUCKET}/adf/${REMOTE_PATH}" > /dev/null; echo $?) -eq 0 ]]; then
          echo "downloading test files"
          aws s3 cp "${S3_BUILD_BUCKET}/adf/${REMOTE_PATH}" .;
          tar xzf ${{ inputs.e2e-tar-name }};
        else
          echo "nothing to download";
        fi

    - name: check EXTERNAL-CS is UP
      shell: bash
      if: ${{ inputs.check-external-cs-env == 'true' }}
      run: |
        set -u;
        ./node_modules/@alfresco/adf-cli/bin/adf-cli \
          check-cs-env \
          --host "$EXTERNAL_ACS_HOST" \
          -u "$E2E_USERNAME" \
          -p "$E2E_PASSWORD" || exit 1

    - name: Check CS is UP
      shell: bash
      if: ${{ inputs.check-cs-env == 'true' }}
      run: |
        set -u;
        ./node_modules/@alfresco/adf-cli/bin/adf-cli \
          check-cs-env \
          --host "$E2E_HOST" \
          -u "$E2E_USERNAME" \
          -p "$E2E_PASSWORD" || exit 1

    - name: check PS is UP
      shell: bash
      if: ${{ inputs.check-ps-env == 'true' }}
      run: |
        set -u;
        ./node_modules/@alfresco/adf-cli/bin/adf-cli init-aps-env \
          --host "$E2E_HOST" \
          -u "$E2E_USERNAME" \
          -p "$E2E_PASSWORD" \
          --license "$AWS_S3_BUCKET_ACTIVITI_LICENSE" || exit 1

    - name: check PS-CLOUD is UP
      shell: bash
      if: ${{ inputs.check-ps-cloud-env == 'true' }}
      run: |
        set -u;
        ./node_modules/@alfresco/adf-cli/bin/adf-cli init-aae-env \
          --oauth "$E2E_HOST" \
          --host "$E2E_HOST_APA" \
          --modelerUsername "$E2E_MODELER_USERNAME" \
          --modelerPassword "$E2E_MODELER_PASSWORD" \
          --devopsUsername "$E2E_DEVOPS_USERNAME" \
          --devopsPassword "$E2E_DEVOPS_PASSWORD" \
          --clientId 'alfresco' || exit 1

    - name: variables sanification
      env:
        FOLDER: "${{ inputs.e2e-test-folder }}"
        PROVIDER: "${{ inputs.e2e-test-provider }}"
        AUTH_TYPE: "${{ inputs.e2e-test-auth }}"
        E2E_TEST_ID: "${{ inputs.e2e-test-id }}"
      shell: bash
      run: |
        set -u;
        echo $PROXY_HOST_BPM
        NX_CALCULATION_FLAGS=$(echo $NX_CALCULATION_FLAGS | sed "s@'@@g")
        echo "NX_CALCULATION_FLAGS=$NX_CALCULATION_FLAGS" >> $GITHUB_ENV
        GIT_HASH=$(echo $GIT_HASH | sed "s@'@@g")
        echo "GIT_HASH=$GIT_HASH" >> $GITHUB_ENV
    - name: run test with retries
      id: retry_run
      env:
        FOLDER: "${{ inputs.e2e-test-folder }}"
        PROVIDER: "${{ inputs.e2e-test-provider }}"
        AUTH_TYPE: "${{ inputs.e2e-test-auth }}"
        E2E_TEST_ID: "${{ inputs.e2e-test-id }}"
      uses: nick-fields/retry@v2.8.2
      with:
        timeout_minutes: 40
        max_attempts: 2
        retry_wait_seconds: 30
        shell: bash
        command: |
          set -u;
          export GH_ACTION_RETRY_COUNT=$(cat ${GITHUB_OUTPUT} | grep -E '^[0-9]{1,2}$' | tail -n1)
          echo "RETRY GH_ACTION_RETRY_COUNT = <$GH_ACTION_RETRY_COUNT>"
          bash ./scripts/travis/e2e/e2e.sh "$E2E_TEST_ID" || exit 1

    - name: upload smartrunner tests results on s3 to cache tests
      shell: bash
      if: always()
      env:
        REMOTE_PATH: "smart-runner/${{ github.run_id }}/${{ inputs.e2e-test-folder }}-${{ inputs.e2e-test-id }}/e2e.tar.gz"
      # description: always upload newer results
      run: |
        tar czf "${{ inputs.e2e-tar-name }}" "${SMART_RUNNER_PATH}"
        aws s3 cp "${{ inputs.e2e-tar-name }}" "${S3_BUILD_BUCKET}/adf/$REMOTE_PATH"
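A condensed sketch of a single invocation of this action; the real wiring, driven by a matrix, is in pull-request.yml further down, and the values here are illustrative:

- uses: ./.github/actions/e2e
  with:
    e2e-test-id: "core"
    e2e-test-folder: "core"
    e2e-test-provider: "ALL"
    e2e-test-auth: "OAUTH"
    check-cs-env: "true"
    check-ps-env: "true"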
.github/actions/get-latest-tag-sha/action.yml (new file, 19 lines, vendored)

name: 'Get tag sha'
description: 'get the sha of the latest or a specific tag'

outputs:
  tag_sha:
    description: "long sha of the tag"
    value: ${{ steps.sha_out.outputs.tag_sha }}

runs:
  using: "composite"

  steps:
    - name: get tag short sha
      shell: bash
      id: sha_out
      run: |
        tag_sha=$(git rev-list -n 1 $(git tag --sort=-creatordate | head -n 1))
        echo "tag_sha=$tag_sha" >> $GITHUB_OUTPUT
        echo $tag_sha
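A sketch of consuming the tag_sha output from a later step (the step id and the echo are illustrative; the setup action below uses exactly this pattern for its cache key):

- id: tag-sha
  uses: ./.github/actions/get-latest-tag-sha
- shell: bash
  run: echo "latest tag points at ${{ steps.tag-sha.outputs.tag_sha }}"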
.github/actions/setup/action.yml (new file, 64 lines, vendored)

name: 'Setup'
description: 'Initialize cache, env var load'
inputs:
  enable-cache:
    description: 'enable caching'
    required: false
    type: boolean
    default: 'true'

runs:
  using: "composite"
  steps:
    - name: install NPM
      uses: actions/setup-node@v3
      with:
        node-version: 14
        cache-dependency-path: package-lock.json
    - name: get latest tag sha
      id: tag-sha
      uses: ./.github/actions/get-latest-tag-sha
    # CACHE
    - name: Node cache
      id: node-cache
      if: ${{ inputs.enable-cache == 'true' }}
      uses: actions/cache@v3
      env:
        cache-name: node-cache
      with:
        path: |
          node_modules
          ~/.npm
          nxcache
          dist
        key: .npm-${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }}-${{ steps.tag-sha.outputs.tag_sha }}
        restore-keys: |
          node-${{ runner.os }}-build-${{ env.cache-name }}-
          node-${{ runner.os }}-build-
          node-${{ runner.os }}-
    - name: pip cache
      uses: actions/cache@v3
      if: ${{ inputs.enable-cache == 'true' }}
      with:
        path: ~/.cache/pip
        key: ${{ runner.os }}-pip-
        restore-keys: |
          ${{ runner.os }}
    # ENV LOAD
    - name: load .travis.yml env.global variables
      uses: Alfresco/alfresco-build-tools/.github/actions/travis-env-load@v1.17.0
      with:
        yml_path: .travis.yml
    - name: load "TRAVIS_EVENT_TYPE"
      uses: ./.github/actions/travis-env-var-facade
    - name: before install script
      uses: ./.github/actions/before-install
    # sanitize nx calculation flags string
    - name: input validation for NX CALCULATION FLAGS
      shell: bash
      run: |
        export NX_CALCULATION_FLAGS=$(echo $NX_CALCULATION_FLAGS | sed "s@'@@g")
        echo "NX_CALCULATION_FLAGS=$NX_CALCULATION_FLAGS" >> $GITHUB_ENV
    - name: link nx executable
      run: sudo ln -s $(npm bin)/nx /usr/bin/nx
      shell: bash
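The enable-cache input gates both cache steps, so a caller that wants a cold run can opt out; a sketch, mirroring how the e2e job below opts in with "true":

- uses: ./.github/actions/setup
  with:
    enable-cache: "false"   # skip the node and pip cache steps for a clean build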
.github/actions/travis-env-var-facade/action.yml (new file, 34 lines, vendored)

# TRAVIS_EVENT_TYPE= Indicates how the build was triggered. One of push, pull_request, api, cron
name: "travis-event-type-env-var"
description: "Mimic loading of a TRAVIS_EVENT_TYPE env var"

inputs:
  event_name:
    description: "override github.event_name"
    required: false
    default: ${{ github.event_name }}
runs:
  using: "composite"
  steps:
    - name: Parse env global
      shell: bash
      run: |
        EVENT_TYPE=""
        TRAVIS_PULL_REQUEST_BRANCH=""
        TRAVIS_BRANCH=""
        REGEX="(repository|workflow)_dispatch"
        if [[ "${{ inputs.event_name }}" == "schedule" ]]; then
          EVENT_TYPE="cron";
        elif [[ "${{ inputs.event_name }}" == "pull_request" ]]; then
          EVENT_TYPE="pull_request";
          TRAVIS_BRANCH=${GITHUB_BASE_REF}
          TRAVIS_PULL_REQUEST_BRANCH=${GITHUB_BASE_REF}
        elif [[ "${{ inputs.event_name }}" == "push" ]]; then
          EVENT_TYPE="push";
          TRAVIS_BRANCH=${GITHUB_REF##*/}
        elif [[ "${{ inputs.event_name }}" =~ $REGEX ]]; then
          EVENT_TYPE="api";
        fi
        echo "TRAVIS_EVENT_TYPE=${EVENT_TYPE}" >> $GITHUB_ENV
        echo "TRAVIS_PULL_REQUEST_BRANCH=${TRAVIS_PULL_REQUEST_BRANCH}" >> $GITHUB_ENV
        echo "TRAVIS_BRANCH=${TRAVIS_BRANCH}" >> $GITHUB_ENV
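Because event_name defaults to github.event_name, the mapping can also be exercised with an explicit override, for example to force the cron path; a sketch (the with: value is illustrative):

- uses: ./.github/actions/travis-env-var-facade
  with:
    event_name: schedule   # maps to TRAVIS_EVENT_TYPE=cron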
.github/actions/upload-build-artifacts/action.yml (new file, 41 lines, vendored)

name: "Upload build artifacts"
description: "Upload build artifacts"

runs:
  using: "composite"
  steps:
    ## NXCACHE
    - name: tar artifacts
      run: |
        tar czf dist.tar.gz dist
        tar czf nxcache.tar.gz nxcache
        tar czf adf-cli.tar.gz node_modules/@alfresco/adf-cli
        tar czf adf-testing.tar.gz node_modules/@alfresco/adf-testing
      shell: bash
    - name: update NX cache folder
      uses: actions/upload-artifact@v3
      continue-on-error: true
      with:
        path: nxcache.tar.gz
        name: nxcache
    ## DIST
    - name: update Dist folder
      uses: actions/upload-artifact@v3
      continue-on-error: true
      with:
        path: dist.tar.gz
        name: dist
    ## ADF CLI
    - name: save ADF cli
      uses: actions/upload-artifact@v3
      continue-on-error: true
      with:
        path: adf-cli.tar.gz
        name: adf-cli
    ## ADF TESTING
    - name: save ADF testing
      uses: actions/upload-artifact@v3
      continue-on-error: true
      with:
        path: adf-testing.tar.gz
        name: adf-testing
.github/workflows/pull-request.yml (new file, 250 lines, vendored)

name: "pull-request"

on:
  pull_request:
    types: [opened, synchronize, reopened]
    branches:
      - develop
      - master
      - develop-patch*
      - master-patch*

env:
  BASE_REF: ${{ github.base_ref }}
  HEAD_REF: ${{ github.head_ref }}
  AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
  AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
  GITHUB_BRANCH: ${{ github.ref_name }}
  TRAVIS_BUILD_DIR: ${{ github.workspace }}
  TRAVIS_COMMIT: ${{ github.sha }}
  BUILD_ID: ${{ github.run_id }}
  TRAVIS_RUN_NUMBER: ${{ github.run_attempt }}
  TRAVIS_BUILD_NUMBER: ${{ github.run_id }}
  JOB_ID: ${{ github.run_id }}
  PROXY_HOST_BPM: ${{ secrets.E2E_HOST }}
  E2E_HOST_APA: ${{ secrets.E2E_HOST_APA }}
  E2E_HOST: ${{ secrets.E2E_HOST }}
  E2E_USERNAME: ${{ secrets.E2E_ADMIN_EMAIL_IDENTITY }}
  E2E_PASSWORD: ${{ secrets.E2E_PASSWORD }}
  E2E_ADMIN_EMAIL_IDENTITY: ${{ secrets.E2E_ADMIN_EMAIL_IDENTITY }}
  E2E_ADMIN_PASSWORD_IDENTITY: ${{ secrets.E2E_ADMIN_PASSWORD_IDENTITY }}
  USERNAME_ADF: ${{ secrets.E2E_USERNAME }}
  PASSWORD_ADF: ${{ secrets.E2E_PASSWORD }}
  URL_HOST_ADF: ${{ secrets.URL_HOST_ADF }}
  IDENTITY_ADMIN_EMAIL: ${{ secrets.E2E_ADMIN_EMAIL_IDENTITY }}
  IDENTITY_ADMIN_PASSWORD: ${{ secrets.E2E_ADMIN_PASSWORD_IDENTITY }}
  AWS_S3_BUCKET_ACTIVITI_LICENSE: ${{ secrets.AWS_S3_BUCKET_ACTIVITI_LICENSE }}
  HOST_SSO: ${{ secrets.HOST_SSO }}
  LOG_LEVEL: "ERROR"
  E2E_LOG_LEVEL: "ERROR"
  E2E_MODELER_USERNAME: ${{ secrets.E2E_MODELER_USERNAME }}
  E2E_MODELER_PASSWORD: ${{ secrets.E2E_MODELER_PASSWORD }}
  EXTERNAL_ACS_HOST: ${{ secrets.EXTERNAL_ACS_HOST }}
  E2E_DEVOPS_USERNAME: ${{ secrets.E2E_DEVOPS_USERNAME }}
  E2E_DEVOPS_PASSWORD: ${{ secrets.E2E_DEVOPS_PASSWORD }}
  USERNAME_SUPER_ADMIN_ADF: ${{ secrets.USERNAME_SUPER_ADMIN_ADF }}
  PASSWORD_SUPER_ADMIN_ADF: ${{ secrets.PASSWORD_SUPER_ADMIN_ADF }}
  HR_USER: ${{ secrets.HR_USER }}
  HR_USER_PASSWORD: ${{ secrets.HR_USER_PASSWORD }}
  SMART_RUNNER_PATH: ".protractor-smartrunner"
  S3_DBP_PATH: ${{ secrets.S3_DBP_PATH }}
  S3_BUILD_BUCKET: ${{ secrets.S3_BUILD_BUCKET }}
  NODE_OPTIONS: "--max-old-space-size=5120"

jobs:
  main:
    # long timeout required when cache has to be recreated
    timeout-minutes: 30
    name: "Build Components"
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          fetch-depth: 0 # Fetch all history for all tags and branches
      - uses: ./.github/actions/setup
      - name: install
        run: |
          npm ci
          nx run cli:bundle
          nx run testing:bundle
      - name: nx affected lint
        run: nx affected --target=lint $NX_CALCULATION_FLAGS
      - run: /usr/bin/xvfb-run --auto-servernum nx affected:test $NX_CALCULATION_FLAGS --exclude="insights,core,extensions,process-services,process-services-cloud"
      - run: nx affected:build $NX_CALCULATION_FLAGS --prod
      - run: nx run stories:build-storybook --configuration ci
      - uses: ./.github/actions/upload-build-artifacts

  playwright:
    timeout-minutes: 20
    name: "playwright"
    needs: [main]
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          fetch-depth: 0 # Fetch all history for all
      - uses: ./.github/actions/setup
      - name: clean
        run: |
          rm -rf nxcache
          rm -rf dist
      - uses: ./.github/actions/download-build-artifacts
      - name: build adf cli
        run: |
          nx build cli
      - name: Process Cloud Storybook Playwright
        run: |
          npx playwright install chromium
          sudo sysctl -w fs.inotify.max_user_watches=524288
          npx playwright test --config='e2e-playwright/playwright.config.ts' || exit 1
      - uses: ./.github/actions/upload-build-artifacts

  e2e:
    timeout-minutes: 90
    name: "e2e: ${{ matrix.e2e-test.description }}"
    needs: [playwright]
    runs-on: ubuntu-22.04
    strategy:
      fail-fast: false
      # max-parallel: 4
      matrix:
        e2e-test:
          - description: "Core"
            test-id: "core"
            folder: "core"
            provider: "ALL"
            auth: "OAUTH"
            check-cs-env: "true"
            check-ps-env: "true"
          - description: "Content: Components"
            test-id: "content-services"
            folder: "content-services/components"
            provider: "ECM"
            auth: "BASIC"
            check-cs-env: "true"
          - description: "Content: Directives"
            test-id: "content-services"
            folder: "content-services/directives"
            provider: "ECM"
            auth: "BASIC"
            check-cs-env: "true"
          - description: "Content: Document List"
            test-id: "content-services"
            folder: "content-services/document-list"
            provider: "ECM"
            auth: "BASIC"
            check-cs-env: "true"
          - description: "Content: Metadata"
            test-id: "content-services"
            folder: "content-services/metadata"
            provider: "ECM"
            auth: "BASIC"
            check-cs-env: "true"
          - description: "Content: Upload and Versioning"
            test-id: "content-services browser"
            folder: "content-services/upload"
            provider: "ECM"
            auth: "BASIC"
            check-cs-env: "true"
          - description: "Search"
            test-id: "content-services"
            folder: "search"
            provider: "ECM"
            auth: "BASIC"
            check-cs-env: "true"
          - description: "Process: Form"
            test-id: "process-services"
            folder: "process-services/form"
            provider: "BPM"
            auth: "OAUTH"
            check-ps-env: "true"
            check-external-cs-env: "true"
          - description: "Process: Process"
            test-id: "process-services"
            folder: "process-services/process"
            provider: "BPM"
            auth: "OAUTH"
            check-ps-env: "true"
            check-external-cs-env: "true"
          - description: "Process: Tasks"
            test-id: "process-services"
            folder: "process-services/tasks"
            provider: "BPM"
            auth: "OAUTH"
            check-ps-env: "true"
            check-external-cs-env: "true"
          - description: "Process: Widget"
            test-id: "process-services"
            folder: "process-services/widgets"
            provider: "BPM"
            auth: "OAUTH"
            check-ps-env: "true"
            check-external-cs-env: "true"
          - description: "Process Cloud: Form"
            test-id: "process-services-cloud"
            folder: "process-services-cloud/form-field"
            provider: "ALL"
            auth: "OAUTH"
            apa-proxy: true
            check-cs-env: "true"
            check-ps-cloud-env: "true"
          - description: "Process Cloud: People"
            test-id: "process-services-cloud"
            folder: "process-services-cloud/people"
            provider: "ALL"
            auth: "OAUTH"
            apa-proxy: true
            check-cs-env: "true"
            check-ps-cloud-env: "true"
          - description: "Process Cloud: Process"
            test-id: "process-services-cloud"
            folder: "process-services-cloud/process"
            provider: "ALL"
            auth: "OAUTH"
            apa-proxy: true
            check-cs-env: "true"
            check-ps-cloud-env: "true"
          - description: "Process Cloud: Start Task"
            test-id: "process-services-cloud"
            folder: "process-services-cloud/start-task"
            provider: "ALL"
            auth: "OAUTH"
            apa-proxy: true
            check-cs-env: "true"
            check-ps-cloud-env: "true"
          - description: "Process Cloud: Tasks List"
            test-id: "process-services-cloud"
            folder: "process-services-cloud/task-list"
            provider: "ALL"
            auth: "OAUTH"
            apa-proxy: true
            check-cs-env: "true"
            check-ps-cloud-env: "true"

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          fetch-depth: 0 # Fetch all history for all
      - uses: ./.github/actions/setup
        with:
          enable-cache: "true"
      - name: clean
        run: |
          rm -rf nxcache
          rm -rf dist
      - uses: ./.github/actions/download-build-artifacts
      - name: e2e
        uses: ./.github/actions/e2e
        with:
          e2e-test-id: ${{ matrix.e2e-test.test-id }}
          e2e-test-folder: ${{ matrix.e2e-test.folder }}
          e2e-test-provider: ${{ matrix.e2e-test.provider }}
          e2e-test-auth: ${{ matrix.e2e-test.auth }}
          check-cs-env: ${{ matrix.e2e-test.check-cs-env }}
          check-ps-env: ${{ matrix.e2e-test.check-ps-env }}
          check-ps-cloud-env: ${{ matrix.e2e-test.check-ps-cloud-env }}
          check-external-cs-env: ${{ matrix.e2e-test.check-external-cs-env }}
          apa-proxy: ${{ matrix.e2e-test.apa-proxy }}
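Each matrix entry maps one-to-one onto the e2e action inputs above, so adding a suite is a single list item. A hypothetical example (this folder is not part of the commit; all values are illustrative):

- description: "Content: Favorites"          # hypothetical suite
  test-id: "content-services"
  folder: "content-services/favorites"       # illustrative path
  provider: "ECM"
  auth: "BASIC"
  check-cs-env: "true"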
.github/workflows/release.yml (new file, 173 lines, vendored)

name: "release"

on:
  pull_request:
    types: [closed]
    branches:
      - develop
      - master
      - develop-patch*
      - master-patch*

env:
  AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
  AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
  GITHUB_BRANCH: ${{ github.ref_name }}
  TRAVIS_BUILD_DIR: ${{ github.workspace }}
  TRAVIS_COMMIT: ${{ github.sha }}
  BUILD_ID: ${{ github.run_id }}
  TRAVIS_RUN_NUMBER: ${{ github.run_attempt }}
  TRAVIS_BUILD_NUMBER: github-run-${{ github.run_id }}
  JOB_ID: ${{ github.run_id }}
  PROXY_HOST_BPM: ${{ secrets.E2E_HOST }}
  E2E_HOST_APA: ${{ secrets.E2E_HOST_APA }}
  E2E_HOST: ${{ secrets.E2E_HOST }}
  E2E_USERNAME: ${{ secrets.E2E_ADMIN_EMAIL_IDENTITY }}
  E2E_PASSWORD: ${{ secrets.E2E_PASSWORD }}
  E2E_ADMIN_EMAIL_IDENTITY: ${{ secrets.E2E_ADMIN_EMAIL_IDENTITY }}
  E2E_ADMIN_PASSWORD_IDENTITY: ${{ secrets.E2E_ADMIN_PASSWORD_IDENTITY }}
  USERNAME_ADF: ${{ secrets.E2E_USERNAME }}
  PASSWORD_ADF: ${{ secrets.E2E_PASSWORD }}
  URL_HOST_ADF: ${{ secrets.URL_HOST_ADF }}
  IDENTITY_ADMIN_EMAIL: ${{ secrets.E2E_ADMIN_EMAIL_IDENTITY }}
  IDENTITY_ADMIN_PASSWORD: ${{ secrets.E2E_ADMIN_PASSWORD_IDENTITY }}
  AWS_S3_BUCKET_ACTIVITI_LICENSE: ${{ secrets.AWS_S3_BUCKET_ACTIVITI_LICENSE }}
  HOST_SSO: ${{ secrets.HOST_SSO }}
  LOG_LEVEL: "ERROR"
  E2E_LOG_LEVEL: "ERROR"
  E2E_MODELER_USERNAME: ${{ secrets.E2E_MODELER_USERNAME }}
  E2E_MODELER_PASSWORD: ${{ secrets.E2E_MODELER_PASSWORD }}
  EXTERNAL_ACS_HOST: ${{ secrets.EXTERNAL_ACS_HOST }}
  E2E_DEVOPS_USERNAME: ${{ secrets.E2E_DEVOPS_USERNAME }}
  E2E_DEVOPS_PASSWORD: ${{ secrets.E2E_DEVOPS_PASSWORD }}
  USERNAME_SUPER_ADMIN_ADF: ${{ secrets.USERNAME_SUPER_ADMIN_ADF }}
  PASSWORD_SUPER_ADMIN_ADF: ${{ secrets.PASSWORD_SUPER_ADMIN_ADF }}
  HR_USER: ${{ secrets.HR_USER }}
  HR_USER_PASSWORD: ${{ secrets.HR_USER_PASSWORD }}
  NPM_REGISTRY_ADDRESS: ${{ secrets.NPM_REGISTRY_ADDRESS }}
  NPM_REGISTRY_TOKEN: ${{ secrets.NPM_REGISTRY_TOKEN }}
  DOCKER_REPOSITORY_USER: ${{ secrets.DOCKER_REPOSITORY_USER }}
  DOCKER_REPOSITORY_PASSWORD: ${{ secrets.DOCKER_REPOSITORY_PASSWORD }}
  DOCKER_REPOSITORY_DOMAIN: ${{ secrets.DOCKER_REPOSITORY_DOMAIN }}
  DOCKER_REPOSITORY_STORYBOOK: "${{ secrets.DOCKER_REPOSITORY_DOMAIN }}/alfresco/storybook"
  DOCKER_REPOSITORY: "${{ secrets.DOCKER_REPOSITORY_DOMAIN }}/alfresco/demo-shell"
  NODE_OPTIONS: "--max-old-space-size=5120"

jobs:
  build:
    timeout-minutes: 20
    if: github.event.pull_request.merged == true
    name: "Build Components"
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - uses: ./.github/actions/setup
      - run: npm -v
      - name: install
        run: |
          npm ci
          nx run cli:bundle
          nx run testing:bundle
      - run: npx nx affected --target=lint $NX_CALCULATION_FLAGS
      - run: nx affected:build $NX_CALCULATION_FLAGS --prod --exclude="demoshell"
      - run: nx $NX_CALCULATION_FLAGS build demoshell --configuration production
      - run: nx build demoshell --configuration production
      - uses: ./.github/actions/upload-build-artifacts

  push-demoshell:
    needs: [build]
    timeout-minutes: 10
    if: github.event.pull_request.merged == true
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          fetch-depth: 1
      - run: git fetch --all
      - uses: ./.github/actions/setup
      - name: clean
        run: |
          rm -rf nxcache
          rm -rf dist
      - uses: ./.github/actions/download-build-artifacts
      - run: nx run cli:bundle
      - name: push Demoshell docker image
        run: |
          nx build demoshell --configuration production
          . ./scripts/travis/release/docker-tag.sh
          echo $TAGS
          set -u;
          ./scripts/travis/release/release-demoshell-docker.sh

  push-storybook:
    needs: [build]
    timeout-minutes: 10
    if: github.event.pull_request.merged == true
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - uses: ./.github/actions/setup
      - name: clean
        run: |
          rm -rf nxcache
          rm -rf dist
      - uses: ./.github/actions/download-build-artifacts
      - run: nx run cli:bundle
      - name: push Storybook docker image
        run: |
          nx run stories:build-storybook --configuration ci
          . ./scripts/travis/release/docker-tag.sh
          echo $TAGS
          set -u;
          ./scripts/travis/release/release-storybook-docker.sh

  npm-release:
    needs: [build]
    timeout-minutes: 10
    if: github.event.pull_request.merged == true
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - uses: ./.github/actions/setup
      - name: clean
        run: |
          rm -rf nxcache
          rm -rf dist
      - uses: ./.github/actions/download-build-artifacts
      - run: nx run cli:bundle
      - name: nx build
        run: |
          set -u;
          ./scripts/travis/build/bumpversion.sh
          nx affected:build $NX_CALCULATION_FLAGS --prod --exclude="demoshell"
          nx affected $NX_CALCULATION_FLAGS --target=pretheme
      - name: release
        run: ./scripts/travis/release/release-npm.sh

  npm-check-bundle:
    needs: [npm-release]
    timeout-minutes: 10
    if: github.event.pull_request.merged == true
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - uses: ./.github/actions/download-build-artifacts
      - name: Check npm bundle
        run: |
          export ADF_VERSION=$(npm view @alfresco/adf-core@${TAG_NPM} version)
          ./scripts/travis/build/npm-check-bundles.sh -v ${ADF_VERSION}
@@ -44,11 +44,11 @@ async function uploadScreenshot(retryCount, suffixFileName) {
     suffixFileName = suffixFileName.replace(/\//g, '-');

-    fs.renameSync(path.resolve(__dirname, '../../e2e-output/'), path.resolve(__dirname, `../../e2e-output-${retryCount}/`))
+    fs.renameSync(path.resolve(__dirname, '../../e2e-output/'), path.resolve(__dirname, `../../e2e-output-${retryCount}-${process.env.GH_ACTION_RETRY_COUNT}/`))

     const child_process = require("child_process");
     child_process.execSync(` tar -czvf ../e2e-result-${suffixFileName}-${retryCount}.tar .`, {
-        cwd: path.resolve(__dirname, `../../e2e-output-${retryCount}/`)
+        cwd: path.resolve(__dirname, `../../e2e-output-${retryCount}-${process.env.GH_ACTION_RETRY_COUNT}/`)
     });

     let pathFile = path.join(__dirname, `../../e2e-result-${suffixFileName}-${retryCount}.tar`);
@@ -31,6 +31,8 @@ enum TARGETS {
 const DOCKER_FILENAME = 'Dockerfile';
 export interface PublishArgs {
     tag?: string;
+    dryrun?: boolean;
+    verbose?: boolean;
     loginCheck?: boolean;
     loginUsername?: string;
     loginPassword?: string;
@@ -44,7 +46,7 @@ export interface PublishArgs {

 function loginPerform(args: PublishArgs) {
     logger.info(`Perform docker login...${args.loginRepo}`);
-    const loginDockerRes = exec('docker', ['login', `-u=${args.loginUsername}`, `-p=${args.loginPassword}`, `${args.loginRepo}`], {});
+    const loginDockerRes = exec('docker', ['login', `-u=${args.loginUsername}`, `-p=${args.loginPassword}`, `${args.loginRepo}`]);
     logger.info(loginDockerRes);
 }
@@ -53,14 +55,16 @@ function buildImagePerform(args: PublishArgs, tag: string) {
     const buildArgs = [];

     if (typeof args.buildArgs === 'string') {
         buildArgs.push(`--build-arg=${args.buildArgs}`);
     } else {
         args.buildArgs.forEach((envVar) => {
-            buildArgs.push (`--build-arg=${envVar}`);
+            buildArgs.push(`--build-arg=${envVar}`);
         });
     }
+    if (args.verbose) {
+        logger.info(`Dry-run Perform docker build -t=${args.dockerRepo}:${tag} ${buildArgs} -f=${args.fileName} ${args.pathProject}`);
+    }
     const response = exec('docker', ['build', `-t=${args.dockerRepo}:${tag}`, ...buildArgs, `-f=${args.fileName}`, args.pathProject], {});
     logger.info(response);
 }
@@ -78,9 +82,13 @@ function pullImagePerform(dockerRepo: string, sourceTag: string) {
 }

 function pushImagePerform(args: PublishArgs, tag: string) {
-    logger.info(`Perform docker push... ${args.dockerRepo}:${tag}`);
-    const response = exec('docker', ['push', `${args.dockerRepo}:${tag}`], {});
-    logger.info(response);
+    if (args.dryrun) {
+        logger.info(`Dry-run Perform docker push... ${args.dockerRepo}:${tag}`);
+    } else {
+        logger.info(`Perform docker push... ${args.dockerRepo}:${tag}`);
+        const response = exec('docker', ['push', `${args.dockerRepo}:${tag}`], {});
+        logger.info(response);
+    }
 }

 function cleanImagePerform(args: PublishArgs, tag: string) {
@@ -101,6 +109,8 @@ function main(args) {
         .option('--loginRepo [type]', 'URL registry')
         .option('--loginPassword [type]', ' password')
         .option('--loginUsername [type]', ' username')
+        .option('--dryrun [type]', 'dryrun')
+        .option('--verbose [type]', 'verbose')
         .option('--loginCheck [type]', 'perform login')
         .option('--pathProject [type]', 'the path build context')
         .option('--sourceTag [type]', 'sourceTag')
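With the new --dryrun and --verbose flags, the docker command can be rehearsed without pushing the image; a sketch of a workflow step (the flags are as declared above, all other values are illustrative):

- name: rehearse docker publish
  shell: bash
  run: |
    ./node_modules/@alfresco/adf-cli/bin/adf-cli docker \
      --dryrun \
      --verbose \
      --dockerRepo "example.com/alfresco/demo-shell" \
      --dockerTags "test-tag" \
      --pathProject "$(pwd)"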
@@ -28,6 +28,7 @@ export interface PublishArgs {
     npmRegistry?: string;
     tokenRegistry?: string;
     pathProject: string;
+    dryrun?: boolean;
 }

 const projects = [
@@ -42,6 +43,10 @@ const projects = [
 ];

 async function npmPublish(args: PublishArgs, project: string) {
+    if (args.dryrun) {
+        logger.info(`Dry run mode, no publish will be done`);
+    }
+
     if (args.npmRegistry) {
         changeRegistry(args, project);
     }
@@ -57,10 +62,14 @@ async function npmPublish(args: PublishArgs, project: string) {
         options.push('-tag');
         options.push(`${args.tag}`);
     }
-    const response = exec('npm', options, { cwd: path.resolve(`${args.pathProject}/dist/libs/${project}`) });
-    logger.info(response);
-    if (args.npmRegistry) {
-        removeNpmConfig(args, project);
+    if (args.dryrun) {
+        logger.info(`Dry-run npm publish. cwd: ${args.pathProject}/dist/libs/${project}`);
+    } else {
+        const response = exec('npm', options, { cwd: path.resolve(`${args.pathProject}/dist/libs/${project}`) });
+        logger.info(response);
+        if (args.npmRegistry) {
+            removeNpmConfig(args, project);
+        }
     }

     await sleep(30000);
@@ -72,8 +81,12 @@ async function npmPublish(args: PublishArgs, project: string) {

 function npmCheckExist(project: string, version: string) {
     logger.info(`Check if lib ${project} is already in npm with version ${version}`);
-    const exist = exec(`npm`, [`view`, `@alfresco/adf-${project}@${version} version`] );
+    let exist = '';
+    try {
+        exist = exec(`npm`, [`view`, `@alfresco/adf-${project}@${version} version`]);
+    } catch (e) {
+        logger.info(`Error: '@alfresco/adf-${project}@${version} version' is not available `);
+    }

     return exist !== '';
 }
@@ -117,6 +130,7 @@ async function main(args) {
         .option('--npmRegistry [type]', 'npm Registry')
         .option('--tokenRegistry [type]', 'token Registry')
         .option('--pathProject [type]', 'pathProject')
+        .option('--dryrun [type]', 'dryrun')
         .parse(process.argv);

     if (process.argv.includes('-h') || process.argv.includes('--help')) {
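The npm-publish command gets the same treatment: with --dryrun it logs the would-be working directory instead of executing npm publish. A sketch of a rehearsal step (registry and tag values are illustrative):

- name: rehearse npm publish
  shell: bash
  run: |
    ./node_modules/@alfresco/adf-cli/bin/adf-cli npm-publish \
      --dryrun \
      --tag alpha \
      --pathProject "$(pwd)"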
@@ -1,5 +1,5 @@
 #!/usr/bin/env bash

 # Upload protractor-smartrunner artifact related to this particular job to S3
-./scripts/ci/utils/artifact-to-s3.sh -a "$SMART_RUNNER_DIRECTORY" -o "$S3_SMART_RUNNER_PATH/$TRAVIS_JOB_ID.tar.bz2"
+./scripts/ci/utils/artifact-to-s3.sh -a "$SMART_RUNNER_DIRECTORY" -o "$S3_SMART_RUNNER_PATH/$JOB_ID.tar.bz2"
@@ -1,7 +1,7 @@
 #!/usr/bin/env bash

 # Download protractor-smartrunner artifact related to this particular job from S3, if exists
-./scripts/ci/utils/artifact-from-s3.sh -a "$S3_SMART_RUNNER_PATH/$TRAVIS_JOB_ID.tar.bz2" -o "$SMART_RUNNER_DIRECTORY"
+./scripts/ci/utils/artifact-from-s3.sh -a "$S3_SMART_RUNNER_PATH/$JOB_ID.tar.bz2" -o "$SMART_RUNNER_DIRECTORY"

 # The adf-testing is not installed through NPM for this reason it needs to be built
 # in addition the dist folder needs to be moved as part of the node modules
scripts/ci/job_hooks/before_install.sh

@@ -7,18 +7,17 @@
 # is supposed to be in the "install.sh" hook script.
 # ===================================================================
 PARENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )/.." && pwd )"
+ENV_FILE=${1:-"/tmp/github_vars.env"}

 # Settings for protractor-smartrunner -------------------------------------------------
-export GIT_HASH=`git rev-parse HEAD`
+export GIT_HASH=$(git rev-parse HEAD)

-# Node settings
-export NODE_OPTIONS="--max_old_space_size=30000"

 # Settings for Nx ---------------------------------------------------------------------
-export BASE_HASH="$(git merge-base origin/$TRAVIS_BRANCH HEAD)"
+export BASE_HASH="$(git merge-base origin/"$TRAVIS_BRANCH" HEAD)"
 export HEAD_HASH="HEAD"
 export HEAD_COMMIT_HASH=${TRAVIS_PULL_REQUEST_SHA:-${TRAVIS_COMMIT}}
-export COMMIT_MESSAGE=`git log --format=%B -n 1 $HEAD_COMMIT_HASH`
+export COMMIT_MESSAGE=$(git log --format=%B -n 1 "$HEAD_COMMIT_HASH")

 #########################################################################################
 # Settings based of Travis event type
@@ -36,7 +35,7 @@ if [ "${TRAVIS_EVENT_TYPE}" == "push" ]; then
         export BUILD_OPTS="--configuration production"
     else
         # into develop
-        export NX_CALCULATION_FLAGS="--base=$(git describe --tags `git rev-list --tags --max-count=1`) --head=$HEAD_HASH"
+        export NX_CALCULATION_FLAGS="--base=$(git describe --tags $(git rev-list --tags --max-count=1)) --head=$HEAD_HASH"
         export BUILD_OPTS="--configuration production"
     fi
 elif [ "${TRAVIS_EVENT_TYPE}" == "pull_request" ]; then
@@ -63,7 +62,7 @@ if [ "${TRAVIS_EVENT_TYPE}" == "push" ]; then
 elif [ "${TRAVIS_EVENT_TYPE}" == "pull_request" ]; then
     echo "pull_request"
     export BASE_HASH="origin/$TRAVIS_BRANCH"
-    source $PARENT_DIR/partials/_ci-flags-parser.sh
+    source "$PARENT_DIR/partials/_ci-flags-parser.sh"
 elif [ "${TRAVIS_EVENT_TYPE}" == "cron" ]; then
     echo "cron"
 else
@@ -71,23 +70,29 @@ else
 fi

 # Cache for protractor smart-runner
-export S3_SMART_RUNNER_PATH="$S3_DBP_PATH/smart-runner/$TRAVIS_BUILD_ID"
+export S3_SMART_RUNNER_PATH="$S3_DBP_PATH/smart-runner/$BUILD_ID"

 # Cache for node_modules
-export NODE_VERSION=`node -v`
-export PACKAGE_LOCK_SHASUM=`shasum ./package-lock.json | cut -f 1 -d " "`
+export NODE_VERSION=$(node -v)
+export PACKAGE_LOCK_SHASUM=$(shasum ./package-lock.json | cut -f 1 -d " ")
 # This can change regardless of package-lock.json, so we need to calculate with this one as well
-export S3_NODE_MODULES_CACHE_ID=`echo $NODE_VERSION-$PACKAGE_LOCK_SHASUM | shasum | cut -f 1 -d " "`
+export S3_NODE_MODULES_CACHE_ID=$(echo "$NODE_VERSION-$PACKAGE_LOCK_SHASUM" | shasum | cut -f 1 -d " ")
 export S3_NODE_MODULES_CACHE_PATH="$S3_DBP_PATH/cache/node_modules/$S3_NODE_MODULES_CACHE_ID.tar.bz2"

 echo "========== Caching settings =========="
-echo "S3_SMART_RUNNER_PATH: $S3_SMART_RUNNER_PATH"
-echo "PACKAGE_LOCK_SHASUM: $PACKAGE_LOCK_SHASUM"
-echo "NODE_VERSION: $NODE_VERSION"
-echo "S3_NODE_MODULES_CACHE_ID: $S3_NODE_MODULES_CACHE_ID"
-echo "S3_NODE_MODULES_CACHE_PATH: $S3_NODE_MODULES_CACHE_PATH"
+echo "S3_SMART_RUNNER_PATH=$S3_SMART_RUNNER_PATH" | tee -a $ENV_FILE
+echo "PACKAGE_LOCK_SHASUM=$PACKAGE_LOCK_SHASUM" | tee -a $ENV_FILE
+echo "NODE_VERSION=$NODE_VERSION" | tee -a $ENV_FILE
+echo "S3_NODE_MODULES_CACHE_ID=$S3_NODE_MODULES_CACHE_ID" | tee -a $ENV_FILE
+echo "S3_NODE_MODULES_CACHE_PATH=$S3_NODE_MODULES_CACHE_PATH" | tee -a $ENV_FILE
 echo "========== Nx settings =========="
-echo "GIT_HASH: $GIT_HASH"
-echo "BASE_HASH: $BASE_HASH"
-echo "HEAD_HASH: $HEAD_HASH"
+echo "GIT_HASH=$GIT_HASH" | tee -a $ENV_FILE
+echo "BASE_HASH=$BASE_HASH" | tee -a $ENV_FILE
+echo "HEAD_HASH=$HEAD_HASH" | tee -a $ENV_FILE
+
+echo "========== Build vars=========="
+echo "BUILD_OPTS=$BUILD_OPTS" | tee -a $ENV_FILE
+echo "NX_CALCULATION_FLAGS=$NX_CALCULATION_FLAGS" | tee -a $ENV_FILE
+echo "HEAD_COMMIT_HASH=$HEAD_COMMIT_HASH" | tee -a $ENV_FILE
+echo "COMMIT_MESSAGE=$COMMIT_MESSAGE" | tee -a $ENV_FILE
@@ -12,6 +12,17 @@
 # git commit -m "[affected:project-name1,project-name2] you commit message"
 #

+# ---------------------------------------------------------------
+# Affected projects calculation
+# ---------------------------------------------------------------
+if [[ $COMMIT_MESSAGE == *"[affected:"* ]]; then
+    PROJECTS=$(echo "$COMMIT_MESSAGE" | grep -o "\[affected\:[^]]*\]" | sed -e 's#\[affected:##g' | sed -e 's#\]##g')
+
+    if [[ $PROJECTS == "*" ]]; then
+        export NX_CALCULATION_FLAGS="--all"
+    fi
+fi
+
 # ---------------------------------------------------------------
 # Forced CI run
 # ---------------------------------------------------------------

@@ -21,3 +32,4 @@ if [[ $COMMIT_MESSAGE == *"[ci:force]"* ]]; then
 else
     export CI_FORCE_RUN=false
 fi
+
scripts/travis/affected-contains.sh

@@ -1,5 +1,5 @@
 #!/usr/bin/env bash
-
+set -e
 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
 verifyLib=$1;
 cd $DIR/../../
@@ -9,10 +9,8 @@ if [ "${TRAVIS_EVENT_TYPE}" == "cron" ]; then
     echo true
     exit 0
 fi
-AFFECTED_LIBS="$(nx print-affected --type=lib --select=projects --base=$BASE_HASH --head=$HEAD_HASH --plain)"
+AFFECTED_LIBS=$(npx nx print-affected --type=lib --select=projects ${NX_CALCULATION_FLAGS} --plain)
 #echo "Verify if affected build contains $1"
-
-
 #echo "Affected libs:$AFFECTED_LIBS"
 if [[ $AFFECTED_LIBS =~ $verifyLib ]]; then
     #echo "Yep project:$verifyLib is affected carry on"
scripts/travis/e2e/e2e.sh

@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 echo "Start e2e"
-
+set -e
 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

 cd $DIR/../../../
@@ -9,7 +9,7 @@ BASE_DIRECTORY=$(echo "$FOLDER" | cut -d "/" -f1)
 verifyLib=$1;

 # set test-e2e params
-if [ ! -z "$2" ]; then
+if [ -n "$2" ]; then
     e2eParams="--$2"
 else
     e2eParams=""

@@ -17,7 +17,8 @@ fi

 echo "Step1 - Verify if affected libs contains $verifyLib"

-AFFECTED_LIB="$(./scripts/travis/affected-contains.sh $verifyLib )";
+AFFECTED_LIB=$(./scripts/travis/affected-contains.sh $verifyLib )
+
 if [ ${AFFECTED_LIB} == true ]; then
     echo "Step2 - $verifyLib affected... will execute e2e"
scripts/travis/release/docker-tag.sh

@@ -8,11 +8,13 @@ if [[ $TRAVIS_BRANCH =~ ^master(-patch.*)?$ ]]; then
 else
     if [[ "${TRAVIS_PULL_REQUEST_BRANCH}" != "" ]];
     then
-        export TAGS=""$TRAVIS_PULL_REQUEST_BRANCH-$TRAVIS_BUILD_NUMBER""
+        export TAGS="$TRAVIS_PULL_REQUEST_BRANCH-$TRAVIS_BUILD_NUMBER"
     else
         export TAGS="$TRAVIS_BRANCH-$TRAVIS_BUILD_NUMBER,$TRAVIS_BRANCH"
     fi;

 fi;

-echo $TAGS
+if [[ -n "$GITHUB_ACTIONS" ]]; then
+    echo "TAGS=$TAGS" >> $GITHUB_ENV;
+fi
+echo "$TAGS"
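Under GitHub Actions the script now persists TAGS through $GITHUB_ENV, so later steps in the same job can read it; a sketch (step names are illustrative):

- name: compute docker tags
  shell: bash
  run: . ./scripts/travis/release/docker-tag.sh
- name: use the tags
  shell: bash
  run: echo "will publish with: $TAGS"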
scripts/travis/release/release-demoshell-docker.sh

@@ -8,6 +8,13 @@ echo "ℹ️ demo-shell: Running the docker with tag" $TAGS
 DOCKER_PROJECT_ARGS="PROJECT_NAME=demo-shell"

 # Publish Image to docker
-./node_modules/@alfresco/adf-cli/bin/adf-cli docker --loginCheck --loginUsername "$DOCKER_REPOSITORY_USER" --loginPassword "$DOCKER_REPOSITORY_PASSWORD" --loginRepo "$DOCKER_REPOSITORY_DOMAIN" --dockerRepo "$DOCKER_REPOSITORY" --buildArgs "$DOCKER_PROJECT_ARGS" --dockerTags "$TAGS" --pathProject "$(pwd)"
+./node_modules/@alfresco/adf-cli/bin/adf-cli docker \
+    --loginCheck \
+    --loginUsername "$DOCKER_REPOSITORY_USER" \
+    --loginPassword "$DOCKER_REPOSITORY_PASSWORD" \
+    --loginRepo "$DOCKER_REPOSITORY_DOMAIN" \
+    --dockerRepo "$DOCKER_REPOSITORY" \
+    --buildArgs "$DOCKER_PROJECT_ARGS" \
+    --dockerTags "$TAGS" \
+    --pathProject "$(pwd)" \
+    "$@"
|
@ -23,5 +23,13 @@ then
|
|||||||
TAG_NPM=alpha
|
TAG_NPM=alpha
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
if [[ -n "$GITHUB_ACTIONS" ]]; then
|
||||||
|
TAG_NPM=test
|
||||||
|
fi
|
||||||
echo "Publishing on npm with tag $TAG_NPM"
|
echo "Publishing on npm with tag $TAG_NPM"
|
||||||
./node_modules/@alfresco/adf-cli/bin/adf-cli npm-publish --npmRegistry $NPM_REGISTRY_ADDRESS --tokenRegistry $NPM_REGISTRY_TOKEN --tag $TAG_NPM --pathProject "$(pwd)"
|
./node_modules/@alfresco/adf-cli/bin/adf-cli npm-publish \
|
||||||
|
--npmRegistry $NPM_REGISTRY_ADDRESS \
|
||||||
|
--tokenRegistry $NPM_REGISTRY_TOKEN \
|
||||||
|
--tag $TAG_NPM \
|
||||||
|
--pathProject "$(pwd)" \
|
||||||
|
"$@"
|
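The trailing "$@" turns these wrappers into pass-throughs, so extra adf-cli flags can be forwarded from a workflow, for example the new dry-run mode; a sketch (the step is illustrative):

- name: npm release rehearsal
  shell: bash
  run: ./scripts/travis/release/release-npm.sh --dryrun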
||||||
|
@ -10,6 +10,13 @@ DOCKER_PROJECT_ARGS="PROJECT_NAME=storybook/stories"
|
|||||||
echo "{}" > $DIR/../../../dist/storybook/stories/app.config.json
|
echo "{}" > $DIR/../../../dist/storybook/stories/app.config.json
|
||||||
|
|
||||||
# Publish Image to docker
|
# Publish Image to docker
|
||||||
./node_modules/@alfresco/adf-cli/bin/adf-cli docker --loginCheck --loginUsername "$DOCKER_REPOSITORY_USER" --loginPassword "$DOCKER_REPOSITORY_PASSWORD" --loginRepo "$DOCKER_REPOSITORY_DOMAIN" --dockerRepo "$DOCKER_REPOSITORY_STORYBOOK" --buildArgs "$DOCKER_PROJECT_ARGS" --dockerTags "$TAGS" --pathProject "$(pwd)"
|
./node_modules/@alfresco/adf-cli/bin/adf-cli docker \
|
||||||
|
--loginCheck \
|
||||||
|
--loginUsername "$DOCKER_REPOSITORY_USER" \
|
||||||
|
--loginPassword "$DOCKER_REPOSITORY_PASSWORD" \
|
||||||
|
--loginRepo "$DOCKER_REPOSITORY_DOMAIN" \
|
||||||
|
--dockerRepo "$DOCKER_REPOSITORY_STORYBOOK" \
|
||||||
|
--buildArgs "$DOCKER_PROJECT_ARGS" \
|
||||||
|
--dockerTags "$TAGS" \
|
||||||
|
--pathProject "$(pwd)" \
|
||||||
|
"$@"
|