Mirror of https://github.com/Alfresco/alfresco-ng2-components.git (synced 2025-07-24 17:32:15 +00:00)
[AAE-4608] Use smartrunner version 2 and add SuperCache (workspaces + S3 combo)! (#6704)
* [AAE-4608] Use smartrunner version 2
* Fix npm installing and do not depend on develop cache
* Add Travis workspaces the simplest way
* Add missing dependencies
* Remove unnecessary npm install on lint
* Fix travis indentation
* Fix e2e missing adf packages
* Trying to fix Travis
* Remove debug information
* Use Travis workspaces for smartrunner and built artefacts caching instead of S3
* Implement nuclear caching for node_modules
* Quiet snyk running, since the log length kills Travis
* Maybe with -q
* Quieting snyk
* Why displaying the added packages??????
* Fix S3 persister/retriever scripts
* Restore back parallel build and lint, to gain 10 minutes
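The "SuperCache" of the title is, in practice, a two-tier lookup that the new scripts/ci/job_hooks/install.sh (further down) performs before falling back to a plain npm ci: the Travis workspace first, then an S3 tarball keyed on the Node version and package-lock.json. A condensed, hedged sketch of that flow, using the variable names from the diff but not the literal script:

# --- Illustrative sketch, not the literal install.sh (colour codes and logging omitted) ---
if [[ -d "$NODE_MODULES_DIR" ]]; then
    : # tier 1: node_modules was restored from the Travis workspace, nothing to do
elif aws s3 ls "$S3_NODE_MODULES_CACHE_PATH" > /dev/null; then
    # tier 2: a cache entry for this Node version + package-lock.json already exists on S3
    ./scripts/ci/utils/artifact-from-s3.sh -a "$S3_NODE_MODULES_CACHE_PATH" -o "$NODE_MODULES_DIR"
else
    # tier 3: install from scratch and seed the S3 cache for later builds
    npm ci && ./scripts/ci/utils/artifact-to-s3.sh -a "$NODE_MODULES_DIR" -o "$S3_NODE_MODULES_CACHE_PATH"
fi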
@@ -1,6 +1,3 @@
#!/usr/bin/env bash

# Upload protractor-smartrunner artifact related to this particular job to S3
./scripts/ci/utils/artifact-to-s3.sh -a "$SMART_RUNNER_DIRECTORY" -o "$S3_DBP_FOLDER/protractor-smartrunner-$TRAVIS_JOB_ID.tar.bz2"

./node_modules/@alfresco/adf-cli/bin/adf-cli scan-env --host "$E2E_HOST" -u "$E2E_ADMIN_EMAIL_IDENTITY" -p "$E2E_ADMIN_PASSWORD_IDENTITY"
@@ -1,11 +1,14 @@
#!/usr/bin/env bash

# TODO: This one needs to be cleaned up... Only fixing it like this, because for the current PR it is out of scope
# =======================================
rm -rf ./node_modules/@alfresco/adf-cli/ && \
mkdir -p ./node_modules/@alfresco/adf-cli/ && \
cp -R ./lib/dist/cli/* ./node_modules/@alfresco/adf-cli/

rm -rf ./node_modules/@alfresco/adf-testing/ && \
mkdir -p ./node_modules/@alfresco/adf-testing/ && \
cp -R ./lib/dist/testing/* ./node_modules/@alfresco/adf-testing/
# =======================================

./node_modules/@alfresco/adf-cli/bin/adf-cli scan-env --host "$E2E_HOST" -u "$E2E_ADMIN_EMAIL_IDENTITY" -p "$E2E_ADMIN_PASSWORD_IDENTITY"

# Download built application artifact from S3
./scripts/ci/utils/artifact-from-s3.sh -a "$S3_DBP_FOLDER/alfresco-demoshell.tar.bz2" -o "./dist/demo-shell"
./scripts/ci/utils/artifact-from-s3.sh -a "$S3_DBP_FOLDER/alfresco-libs.tar.bz2" -o "./lib/dist"

# Download protractor-smartrunner artifact related to this particular job from S3, if exists
./scripts/ci/utils/artifact-from-s3.sh -a "$S3_DBP_FOLDER/protractor-smartrunner-$TRAVIS_JOB_ID.tar.bz2" -o "$SMART_RUNNER_DIRECTORY"
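The copy block above overwrites the published @alfresco/adf-cli and @alfresco/adf-testing packages in node_modules with the locally built lib/dist output, so the e2e job runs against the freshly built code. The same step written as a loop, purely for illustration and not part of the diff:

# --- Illustrative sketch, not part of the diff ---
for pkg in cli testing; do
    rm -rf "./node_modules/@alfresco/adf-$pkg/"
    mkdir -p "./node_modules/@alfresco/adf-$pkg/"
    cp -R "./lib/dist/$pkg/"* "./node_modules/@alfresco/adf-$pkg/"
done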
@@ -1,28 +1,52 @@
#!/usr/bin/env bash

pip install --user awscli
# ===================================================================
# In this hook-file define only dynamic-ish environmental variables.
# Put the static environment variables into the env.yml file
# Command executions or any other installation logic
# is supposed to be in the "install.sh" hook script.
# ===================================================================
PARENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )/.." && pwd )"

# Settings for protractor-smartrunner -------------------------------------------------
export SMART_RUNNER_DIRECTORY=".protractor-smartrunner"


export NODE_OPTIONS="--max_old_space_size=30000"
export GIT_HASH=`git rev-parse HEAD`
echo "GIT_HASH: $GIT_HASH"
S3_DBP_PATH="s3://alfresco-travis-builds/adf"
export BASE_HASH="$(git merge-base origin/$BRANCH_NAME HEAD)"
export HEAD_HASH="HEAD"

# Node settings
export NODE_OPTIONS="--max_old_space_size=30000"

# Settings for Nx ---------------------------------------------------------------------
export BASE_HASH="$(git merge-base origin/$BRANCH_NAME HEAD)"
export HEAD_HASH=${TRAVIS_PULL_REQUEST_SHA:-${TRAVIS_COMMIT}}

if [ "${TRAVIS_EVENT_TYPE}" == "push" ]; then
    export S3_DBP_FOLDER="$S3_DBP_PATH/$TRAVIS_BRANCH/$TRAVIS_BUILD_ID"
    export S3_DBP_ROOT_FOLDER="$S3_DBP_PATH/$TRAVIS_BRANCH"
elif [ "${TRAVIS_EVENT_TYPE}" == "pull_request" ]; then
    export S3_DBP_FOLDER="$S3_DBP_PATH/$TRAVIS_PULL_REQUEST/$TRAVIS_BUILD_ID"
    export S3_DBP_ROOT_FOLDER="$S3_DBP_PATH/$TRAVIS_PULL_REQUEST"
    export BASE_HASH="origin/$TRAVIS_BRANCH"
elif [ "${TRAVIS_EVENT_TYPE}" == "cron" ]; then
    export S3_DBP_FOLDER="$S3_DBP_PATH/cron/$TRAVIS_BUILD_ID"
    export S3_DBP_ROOT_FOLDER="$S3_DBP_PATH/cron"
else
    export S3_DBP_FOLDER="$S3_DBP_PATH/api/$TRAVIS_BUILD_ID"
    export S3_DBP_ROOT_FOLDER="$S3_DBP_PATH/api"
fi

echo "BASE_HASH: $BASE_HASH"
export S3_DBP_FOLDER="$S3_DBP_ROOT_FOLDER/$TRAVIS_BUILD_ID"

# Cache for node_modules
export NODE_VERSION=`node -v`
export PACKAGE_LOCK_SHASUM=`shasum ./package-lock.json | cut -f 1 -d " "`
# This can change regardless of package-lock.json, so we need to calculate with this one as well
export S3_NODE_MODULES_CACHE_ID=`echo $NODE_VERSION-$PACKAGE_LOCK_SHASUM | shasum | cut -f 1 -d " "`
export S3_NODE_MODULES_CACHE_PATH="$S3_DBP_PATH/cache/node_modules/$S3_NODE_MODULES_CACHE_ID.tar.bz2"

echo "S3 DBP root folder: $S3_DBP_ROOT_FOLDER"
echo "S3 DBP destination: $S3_DBP_FOLDER"
echo "========== Caching settings =========="
echo "PACKAGE_LOCK_SHASUM: $PACKAGE_LOCK_SHASUM"
echo "NODE_VERSION: $NODE_VERSION"
echo "S3_NODE_MODULES_CACHE_ID: $S3_NODE_MODULES_CACHE_ID"
echo "S3_NODE_MODULES_CACHE_PATH: $S3_NODE_MODULES_CACHE_PATH"
echo "========== Nx settings =========="
echo "GIT_HASH: $GIT_HASH"
echo "BASE_HASH: $BASE_HASH"
echo "HEAD_HASH: $HEAD_HASH"
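The node_modules cache key above is the SHA-1 of the string "<node version>-<SHA-1 of package-lock.json>", so a Node upgrade or a lockfile change each produces a fresh cache entry. A small, hedged walk-through of the derivation (the example version and digest are made up):

# --- Illustrative sketch, not part of the diff ---
NODE_VERSION=$(node -v)                                              # e.g. v14.16.0
PACKAGE_LOCK_SHASUM=$(shasum ./package-lock.json | cut -f 1 -d " ")  # SHA-1 of the lockfile
echo "$NODE_VERSION-$PACKAGE_LOCK_SHASUM" | shasum | cut -f 1 -d " " # -> e.g. 3f7c...e1, the cache id
# The tarball is then addressed as:
#   s3://alfresco-travis-builds/adf/cache/node_modules/<cache id>.tar.bz2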
scripts/ci/job_hooks/install.sh (new executable file, 32 lines)
@@ -0,0 +1,32 @@
#!/usr/bin/env bash

# ===================================================================
# In this hook-file invoke commands and install dependencies.
# Environment variable declarations are supposed to be
# in the "before_install.sh" hook script or in the env.yml file.
# ===================================================================

# ========== AWS CLI ==========
echo "Installing awscli (silent install)"
pip install --user awscli -q

# ========== Install node_modules or restore it from cloud cache ==========
# If the node_modules folder hasn't been restored from the Travis workspace
if [[ ! -d $NODE_MODULES_DIR ]]; then
    echo -e "\e[31mTravis Workspace doesn't contain $NODE_MODULES_DIR, checking S3...\e[0m"

    aws s3 ls $S3_NODE_MODULES_CACHE_PATH > /dev/null

    # If there is no cache uploaded yet to S3
    if [ "$?" -ne 0 ]
    then
        echo -e "\e[31mCache entry for the current package-lock.json ($S3_NODE_MODULES_CACHE_ID) doesn't exist, doing the installation now.\e[0m"
        npm ci && scripts/ci/utils/artifact-to-s3.sh -a "$NODE_MODULES_DIR" -o "$S3_NODE_MODULES_CACHE_PATH"
    # Otherwise the cache is already on S3
    else
        echo -e "\e[32mCache entry for the current package-lock.json ($S3_NODE_MODULES_CACHE_ID) exists, downloading...\e[0m"
        scripts/ci/utils/artifact-from-s3.sh -a "$S3_NODE_MODULES_CACHE_PATH" -o "$NODE_MODULES_DIR"
    fi
else
    echo -e "\e[32mThe $NODE_MODULES_DIR folder exists, possibly it was restored from the Travis Workspace...\e[0m"
fi
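Throughout these hooks the two helper scripts share the same interface: -a names the artifact (a local directory for the uploader, an S3 object for the downloader) and -o names the counterpart. A hedged round-trip example, with an invented object key:

# --- Illustrative sketch, not part of the diff (the object key is invented) ---
./scripts/ci/utils/artifact-to-s3.sh   -a "./node_modules" -o "s3://alfresco-travis-builds/adf/cache/node_modules/example.tar.bz2"
./scripts/ci/utils/artifact-from-s3.sh -a "s3://alfresco-travis-builds/adf/cache/node_modules/example.tar.bz2" -o "./node_modules"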
@@ -23,9 +23,15 @@ then
fi

test ! -d $OUTPUT && mkdir -p $OUTPUT
aws s3 cp $ARTIFACT ./s3-artifact.tmp
echo 'artifact download done'
tar -xvf ./s3-artifact.tmp -C $OUTPUT >&/dev/null
echo 'tar the artifact done'
rm ./s3-artifact.tmp
echo 'remove tmp file'

IS_PRESENT="$(aws s3 ls $ARTIFACT | wc -l | tr -d ' ')"
if [ "${IS_PRESENT}" == "1" ]
then
    echo "File ${ARTIFACT} is present. Copying"
    aws s3 cp $ARTIFACT ./s3-artifact.tmp
    tar -xf ./s3-artifact.tmp -C $OUTPUT
    rm ./s3-artifact.tmp
else
    echo "File ${ARTIFACT} not present"
    exit 1;
fi
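Because the rewritten downloader now exits with status 1 when the object is missing instead of attempting a blind copy, a caller can attach its own fallback to the exit code. A hedged example of that pattern (install.sh itself probes with aws s3 ls first rather than relying on this):

# --- Illustrative sketch, not part of the diff ---
./scripts/ci/utils/artifact-from-s3.sh -a "$S3_NODE_MODULES_CACHE_PATH" -o "$NODE_MODULES_DIR" \
    || { echo "artifact missing on S3, falling back to npm ci"; npm ci; }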
@@ -22,6 +22,6 @@ then
    exit 1
fi

tar cvfj ./s3-artifact.tmp -C $ARTIFACT `ls $ARTIFACT`
tar cfj ./s3-artifact.tmp -C $ARTIFACT `ls -A $ARTIFACT`
aws s3 cp ./s3-artifact.tmp $OUTPUT
rm ./s3-artifact.tmp
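The only behavioural change here is swapping ls for ls -A (and dropping tar's verbose flag): without -A, dot-entries such as node_modules/.bin would never reach the tarball. A quick, hedged demonstration in a throwaway directory:

# --- Illustrative sketch, not part of the diff ---
mkdir -p /tmp/ls-a-demo/.bin && touch /tmp/ls-a-demo/visible
ls /tmp/ls-a-demo        # -> visible          (dot-entries are skipped)
ls -A /tmp/ls-a-demo     # -> .bin  visible    (dot-entries are included)
rm -rf /tmp/ls-a-demo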