use one bucket variable to avoid: (#8263)

Can be misconfigured with different buckets
We need to maintain two variables:
They seem to be used in different ways because one contains s3:// and the other does not
This commit is contained in:
Eugenio Romano 2023-02-14 16:26:26 +01:00 committed by GitHub
parent 3b7ac02098
commit a1ec61f857
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 8 additions and 11 deletions

View File

@ -60,9 +60,9 @@ runs:
run: |
set -u;
mkdir -p "${SMART_RUNNER_PATH}"
if [[ $(aws s3 ls "${S3_BUILD_BUCKET}/adf/${REMOTE_PATH}" > /dev/null; echo $?) -eq 0 ]]; then
if [[ $(aws s3 ls "s3://${S3_BUILD_BUCKET_SHORT_NAME}/adf/${REMOTE_PATH}" > /dev/null; echo $?) -eq 0 ]]; then
echo "downloading test files"
aws s3 cp "${S3_BUILD_BUCKET}/adf/${REMOTE_PATH}" .;
aws s3 cp "s3://${S3_BUILD_BUCKET_SHORT_NAME}/adf/${REMOTE_PATH}" .;
tar xzf ${{ inputs.e2e-tar-name }};
else
echo "nothing to download";
@ -161,7 +161,7 @@ runs:
name: e2e-artifact-output
path: /home/runner/work/alfresco-ng2-components/alfresco-ng2-components/e2e-output-*
- name: upload smartrunner tests results on s3 to cache tests
- name: upload smart-runner tests results on s3 to cache tests
shell: bash
if: always()
env:
@ -169,4 +169,4 @@ runs:
# description: always upload newer results
run: |
tar czf "${{ inputs.e2e-tar-name }}" "${SMART_RUNNER_PATH}"
aws s3 cp "${{ inputs.e2e-tar-name }}" "${S3_BUILD_BUCKET}/adf/$REMOTE_PATH"
aws s3 cp "${{ inputs.e2e-tar-name }}" "s3://${S3_BUILD_BUCKET_SHORT_NAME}/adf/${REMOTE_PATH}"

View File

@ -7,13 +7,13 @@ runs:
- name: tar and upload artifacts
shell: bash
env:
REMOTE_PATH: "alfresco-ng2-components/build-cache/${{ github.run_id }}"
REMOTE_PATH: "alfresco-ng2-components/build-cache/${{ github.run_id }}"
run: |
packages=( dist nxcache node_modules )
for i in "${packages[@]}"; do
time tar czf $i.tar.gz $i
du -h $i.tar.gz
time aws s3 cp --no-progress $i.tar.gz s3://${S3_BUILD_BUCKET_SHORT_NAME}/${REMOTE_PATH}/$i.tar.gz
time aws s3 cp --no-progress $i.tar.gz "s3://${S3_BUILD_BUCKET_SHORT_NAME}/${REMOTE_PATH}/$i.tar.gz"
done

View File

@ -52,7 +52,6 @@ env:
HR_USER_PASSWORD: ${{ secrets.HR_USER_PASSWORD }}
SMART_RUNNER_PATH: ".protractor-smartrunner"
S3_DBP_PATH: ${{ secrets.S3_DBP_PATH }}
S3_BUILD_BUCKET: ${{ secrets.S3_BUILD_BUCKET }}
S3_BUILD_BUCKET_SHORT_NAME: ${{ secrets.S3_BUILD_BUCKET_SHORT_NAME }}
NODE_OPTIONS: "--max-old-space-size=5120"
DOCKER_REPOSITORY_DOMAIN: ${{ secrets.DOCKER_REPOSITORY_DOMAIN }}
@ -84,8 +83,8 @@ jobs:
uses: actions/checkout@v3
- id: set-dryrun
uses: ./.github/actions/enable-dryrun
with:
dry-run-flag: ${{ inputs.dry-run-release }}
with:
dry-run-flag: ${{ inputs.dry-run-release }}
- name: install NPM
uses: actions/setup-node@v3
with:

View File

@ -60,7 +60,6 @@ env:
HR_USER_PASSWORD: ${{ secrets.HR_USER_PASSWORD }}
SMART_RUNNER_PATH: ".protractor-smartrunner"
S3_DBP_PATH: ${{ secrets.S3_DBP_PATH }}
S3_BUILD_BUCKET: ${{ secrets.S3_BUILD_BUCKET }}
S3_BUILD_BUCKET_SHORT_NAME: ${{ secrets.S3_BUILD_BUCKET_SHORT_NAME }}
NODE_OPTIONS: "--max-old-space-size=5120"
DOCKER_REPOSITORY_DOMAIN: ${{ secrets.DOCKER_REPOSITORY_DOMAIN }}

View File

@ -55,7 +55,6 @@ env:
HR_USER_PASSWORD: ${{ secrets.HR_USER_PASSWORD }}
SMART_RUNNER_PATH: ".protractor-smartrunner"
S3_DBP_PATH: ${{ secrets.S3_DBP_PATH }}
S3_BUILD_BUCKET: ${{ secrets.S3_BUILD_BUCKET }}
S3_BUILD_BUCKET_SHORT_NAME: ${{ secrets.S3_BUILD_BUCKET_SHORT_NAME }}
NODE_OPTIONS: "--max-old-space-size=5120"
DOCKER_REPOSITORY_DOMAIN: ${{ secrets.DOCKER_REPOSITORY_DOMAIN }}