From d00b0bc02793526964aa17744a7443cab13d71b4 Mon Sep 17 00:00:00 2001
From: Vitalii Koshura
Date: Mon, 31 May 2021 04:57:40 +0200
Subject: [PATCH] [vcpkg] Upload vcpkg binary cache to s3

Signed-off-by: Vitalii Koshura
---
 .github/workflows/android.yml        | 19 ++++++
 .github/workflows/linux.yml          | 19 ++++++
 .github/workflows/windows.yml        | 22 +++++++
 deploy/manage_vcpkg_archive_cache.py | 97 ++++++++++++++++++++++++++++
 4 files changed, 157 insertions(+)
 create mode 100644 deploy/manage_vcpkg_archive_cache.py

diff --git a/.github/workflows/android.yml b/.github/workflows/android.yml
index f5faf20985..b31fedfe6c 100644
--- a/.github/workflows/android.yml
+++ b/.github/workflows/android.yml
@@ -34,6 +34,16 @@ jobs:
           path: 3rdParty/buildCache
           key: android-${{ matrix.type }}-${{ hashFiles('android/*.sh') }}
 
+      - name: Configure Python
+        if: ${{ success() && contains(matrix.type, 'vcpkg') }}
+        run: |
+          pip install boto3
+
+      - name: Download vcpkg binary cache
+        if: ${{ success() && contains(matrix.type, 'vcpkg') }}
+        run: |
+          python ./deploy/manage_vcpkg_archive_cache.py download 3rdParty/buildCache/android/vcpkgcache/vcpkg/archives/ android "edu.berkeley.boinc.github.actions.build.vcpkg.binary.cache"
+
       - name: Build manager
         if: ${{ success() && matrix.type == 'manager' }}
         run: |
@@ -92,3 +102,12 @@ jobs:
         with:
           fail_ci_if_error: true
           verbose: false
+
+      - name: Upload vcpkg binary cache
+        if: ${{ success() && contains(matrix.type, 'vcpkg') }}
+        env:
+          BUCKET: ${{ secrets.S3_BUCKET }}
+          ACCESS_KEY: ${{ secrets.S3_ACCESS_KEY }}
+          SECRET_KEY: ${{ secrets.S3_SECRET_KEY }}
+        run: |
+          python ./deploy/manage_vcpkg_archive_cache.py upload 3rdParty/buildCache/android/vcpkgcache/vcpkg/archives/ android "edu.berkeley.boinc.github.actions.build.vcpkg.binary.cache" "$ACCESS_KEY" "$SECRET_KEY"
diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml
index d2d599d3d6..28c1e1b6f4 100644
--- a/.github/workflows/linux.yml
+++ b/.github/workflows/linux.yml
@@ -40,6 +40,16 @@ jobs:
           key: linux-${{ matrix.type }}-${{ hashFiles('3rdParty/*Linux*.sh', 'linux/*.sh') }}
           restore-keys: linux-${{ matrix.type }}-
 
+      - name: Configure Python
+        if: ${{ success() && contains(matrix.type, 'vcpkg') }}
+        run: |
+          pip install boto3
+
+      - name: Download vcpkg binary cache
+        if: ${{ success() && contains(matrix.type, 'vcpkg') }}
+        run: |
+          python ./deploy/manage_vcpkg_archive_cache.py download 3rdParty/buildCache/linux/vcpkgcache/vcpkg/archives/ linux "edu.berkeley.boinc.github.actions.build.vcpkg.binary.cache"
+
       - name: Automake
         if: success()
         run: ./_autosetup
@@ -129,3 +139,12 @@ jobs:
         with:
           fail_ci_if_error: true
           verbose: false
+
+      - name: Upload vcpkg binary cache
+        if: ${{ success() && contains(matrix.type, 'vcpkg') }}
+        env:
+          BUCKET: ${{ secrets.S3_BUCKET }}
+          ACCESS_KEY: ${{ secrets.S3_ACCESS_KEY }}
+          SECRET_KEY: ${{ secrets.S3_SECRET_KEY }}
+        run: |
+          python ./deploy/manage_vcpkg_archive_cache.py upload 3rdParty/buildCache/linux/vcpkgcache/vcpkg/archives/ linux "edu.berkeley.boinc.github.actions.build.vcpkg.binary.cache" "$ACCESS_KEY" "$SECRET_KEY"
diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml
index d227348c07..9be4d1e43c 100644
--- a/.github/workflows/windows.yml
+++ b/.github/workflows/windows.yml
@@ -51,6 +51,18 @@ jobs:
           key: windows-${{ matrix.platform }}-${{ matrix.configuration }}-${{ hashFiles('win_build/vcpkg_3rdparty_dependencies_vs2019.vcxproj') }}
           restore-keys: windows-${{ matrix.platform }}-${{ matrix.configuration }}-
 
+      - name: Configure Python
+        if: ${{ success() }}
+        shell: cmd
+        run: |
+          pip install boto3
+
+      - name: Download vcpkg binary cache
+        if: ${{ success() }}
+        shell: cmd
+        run: |
+          python deploy\manage_vcpkg_archive_cache.py download ${{ github.workspace }}\3rdParty\buildCache\windows\vcpkgcache\ windows "edu.berkeley.boinc.github.actions.build.vcpkg.binary.cache"
+
       - name: Build
         run: msbuild win_build\boinc_vs2019.sln -p:Configuration=${{ matrix.configuration }} -p:Platform=${{ matrix.platform }} -p:VcpkgTripletConfig=ci -m
 
@@ -100,3 +112,13 @@ jobs:
         with:
           fail_ci_if_error: true
           verbose: false
+
+      - name: Upload vcpkg binary cache
+        if: ${{ success() }}
+        shell: cmd
+        env:
+          BUCKET: ${{ secrets.S3_BUCKET }}
+          ACCESS_KEY: ${{ secrets.S3_ACCESS_KEY }}
+          SECRET_KEY: ${{ secrets.S3_SECRET_KEY }}
+        run: |
+          python deploy\manage_vcpkg_archive_cache.py upload ${{ github.workspace }}\3rdParty\buildCache\windows\vcpkgcache\ windows "edu.berkeley.boinc.github.actions.build.vcpkg.binary.cache" "%ACCESS_KEY%" "%SECRET_KEY%"
diff --git a/deploy/manage_vcpkg_archive_cache.py b/deploy/manage_vcpkg_archive_cache.py
new file mode 100644
index 0000000000..c583566223
--- /dev/null
+++ b/deploy/manage_vcpkg_archive_cache.py
@@ -0,0 +1,97 @@
+import boto3
+import os
+import sys
+from botocore import UNSIGNED
+from botocore.client import Config
+from botocore.exceptions import NoCredentialsError
+
+def s3_upload(local_file, s3_file, bucket, access_key, secret_key):
+    print('Uploading', local_file, '->', s3_file)
+
+    s3 = boto3.client('s3', aws_access_key_id=access_key,
+                      aws_secret_access_key=secret_key)
+
+    try:
+        s3.upload_file(local_file, bucket, s3_file)
+        print("Upload Successful")
+    except FileNotFoundError:
+        print("The file was not found")
+    except NoCredentialsError:
+        print("Credentials not available")
+
+def s3_download(local_file, s3_file, bucket):
+    print('Downloading', s3_file, '->', local_file)
+    s3 = boto3.client('s3', config=Config(signature_version=UNSIGNED))
+
+    try:
+        s3.download_file(bucket, s3_file, local_file)
+        print("Download Successful")
+        return True
+    except NoCredentialsError:
+        print("Credentials not available")
+        return False
+
+def s3_list(bucket):
+    s3 = boto3.client('s3', config=Config(signature_version=UNSIGNED))
+
+    try:
+        return s3.list_objects(Bucket=bucket)['Contents']
+    except Exception:
+        print("Failed to retrieve list of files in bucket")
+        return None
+
+def upload(os_name, dir, bucket, access_key, secret_key):
+    l = s3_list(bucket)
+    for path, _, files in os.walk(dir):
+        for name in files:
+            found = False
+            file_path = os.path.join(path, name)
+            dir_name = os.path.basename(path)
+            file_name = os_name + '-' + dir_name + '-' + name
+            if (l is not None):
+                for k in l:
+                    key = k['Key']
+                    if (key == file_name):
+                        found = True
+                        break
+            if (not found):
+                s3_upload(file_path, file_name, bucket, access_key, secret_key)
+
+def download(os_name, dir, bucket):
+    l = s3_list(bucket)
+    if (l is not None):
+        for k in l:
+            key = k['Key']
+            a = key.split('-')
+            if (len(a) == 3 and a[0] == os_name):
+                os.makedirs(os.path.join(dir, a[1]), exist_ok=True)
+                local_file = os.path.join(dir, a[1], a[2])
+                if (os.path.isfile(local_file)):
+                    print('Found local file', local_file)
+                    continue
+                s3_download(local_file, key, bucket)
+
+def help():
+    print('Usage:')
+    print('python manage_vcpkg_archive_cache.py <upload|download> <dir> <os_name> <bucket_name> [<access_key> <secret_key>]')
+
+if (len(sys.argv) != 7 and len(sys.argv) != 5):
+    help()
+    sys.exit(1)
+
+action_name = sys.argv[1]
+dir_name = sys.argv[2]
+os_name = sys.argv[3]
+bucket_name = sys.argv[4]
+
+if (action_name == 'upload' and len(sys.argv) == 7):
+    access_key = sys.argv[5]
+    secret_key = sys.argv[6]
+    upload(os_name, dir_name, bucket_name, access_key, secret_key)
+elif (action_name == 'download'):
+    download(os_name, dir_name, bucket_name)
+else:
+    help()
+    sys.exit(1)
+
+sys.exit(0)
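
Usage sketch (not part of the commit itself): the script can also be run by hand to seed or refresh the cache, assuming boto3 is installed and that the ACCESS_KEY/SECRET_KEY values are replaced with a real AWS key pair for the bucket above. The commands mirror the workflow steps in the patch:

    pip install boto3
    # anonymous (unsigned) download of previously uploaded archives
    python ./deploy/manage_vcpkg_archive_cache.py download 3rdParty/buildCache/linux/vcpkgcache/vcpkg/archives/ linux "edu.berkeley.boinc.github.actions.build.vcpkg.binary.cache"
    # authenticated upload of any archives not already present in the bucket
    python ./deploy/manage_vcpkg_archive_cache.py upload 3rdParty/buildCache/linux/vcpkgcache/vcpkg/archives/ linux "edu.berkeley.boinc.github.actions.build.vcpkg.binary.cache" "$ACCESS_KEY" "$SECRET_KEY"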