Merge pull request #4383 from BOINC/vko_vcpkg_manage_archive_cache

[vcpkg] Upload vcpkg binary cache to s3
David Anderson 2021-06-01 11:15:00 -07:00 committed by GitHub
commit 096cc15e02
4 changed files with 157 additions and 0 deletions
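
The new deploy/manage_vcpkg_archive_cache.py script (last file below) stores each cached vcpkg archive in a single shared S3 bucket, keying it as <os>-<subdirectory>-<filename> so the Android, Linux, and Windows workflows each pull only their own archives. A minimal sketch of that key scheme, with an illustrative archive path rather than one taken from a real cache:

import os

def cache_key(os_name, local_path):
    # Mirrors the naming used by upload()/download() in the script below:
    # <os>-<immediate parent directory>-<file name>.
    dir_name = os.path.basename(os.path.dirname(local_path))
    file_name = os.path.basename(local_path)
    return os_name + '-' + dir_name + '-' + file_name

# Hypothetical archive produced by vcpkg binary caching:
print(cache_key('linux', '3rdParty/buildCache/linux/vcpkgcache/vcpkg/archives/ab/ab12cd34.zip'))
# -> 'linux-ab-ab12cd34.zip'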

View File

@@ -34,6 +34,16 @@ jobs:
          path: 3rdParty/buildCache
          key: android-${{ matrix.type }}-${{ hashFiles('android/*.sh') }}

      - name: Configure Python
        if: ${{ success() && contains(matrix.type, 'vcpkg') }}
        run: |
          pip install boto3

      - name: Download vcpkg binary cache
        if: ${{ success() && contains(matrix.type, 'vcpkg') }}
        run: |
          python ./deploy/manage_vcpkg_archive_cache.py download 3rdParty/buildCache/android/vcpkgcache/vcpkg/archives/ android "edu.berkeley.boinc.github.actions.build.vcpkg.binary.cache"

      - name: Build manager
        if: ${{ success() && matrix.type == 'manager' }}
        run: |
@@ -92,3 +102,12 @@ jobs:
        with:
          fail_ci_if_error: true
          verbose: false

      - name: Upload vcpkg binary cache
        if: ${{ success() && contains(matrix.type, 'vcpkg') }}
        env:
          BUCKET: ${{ secrets.S3_BUCKET }}
          ACCESS_KEY: ${{ secrets.S3_ACCESS_KEY }}
          SECRET_KEY: ${{ secrets.S3_SECRET_KEY }}
        run: |
          python ./deploy/manage_vcpkg_archive_cache.py upload 3rdParty/buildCache/android/vcpkgcache/vcpkg/archives/ android "edu.berkeley.boinc.github.actions.build.vcpkg.binary.cache" "$ACCESS_KEY" "$SECRET_KEY"

View File

@@ -40,6 +40,16 @@ jobs:
          key: linux-${{ matrix.type }}-${{ hashFiles('3rdParty/*Linux*.sh', 'linux/*.sh') }}
          restore-keys: linux-${{ matrix.type }}-

      - name: Configure Python
        if: ${{ success() && contains(matrix.type, 'vcpkg') }}
        run: |
          pip install boto3

      - name: Download vcpkg binary cache
        if: ${{ success() && contains(matrix.type, 'vcpkg') }}
        run: |
          python ./deploy/manage_vcpkg_archive_cache.py download 3rdParty/buildCache/linux/vcpkgcache/vcpkg/archives/ linux "edu.berkeley.boinc.github.actions.build.vcpkg.binary.cache"

      - name: Automake
        if: success()
        run: ./_autosetup
@@ -129,3 +139,12 @@ jobs:
        with:
          fail_ci_if_error: true
          verbose: false

      - name: Upload vcpkg binary cache
        if: ${{ success() && contains(matrix.type, 'vcpkg') }}
        env:
          BUCKET: ${{ secrets.S3_BUCKET }}
          ACCESS_KEY: ${{ secrets.S3_ACCESS_KEY }}
          SECRET_KEY: ${{ secrets.S3_SECRET_KEY }}
        run: |
          python ./deploy/manage_vcpkg_archive_cache.py upload 3rdParty/buildCache/linux/vcpkgcache/vcpkg/archives/ linux "edu.berkeley.boinc.github.actions.build.vcpkg.binary.cache" "$ACCESS_KEY" "$SECRET_KEY"

View File

@@ -51,6 +51,18 @@ jobs:
          key: windows-${{ matrix.platform }}-${{ matrix.configuration }}-${{ hashFiles('win_build/vcpkg_3rdparty_dependencies_vs2019.vcxproj') }}
          restore-keys: windows-${{ matrix.platform }}-${{ matrix.configuration }}-

      - name: Configure Python
        if: ${{ success() }}
        shell: cmd
        run: |
          pip install boto3

      - name: Download vcpkg binary cache
        if: ${{ success() }}
        shell: cmd
        run: |
          python deploy\manage_vcpkg_archive_cache.py download ${{ github.workspace }}\3rdParty\buildCache\windows\vcpkgcache\ windows "edu.berkeley.boinc.github.actions.build.vcpkg.binary.cache"

      - name: Build
        run: msbuild win_build\boinc_vs2019.sln -p:Configuration=${{ matrix.configuration }} -p:Platform=${{ matrix.platform }} -p:VcpkgTripletConfig=ci -m
@@ -100,3 +112,13 @@ jobs:
        with:
          fail_ci_if_error: true
          verbose: false

      - name: Upload vcpkg binary cache
        if: ${{ success() }}
        shell: cmd
        env:
          BUCKET: ${{ secrets.S3_BUCKET }}
          ACCESS_KEY: ${{ secrets.S3_ACCESS_KEY }}
          SECRET_KEY: ${{ secrets.S3_SECRET_KEY }}
        run: |
          python deploy\manage_vcpkg_archive_cache.py upload ${{ github.workspace }}\3rdParty\buildCache\windows\vcpkgcache\ windows "edu.berkeley.boinc.github.actions.build.vcpkg.binary.cache" "%ACCESS_KEY%" "%SECRET_KEY%"

View File

@@ -0,0 +1,97 @@
import boto3
import os
import sys
from botocore import UNSIGNED
from botocore.client import Config
from botocore.exceptions import NoCredentialsError


def s3_upload(local_file, s3_file, bucket, access_key, secret_key):
    print('Uploading', local_file, '->', s3_file)
    s3 = boto3.client('s3', aws_access_key_id=access_key,
                      aws_secret_access_key=secret_key)
    try:
        s3.upload_file(local_file, bucket, s3_file)
        print("Upload Successful")
    except FileNotFoundError:
        print("The file was not found")
    except NoCredentialsError:
        print("Credentials not available")


def s3_download(local_file, s3_file, bucket):
    print('Downloading', s3_file, '->', local_file)
    s3 = boto3.client('s3', config=Config(signature_version=UNSIGNED))
    try:
        s3.download_file(bucket, s3_file, local_file)
        print("Download Successful")
        return True
    except NoCredentialsError:
        print("Credentials not available")
        return False


def s3_list(bucket):
    s3 = boto3.client('s3', config=Config(signature_version=UNSIGNED))
    try:
        return s3.list_objects(Bucket=bucket)['Contents']
    except Exception:
        print("Failed to retrieve list of files in bucket")
        return None


def upload(os_name, dir, bucket, access_key, secret_key):
    l = s3_list(bucket)
    for path, _, files in os.walk(dir):
        for name in files:
            found = False
            file_path = os.path.join(path, name)
            dir_name = os.path.basename(path)
            file_name = os_name + '-' + dir_name + '-' + name
            if (l is not None):
                for k in l:
                    key = k['Key']
                    if (key == file_name):
                        found = True
                        break
            if (not found):
                s3_upload(file_path, file_name, bucket, access_key, secret_key)


def download(os_name, dir, bucket):
    l = s3_list(bucket)
    if (l is not None):
        for k in l:
            key = k['Key']
            a = key.split('-')
            if (len(a) == 3 and a[0] == os_name):
                os.makedirs(os.path.join(dir, a[1]), exist_ok=True)
                local_file = os.path.join(dir, a[1], a[2])
                if (os.path.isfile(local_file)):
                    print('Found local file', local_file)
                    continue
                s3_download(local_file, key, bucket)


def help():
    print('Usage:')
    print('python manage_vcpkg_archive_cache.py <action> <dir> <os> <bucket> <access_key> <secret_key>')


if (len(sys.argv) != 7 and len(sys.argv) != 5):
    help()
    sys.exit(1)
action_name = sys.argv[1]
dir_name = sys.argv[2]
os_name = sys.argv[3]
bucket_name = sys.argv[4]
if (action_name == 'upload' and len(sys.argv) == 7):
    access_key = sys.argv[5]
    secret_key = sys.argv[6]
    upload(os_name, dir_name, bucket_name, access_key, secret_key)
elif (action_name == 'download'):
    download(os_name, dir_name, bucket_name)
else:
    help()
    sys.exit(1)
sys.exit(0)
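
For local use or debugging, the script can be invoked the same way the workflows call it: downloads use unsigned (anonymous) S3 requests, while uploads require the access and secret keys as the last two arguments. Example invocations, copied from the Linux workflow above and assuming they are run from the repository root with ACCESS_KEY and SECRET_KEY exported in the environment:

python ./deploy/manage_vcpkg_archive_cache.py download 3rdParty/buildCache/linux/vcpkgcache/vcpkg/archives/ linux "edu.berkeley.boinc.github.actions.build.vcpkg.binary.cache"
python ./deploy/manage_vcpkg_archive_cache.py upload 3rdParty/buildCache/linux/vcpkgcache/vcpkg/archives/ linux "edu.berkeley.boinc.github.actions.build.vcpkg.binary.cache" "$ACCESS_KEY" "$SECRET_KEY"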