Merge pull request #4400 from AenBleidd/vko_vcpkg_archive_cleanup

[CI] Add maintenance script to clean outdated vcpkg archives.
David Anderson 2021-06-09 15:14:32 -07:00 committed by GitHub
commit a6c23fd38d
7 changed files with 221 additions and 84 deletions
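
In summary: the S3 upload/download helpers move out of the cache-management script into a new deploy/s3.py module; a new deploy/cleanup_vcpkg_archive_cache.py script downloads the bucket contents and deletes every cached vcpkg package version except the newest; and a new scheduled workflow runs that script weekly. The CI workflows also exclude the per-OS vcpkgcache/ directories from actions/cache (a leading '!' negates a path pattern) and drop the BUCKET secret in favor of a hardcoded bucket name.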

.github/workflows/android.yml

@@ -31,7 +31,9 @@ jobs:
       - name: Cache dependencies
        uses: actions/cache@v2.1.3
        with:
-          path: 3rdParty/buildCache
+          path: |
+            3rdParty/buildCache
+            !3rdParty/buildCache/android/vcpkgcache/
          key: android-${{ matrix.type }}-${{ hashFiles('android/*.sh') }}
      - name: Configure Python
@@ -106,7 +108,6 @@ jobs:
       - name: Upload vcpkg binary cache
        if: ${{ success() && contains(matrix.type, 'vcpkg') }}
        env:
-          BUCKET: ${{ secrets.S3_BUCKET }}
          ACCESS_KEY: ${{ secrets.S3_ACCESS_KEY }}
          SECRET_KEY: ${{ secrets.S3_SECRET_KEY }}
        run: |

.github/workflows/linux.yml

@@ -36,7 +36,9 @@ jobs:
       - name: Cache dependencies
        uses: actions/cache@v2.1.3
        with:
-          path: 3rdParty/buildCache
+          path: |
+            3rdParty/buildCache
+            !3rdParty/buildCache/linux/vcpkgcache/
          key: linux-${{ matrix.type }}-${{ hashFiles('3rdParty/*Linux*.sh', 'linux/*.sh') }}
          restore-keys: linux-${{ matrix.type }}-
@@ -143,7 +145,6 @@ jobs:
       - name: Upload vcpkg binary cache
        if: ${{ success() && contains(matrix.type, 'vcpkg') }}
        env:
-          BUCKET: ${{ secrets.S3_BUCKET }}
          ACCESS_KEY: ${{ secrets.S3_ACCESS_KEY }}
          SECRET_KEY: ${{ secrets.S3_SECRET_KEY }}
        run: |

.github/workflows/maintenance.yml (new file)

@@ -0,0 +1,25 @@
name: Maintenance
on:
  schedule:
  - cron: '0 15 * * 0'
jobs:
  build:
    name: ${{ matrix.type }}
    runs-on: ubuntu-latest
    strategy:
      matrix:
        type: [maintenance]
      fail-fast: false
    steps:
    - name: Configure Python
      run: |
        pip install boto3
    - name: Cleanup outdated binary cache
      if: ${{ success() }}
      env:
        ACCESS_KEY: ${{ secrets.S3_ACCESS_KEY }}
        SECRET_KEY: ${{ secrets.S3_SECRET_KEY }}
      run: |
        python ./deploy/cleanup_vcpkg_archive_cache.py ./ "edu.berkeley.boinc.github.actions.build.vcpkg.binary.cache" "$ACCESS_KEY" "$SECRET_KEY"
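
The cron expression '0 15 * * 0' fires once a week, on Sundays at 15:00 UTC. Note that only the S3 access keys remain secrets here; the bucket name is not sensitive and is passed as a plain literal.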

.github/workflows/windows.yml

@@ -46,7 +46,6 @@ jobs:
         uses: actions/cache@v2.1.3
        with:
          path: |
-            ${{ github.workspace }}\3rdParty\buildCache\windows\vcpkgcache\
            ${{ github.workspace }}\3rdParty\Windows\cuda\
          key: windows-${{ matrix.platform }}-${{ matrix.configuration }}-${{ hashFiles('win_build/vcpkg_3rdparty_dependencies_vs2019.vcxproj') }}
          restore-keys: windows-${{ matrix.platform }}-${{ matrix.configuration }}-
@@ -72,9 +71,8 @@ jobs:
       - name: Prepare logs on failure
        if: ${{ failure() }}
-        uses: edgarrc/action-7z@v1.0.4
-        with:
-          args: 7z a -t7z -mx=9 deploy/logs.7z -r0 3rdParty/Windows/vcpkg/buildtrees/*.log
+        run: |
+          7z.exe a -t7z -mx=9 deploy/logs.7z -r0 3rdParty/Windows/vcpkg/buildtrees/*.log
      - name: Upload logs on failure
        if: ${{ failure() }}
@@ -117,7 +115,6 @@ jobs:
         if: ${{ success() }}
        shell: cmd
        env:
-          BUCKET: ${{ secrets.S3_BUCKET }}
          ACCESS_KEY: ${{ secrets.S3_ACCESS_KEY }}
          SECRET_KEY: ${{ secrets.S3_SECRET_KEY }}
        run: |

deploy/cleanup_vcpkg_archive_cache.py (new file)

@@ -0,0 +1,90 @@
import os
import sys
import zipfile

import s3


def get_files(dir):
    file_names = []
    for path, _, files in os.walk(dir):
        for name in files:
            file_name = os.path.join(path, name)
            file_names.append(file_name)
    return file_names


def read_control(control):
    package = ''
    version = '0'
    port_version = '0'
    architecture = ''
    lines = control.split('\n')
    for line in lines:
        if (line != ''):
            pair = line.split(': ')
            if (pair[0] == 'Package'):
                package = pair[1]
            elif (pair[0] == 'Version'):
                version = pair[1]
            elif (pair[0] == 'Port-Version'):
                port_version = pair[1]
            elif (pair[0] == 'Architecture'):
                architecture = pair[1]
    return package, version + '-' + port_version, architecture


def get_packages(archives):
    packages = {}
    for archive in archives:
        zip_file = zipfile.ZipFile(archive, 'r')
        control = zip_file.read('CONTROL')
        package, version, architecture = read_control(control.decode('utf-8'))
        if (architecture not in packages.keys()):
            packages[architecture] = {}
        if (package not in packages[architecture].keys()):
            packages[architecture][package] = {}
        if (version not in packages[architecture][package].keys()):
            packages[architecture][package][version] = []
        if (archive not in packages[architecture][package][version]):
            packages[architecture][package][version].append(archive)
    return packages


def print_packages(packages):
    for architecture in packages:
        print(architecture)
        for package in packages[architecture]:
            print('\t', package)
            for version in packages[architecture][package]:
                print('\t\t', version)
                for archive in packages[architecture][package][version]:
                    print('\t\t\t', archive)


def mark_outdated_packages(packages):
    outdated = []
    for architecture in packages:
        for package in packages[architecture]:
            if (len(packages[architecture][package]) == 1):
                continue
            max_version = sorted(packages[architecture][package].keys(), reverse=True)[0]
            for version in packages[architecture][package]:
                if (version != max_version):
                    for archive in packages[architecture][package][version]:
                        outdated.append(archive)
    return outdated


def help():
    print('Usage:')
    print('python cleanup_vcpkg_archive_cache.py <dir> <bucket> <access_key> <secret_key>')


if (len(sys.argv) != 5):
    help()
    sys.exit(1)

dir_name = sys.argv[1]
bucket_name = sys.argv[2]
access_key = sys.argv[3]
secret_key = sys.argv[4]

os.makedirs(dir_name, exist_ok=True)
s3.download_all(dir_name, bucket_name)
packages = get_packages(get_files(dir_name))
print_packages(packages)
outdated = mark_outdated_packages(packages)
s3.remove_files(outdated, bucket_name, access_key, secret_key)
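
As a rough illustration of the pruning rule (hypothetical package data; assumes the functions above are in scope):

# One port, one triplet, two cached versions.
packages = {
    'x64-linux': {
        'zlib': {
            '1.2.11-1': ['zlib-old.zip'],
            '1.2.11-2': ['zlib-new.zip'],
        },
    },
}

# The greatest version string ('1.2.11-2') is kept, so this prints
# ['zlib-old.zip']. The comparison is a plain lexicographic sort of the
# version strings, so a hypothetical '10.0-0' would sort below '9.0-0'.
print(mark_outdated_packages(packages))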

deploy/manage_vcpkg_archive_cache.py

@@ -1,77 +1,5 @@
-import boto3
 import os
 import sys
-from botocore import UNSIGNED
-from botocore.client import Config
-from botocore.exceptions import NoCredentialsError
-
-
-def s3_upload(local_file, s3_file, bucket, access_key, secret_key):
-    print('Uploading', local_file, '->', s3_file)
-    s3 = boto3.client('s3', aws_access_key_id=access_key,
-                      aws_secret_access_key=secret_key)
-    try:
-        s3.upload_file(local_file, bucket, s3_file)
-        print("Upload Successful")
-    except FileNotFoundError:
-        print("The file was not found")
-    except NoCredentialsError:
-        print("Credentials not available")
-    except Exception:
-        print("Upload failed")
-
-
-def s3_download(local_file, s3_file, bucket):
-    print('Downloading', s3_file, '->', local_file)
-    s3 = boto3.client('s3', config=Config(signature_version=UNSIGNED))
-    try:
-        s3.download_file(bucket, s3_file, local_file)
-        print("Download Successful")
-    except NoCredentialsError:
-        print("Credentials not available")
-    except Exception:
-        print("Download failed")
-
-
-def s3_list(bucket):
-    s3 = boto3.client('s3', config=Config(signature_version=UNSIGNED))
-    try:
-        return s3.list_objects(Bucket=bucket)['Contents']
-    except Exception:
-        print("Failed to retrieve list of files in bucket")
-        return None
-
-
-def upload(os_name, dir, bucket, access_key, secret_key):
-    l = s3_list(bucket)
-    for path, _, files in os.walk(dir):
-        for name in files:
-            found = False
-            file_path = os.path.join(path, name)
-            dir_name = os.path.basename(path)
-            file_name = os_name + '-' + dir_name + '-' + name
-            if (l is not None):
-                for k in l:
-                    key = k['Key']
-                    if (key == file_name):
-                        found = True
-                        break
-            if (not found):
-                s3_upload(file_path, file_name, bucket, access_key, secret_key)
-
-
-def download(os_name, dir, bucket):
-    l = s3_list(bucket)
-    if (l is not None):
-        for k in l:
-            key = k['Key']
-            a = key.split('-')
-            if (len(a) == 3 and a[0] == os_name):
-                os.makedirs(os.path.join(dir, a[1]), exist_ok=True)
-                local_file = os.path.join(dir, a[1], a[2])
-                if (os.path.isfile(local_file)):
-                    print('Found local file', local_file)
-                    continue
-                s3_download(local_file, key, bucket)
+import s3
 
 
 def help():
     print('Usage:')
@@ -89,9 +17,9 @@ bucket_name = sys.argv[4]
 if (action_name == 'upload' and len(sys.argv) == 7):
     access_key = sys.argv[5]
     secret_key = sys.argv[6]
-    upload(os_name, dir_name, bucket_name, access_key, secret_key)
+    s3.upload(os_name, dir_name, bucket_name, access_key, secret_key)
 elif (action_name == 'download'):
-    download(os_name, dir_name, bucket_name)
+    s3.download(os_name, dir_name, bucket_name)
 else:
     help()
     sys.exit(1)
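
The command-line interface is unchanged by this refactor: upload and download now simply delegate to the same-named functions in the new deploy/s3.py module.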

deploy/s3.py (new file)

@@ -0,0 +1,95 @@
import boto3
import os
from botocore import UNSIGNED
from botocore.client import Config
from botocore.exceptions import NoCredentialsError


def s3_upload(local_file, s3_file, bucket, access_key, secret_key):
    print('Uploading', local_file, '->', s3_file)
    s3 = boto3.client('s3', aws_access_key_id=access_key,
                      aws_secret_access_key=secret_key)
    try:
        s3.upload_file(local_file, bucket, s3_file)
        print("Upload Successful")
    except FileNotFoundError:
        print("The file was not found")
    except NoCredentialsError:
        print("Credentials not available")
    except Exception:
        print("Upload failed")


def s3_download(local_file, s3_file, bucket):
    print('Downloading', s3_file, '->', local_file)
    s3 = boto3.client('s3', config=Config(signature_version=UNSIGNED))
    try:
        s3.download_file(bucket, s3_file, local_file)
        print("Download Successful")
    except NoCredentialsError:
        print("Credentials not available")
    except Exception:
        print("Download failed")


def s3_list(bucket):
    s3 = boto3.client('s3', config=Config(signature_version=UNSIGNED))
    try:
        return s3.list_objects(Bucket=bucket)['Contents']
    except Exception:
        print("Failed to retrieve list of files in bucket")
        return None


def upload(os_name, dir, bucket, access_key, secret_key):
    objects = s3_list(bucket)
    for path, _, files in os.walk(dir):
        for name in files:
            found = False
            file_path = os.path.join(path, name)
            dir_name = os.path.basename(path)
            file_name = os_name + '-' + dir_name + '-' + name
            if objects:
                for object in objects:
                    key = object['Key']
                    if (key == file_name):
                        found = True
                        break
            if (not found):
                s3_upload(file_path, file_name, bucket, access_key, secret_key)


def download(os_name, dir, bucket):
    objects = s3_list(bucket)
    if objects:
        for object in objects:
            key = object['Key']
            args = key.split('-')
            if (len(args) == 3 and args[0] == os_name):
                os.makedirs(os.path.join(dir, args[1]), exist_ok=True)
                local_file = os.path.join(dir, args[1], args[2])
                if (os.path.isfile(local_file)):
                    print('Found local file', local_file)
                    continue
                s3_download(local_file, key, bucket)


def download_all(dir, bucket):
    objects = s3_list(bucket)
    if objects:
        for object in objects:
            key = object['Key']
            local_file = os.path.join(dir, key)
            s3_download(local_file, key, bucket)


def s3_remove(s3_file, bucket, access_key, secret_key):
    print('Removing', s3_file)
    s3 = boto3.client('s3', aws_access_key_id=access_key,
                      aws_secret_access_key=secret_key)
    try:
        s3.delete_object(Bucket=bucket, Key=s3_file)
        print("Remove successful")
    except Exception as ex:
        print("Remove failed: ", ex)


def remove_files(files, bucket, access_key, secret_key):
    for file in files:
        s3_remove(os.path.basename(file), bucket, access_key, secret_key)
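
For orientation, a minimal sketch of how the scripts above drive this module (bucket name and credentials are placeholders): listing and downloading use unsigned, anonymous requests, while uploads and deletions are authenticated.

import s3

# Mirror every object in the bucket into ./cache, keeping keys as file names.
s3.download_all('./cache', 'example-bucket')

# Push any local archives the bucket is missing; keys are flattened to
# '<os>-<dirname>-<filename>'.
s3.upload('linux', './cache', 'example-bucket', 'ACCESS_KEY', 'SECRET_KEY')

# Delete archives from the bucket; only the basename of each path is used
# as the key.
s3.remove_files(['./cache/linux-x64-old.zip'], 'example-bucket', 'ACCESS_KEY', 'SECRET_KEY')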