mirror of https://github.com/BOINC/boinc.git
[CI] Remove unused aws s3 custom deploy scripts
Signed-off-by: Vitalii Koshura <lestat.de.lionkur@gmail.com>
commit d86472a47e
parent a657d5def1
deploy/cleanup_vcpkg_archive_cache.py
@@ -1,194 +0,0 @@
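# Cleans up a vcpkg binary-archive cache hosted in S3: downloads the bucket
# contents, groups the archives by architecture/package/version, then removes
# archives that are outdated (superseded versions) or whose dependency chain
# is no longer fully cached.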
import os
import sys
import zipfile

import s3  # local helper module (deploy/s3.py), removed in this same commit


def get_files(dir):
    file_names = []
    for path, _, files in os.walk(dir):
        for name in files:
            file_name = os.path.join(path, name)
            file_names.append(file_name)
    return file_names


def read_control(control):
    # Parse the CONTROL metadata of a vcpkg archive ('Key: value' lines) and
    # return the package name, 'version-portversion', and architecture.
    package = ''
    version = '0'
    port_version = '0'
    architecture = ''
    lines = control.split('\n')
    for line in lines:
        if line != '':
            pair = line.split(': ')
            if pair[0] == 'Package':
                package = pair[1]
            elif pair[0] == 'Version':
                version = pair[1]
            elif pair[0] == 'Port-Version':
                port_version = pair[1]
            elif pair[0] == 'Architecture':
                architecture = pair[1]
    return package, version + '-' + port_version, architecture


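# Illustrative CONTROL content, using the field names the parser above reads;
# the values here are hypothetical, not taken from the commit:
#
#   Package: zlib
#   Version: 1.2.13
#   Port-Version: 1
#   Architecture: x64-windows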
def get_packages(archives):
    # Group archives into a nested dict:
    # architecture -> package -> version -> [archives].
    packages = {}
    for archive in archives:
        zip_file = zipfile.ZipFile(archive, 'r')
        control = zip_file.read('CONTROL')
        package, version, architecture = read_control(control.decode('utf-8'))
        if architecture not in packages:
            packages[architecture] = {}
        if package not in packages[architecture]:
            packages[architecture][package] = {}
        if version not in packages[architecture][package]:
            packages[architecture][package][version] = []
        if archive not in packages[architecture][package][version]:
            packages[architecture][package][version].append(archive)
    return packages


def print_packages(packages):
    for architecture in packages:
        print(architecture)
        for package in packages[architecture]:
            print('\t', package)
            for version in packages[architecture][package]:
                print('\t\t', version)
                for archive in packages[architecture][package][version]:
                    print('\t\t\t', archive)


def get_hash_from_name(name):
    # Archive names have the form '<os>-<prefix>-<hash>.zip' (see s3.upload);
    # e.g. a hypothetical 'windows-ab-ab12cd34.zip' yields 'ab12cd34'.
    parts = name.split('-')
    return parts[2].split('.')[0]


def read_vcpkg_abi_info_content(content, packages):
    # Each line of vcpkg_abi_info.txt is a '<name> <hash>' pair; collect the
    # hashes of entries whose name is itself one of the cached packages.
    dependencies = []
    lines = content.split('\n')
    for line in lines:
        if line:
            pair = line.split(' ')
            if pair[0] in packages:
                dependencies.append(pair[1])
    return dependencies


def read_vcpkg_abi_info(archive, package, packages):
    zip_file = zipfile.ZipFile(archive, 'r')
    # The gtest port installs its share directory as 'GTest'.
    if package == 'gtest':
        package = 'GTest'
    file_name = 'share/' + package + '/vcpkg_abi_info.txt'
    try:
        info_file = zip_file.read(file_name)
        return read_vcpkg_abi_info_content(info_file.decode('utf-8'), packages)
    except Exception as ex:
        print('Failed to read the file', file_name, 'from', archive, ':', ex)
        # An empty list keeps the return type consistent for callers that
        # test len() of the result.
        return []


def mark_outdated_packages(packages, modification_dates):
    outdated = []
    for architecture in packages:
        for package in packages[architecture]:
            archives_with_same_version = {}
            # Note: versions are compared as plain strings, so this picks a
            # lexicographic 'latest', not a true semantic-version maximum.
            max_version = sorted(packages[architecture][package].keys(), reverse=True)[0]
            for version in packages[architecture][package]:
                if version != max_version:
                    # Every archive of a superseded version is outdated.
                    for archive in packages[architecture][package][version]:
                        outdated.append(archive)
                else:
                    if len(packages[architecture][package][version]) == 1:
                        continue
                    # Several archives share the newest version: keep only the
                    # most recently modified one. Check membership before
                    # indexing modification_dates to avoid a KeyError.
                    for archive in packages[architecture][package][version]:
                        if archive in modification_dates and modification_dates[archive] not in archives_with_same_version:
                            archives_with_same_version[modification_dates[archive]] = archive
                    max_date = sorted(archives_with_same_version.keys(), reverse=True)[0]
                    for archive in packages[architecture][package][version]:
                        if archive != archives_with_same_version[max_date]:
                            outdated.append(archive)
    return outdated


def get_hash_list(packages):
    hash_list = []
    for architecture in packages:
        for package in packages[architecture]:
            for version in packages[architecture][package]:
                for archive in packages[architecture][package][version]:
                    hash_list.append(get_hash_from_name(os.path.basename(archive)))
    return hash_list


def remove_outdated_from_hash_list(hash_list, outdated):
    for package in outdated:
        package_hash = get_hash_from_name(os.path.basename(package))
        if package_hash in hash_list:
            hash_list.remove(package_hash)


def add_package_to_outdated_by_hash(packages, outdated, package_hash, architecture):
    for package in packages[architecture]:
        for version in packages[architecture][package]:
            for archive in packages[architecture][package][version]:
                if get_hash_from_name(os.path.basename(archive)) == package_hash and archive not in outdated:
                    outdated.append(archive)
                    return


def process_package_dependencies(package_hash, dependencies, packages, outdated, hash_list, architecture):
    # A package is valid only if its own hash is still cached and every
    # dependency, recursively, is valid; anything invalid is marked outdated.
    is_valid = True
    if package_hash not in hash_list:
        add_package_to_outdated_by_hash(packages, outdated, package_hash, architecture)
        is_valid = False
    if package_hash not in dependencies:
        return is_valid
    package_dependencies = dependencies[package_hash]
    for dependency_hash in package_dependencies:
        is_valid = is_valid and process_package_dependencies(dependency_hash, dependencies, packages, outdated, hash_list, architecture)
        if not is_valid:
            add_package_to_outdated_by_hash(packages, outdated, package_hash, architecture)
            if package_hash in hash_list:
                hash_list.remove(package_hash)
            return False
    return is_valid


def process_dependencies_list(dependencies, packages, outdated, hash_list, architecture):
    for package_hash in dependencies:
        process_package_dependencies(package_hash, dependencies, packages, outdated, hash_list, architecture)


def mark_duplicate_packages(packages, outdated):
    hash_list = get_hash_list(packages)
    remove_outdated_from_hash_list(hash_list, outdated)

    # Build a hash -> dependency-hashes map per architecture, then invalidate
    # every package whose dependency chain is no longer fully cached.
    for architecture in packages:
        dependencies_list = {}
        for package in packages[architecture]:
            for version in packages[architecture][package]:
                for archive in packages[architecture][package][version]:
                    dependencies = read_vcpkg_abi_info(archive, package, packages[architecture].keys())
                    if len(dependencies) != 0:
                        dependencies_list[get_hash_from_name(os.path.basename(archive))] = dependencies
        process_dependencies_list(dependencies_list, packages, outdated, hash_list, architecture)


def print_outdated(outdated, packages):
    for architecture in packages:
        for package in packages[architecture]:
            for version in packages[architecture][package]:
                for archive in packages[architecture][package][version]:
                    if archive in outdated:
                        print(architecture, package, version, archive, sep=' -> ')


def help():
    print('Usage:')
    print('python cleanup_vcpkg_archive_cache.py <dir> <bucket> <access_key> <secret_key>')


if len(sys.argv) != 5:
    help()
    sys.exit(1)

dir_name = sys.argv[1]
bucket_name = sys.argv[2]
access_key = sys.argv[3]
secret_key = sys.argv[4]

modification_dates = s3.download_all(dir_name, bucket_name)
packages = get_packages(get_files(dir_name))
print_packages(packages)
outdated = mark_outdated_packages(packages, modification_dates)

mark_duplicate_packages(packages, outdated)
print('Outdated packages:')
print_outdated(outdated, packages)
s3.remove_files(outdated, bucket_name, access_key, secret_key)
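# A hedged invocation sketch; the directory, bucket, and credentials below
# are placeholders, not values taken from this commit:
#
#   python cleanup_vcpkg_archive_cache.py ./cache example-bucket ACCESS_KEY SECRET_KEY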
deploy/manage_vcpkg_archive_cache.py
@@ -1,27 +0,0 @@
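# Thin command-line wrapper over the s3 helper module: 'upload' pushes a local
# vcpkg archive cache to S3 with credentials, 'download' fetches it anonymously.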
import sys

import s3


def help():
    print('Usage:')
    print('python manage_vcpkg_archive_cache.py <action> <dir> <os> <bucket> <access_key> <secret_key>')


# 7 argv entries for 'upload' (which needs credentials), 5 for 'download'.
if len(sys.argv) != 7 and len(sys.argv) != 5:
    help()
    sys.exit(1)

action_name = sys.argv[1]
dir_name = sys.argv[2]
os_name = sys.argv[3]
bucket_name = sys.argv[4]

if action_name == 'upload' and len(sys.argv) == 7:
    access_key = sys.argv[5]
    secret_key = sys.argv[6]
    s3.upload(os_name, dir_name, bucket_name, access_key, secret_key)
elif action_name == 'download':
    s3.download(os_name, dir_name, bucket_name)
else:
    help()
    sys.exit(1)

sys.exit(0)
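# Hedged invocation sketches; the paths and bucket name are placeholders:
#
#   python manage_vcpkg_archive_cache.py download ./cache windows example-bucket
#   python manage_vcpkg_archive_cache.py upload ./cache windows example-bucket ACCESS_KEY SECRET_KEY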
deploy/s3.py
@@ -1,114 +0,0 @@
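# boto3 helpers shared by the two scripts above: unsigned (anonymous) reads
# from a public bucket, credentialed uploads and deletes.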
import os

import boto3
from botocore import UNSIGNED
from botocore.client import Config
from botocore.exceptions import NoCredentialsError


def s3_upload(local_file, s3_file, bucket, access_key, secret_key):
    print('Uploading', local_file, '->', s3_file)

    s3 = boto3.client('s3', aws_access_key_id=access_key,
                      aws_secret_access_key=secret_key)

    try:
        s3.upload_file(local_file, bucket, s3_file)
        print("Upload Successful")
    except FileNotFoundError:
        print("The file was not found")
    except NoCredentialsError:
        print("Credentials not available")
    except Exception as ex:
        # Report the failure instead of swallowing it silently.
        print("Upload failed:", ex)


def s3_download(local_file, s3_file, bucket):
    print('Downloading', s3_file, '->', local_file)
    # The UNSIGNED config allows anonymous reads from a public bucket.
    s3 = boto3.client('s3', config=Config(signature_version=UNSIGNED))

    try:
        s3.download_file(bucket, s3_file, local_file)
        print("Download Successful")
    except NoCredentialsError:
        print("Credentials not available")
    except Exception as ex:
        print("Download failed:", ex)


def s3_list(bucket):
    s3 = boto3.client('s3', config=Config(signature_version=UNSIGNED))

    try:
        # Note: list_objects returns at most 1000 keys per call; an empty
        # bucket has no 'Contents' key, which lands in the except branch.
        return s3.list_objects(Bucket=bucket)['Contents']
    except Exception:
        print("Failed to retrieve list of files in bucket")
        return None


def upload(os_name, dir, bucket, access_key, secret_key):
    objects = s3_list(bucket)
    for path, _, files in os.walk(dir):
        for name in files:
            found = False
            file_path = os.path.join(path, name)
            dir_name = os.path.basename(path)
            # Flatten the local layout into '<os>-<subdir>-<file>' object keys
            # and skip anything the bucket already has.
            file_name = os_name + '-' + dir_name + '-' + name
            if objects:
                for obj in objects:
                    key = obj['Key']
                    if key == file_name:
                        found = True
                        break
            if not found:
                s3_upload(file_path, file_name, bucket, access_key, secret_key)


def cleanup(dir):
    # Recursively delete the files under dir; emptied directories are kept.
    if not os.path.isdir(dir):
        return
    for filename in os.listdir(dir):
        file_path = os.path.join(dir, filename)
        try:
            if os.path.isfile(file_path):
                os.unlink(file_path)
            elif os.path.isdir(file_path):
                cleanup(file_path)
        except Exception as e:
            print('Failed to delete %s. Reason: %s' % (file_path, e))


def download(os_name, dir, bucket):
    cleanup(dir)
    objects = s3_list(bucket)
    if objects:
        for obj in objects:
            key = obj['Key']
            # Keys look like '<os>-<subdir>-<file>'; restore the local layout
            # for keys that belong to this OS.
            args = key.split('-')
            if len(args) == 3 and args[0] == os_name:
                os.makedirs(os.path.join(dir, args[1]), exist_ok=True)
                local_file = os.path.join(dir, args[1], args[2])
                if os.path.isfile(local_file):
                    print('Found local file', local_file)
                    continue
                s3_download(local_file, key, bucket)


def download_all(dir, bucket):
    # Download every object in the bucket and return a map of
    # local path -> LastModified timestamp.
    files = {}
    cleanup(dir)
    os.makedirs(dir, exist_ok=True)
    objects = s3_list(bucket)
    if objects:
        for obj in objects:
            key = obj['Key']
            local_file = os.path.join(dir, key)
            files[local_file] = obj['LastModified']
            s3_download(local_file, key, bucket)
    return files


def s3_remove(s3_file, bucket, access_key, secret_key):
    print('Removing', s3_file)
    s3 = boto3.client('s3', aws_access_key_id=access_key,
                      aws_secret_access_key=secret_key)
    try:
        s3.delete_object(Bucket=bucket, Key=s3_file)
        print("Remove successful")
    except Exception as ex:
        print("Remove failed: ", ex)


def remove_files(files, bucket, access_key, secret_key):
    # Object keys equal the local basenames produced by download_all.
    for file in files:
        s3_remove(os.path.basename(file), bucket, access_key, secret_key)
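# Minimal usage sketch of this module; the bucket name and credentials are
# placeholders:
#
#   import s3
#   dates = s3.download_all('./cache', 'example-bucket')          # anonymous reads
#   s3.remove_files(list(dates), 'example-bucket', ACCESS_KEY, SECRET_KEY)  # signed deletes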