oss-fuzz/infra/gcb/builds_status.py

#!/usr/bin/env python2
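"""Build status generator: queries recent Google Cloud Build results for each
OSS-Fuzz project and uploads status.json / status-coverage.json summaries
(plus copies of build logs) to the oss-fuzz-build-logs GCS bucket."""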
import datetime
import json
import os
import sys
import tempfile
import time

import dateutil.parser
import jinja2
from oauth2client.client import GoogleCredentials
import googleapiclient
from googleapiclient.discovery import build as gcb_build
from google.cloud import logging
from google.cloud import storage

import build_and_run_coverage
import build_project
STATUS_BUCKET = 'oss-fuzz-build-logs'

SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))

# Retry failed Cloud Build API requests up to RETRY_COUNT times, waiting
# RETRY_WAIT seconds between attempts.
RETRY_COUNT = 3
RETRY_WAIT = 5

# Fetch at most MAX_BUILD_RESULTS recent builds, BUILDS_PAGE_SIZE at a time.
MAX_BUILD_RESULTS = 2000
BUILDS_PAGE_SIZE = 256


def usage():
  sys.stderr.write('Usage: ' + sys.argv[0] + ' <projects_dir>\n')
  sys.exit(1)


def scan_project_names(projects_dir):
  """Return the sorted names of all projects (directories containing a
  Dockerfile) under |projects_dir|."""
  projects = []
  for root, dirs, files in os.walk(projects_dir):
    for f in files:
      if f == 'Dockerfile':
        projects.append(os.path.basename(root))

  return sorted(projects)


def upload_status(successes, failures, status_filename):
  """Upload main status page."""
  data = {
      'projects': failures + successes,
      'failures': failures,
      'successes': successes,
      'last_updated': datetime.datetime.utcnow().ctime()
  }

  storage_client = storage.Client()
  bucket = storage_client.get_bucket(STATUS_BUCKET)
  blob = bucket.blob(status_filename)
  blob.cache_control = 'no-cache'
  blob.upload_from_string(json.dumps(data), content_type='application/json')


def is_build_successful(build):
  return build['status'] == 'SUCCESS'


def find_last_build(builds, project, build_tag_suffix):
  """Return the last finished build for |project| with the given tag suffix,
  copying its log into the status bucket. Returns None if there is no suitable
  build."""
  DELAY_MINUTES = 40
  tag = project + '-' + build_tag_suffix

  builds = builds.get(tag)
  if not builds:
    return None

  for build in builds:
    if build['status'] == 'WORKING':
      continue

    if tag not in build['tags']:
      continue

    if 'finishTime' not in build:
      continue

    # Only consider builds that finished at least DELAY_MINUTES ago.
    finish_time = dateutil.parser.parse(build['finishTime'], ignoretz=True)
    if (datetime.datetime.utcnow() - finish_time >=
        datetime.timedelta(minutes=DELAY_MINUTES)):
      storage_client = storage.Client()
      status_bucket = storage_client.get_bucket(STATUS_BUCKET)
      gcb_bucket = storage_client.get_bucket(build_project.GCB_LOGS_BUCKET)

      # Copy the GCB log for this build into the status bucket.
      log_name = 'log-{0}.txt'.format(build['id'])
      log = gcb_bucket.blob(log_name)
      dest_log = status_bucket.blob(log_name)

      with tempfile.NamedTemporaryFile() as f:
        log.download_to_filename(f.name)
        dest_log.upload_from_filename(f.name, content_type='text/plain')

      return build

  return None


def execute_with_retries(request):
  """Execute the API request, retrying up to RETRY_COUNT times on failure."""
  for i in xrange(RETRY_COUNT + 1):
    try:
      return request.execute()
    except Exception as e:
      print('request failed with {0}, retrying...'.format(str(e)))
      if i < RETRY_COUNT:
        time.sleep(RETRY_WAIT)
        continue

      raise


def get_builds(cloudbuild):
  """Get a batch of the latest builds (up to MAX_BUILD_RESULTS), grouped by
  tag."""
  ungrouped_builds = []
  next_page_token = None

  while True:
    page_size = min(BUILDS_PAGE_SIZE, MAX_BUILD_RESULTS - len(ungrouped_builds))
    response = execute_with_retries(cloudbuild.projects().builds().list(
        projectId='oss-fuzz', pageSize=page_size, pageToken=next_page_token))

    if 'builds' not in response:
      print >>sys.stderr, 'Invalid response', response
      return None

    ungrouped_builds.extend(response['builds'])
    if len(ungrouped_builds) >= MAX_BUILD_RESULTS:
      break

    next_page_token = response.get('nextPageToken')
    if not next_page_token:
      # No more pages to fetch.
      break

  # Group builds by tag so the latest build for a project/tag pair can be
  # looked up directly.
  builds = {}
  for build in ungrouped_builds:
    for tag in build['tags']:
      builds.setdefault(tag, []).append(build)

  return builds


def update_build_status(
    builds, projects, build_tag_suffix, status_filename):
  """Upload a status page summarizing the last build of each project."""
  successes = []
  failures = []

  for project in projects:
    print project
    last_build = find_last_build(builds, project, build_tag_suffix)
    if not last_build:
      print >>sys.stderr, 'Failed to get build for', project
      continue

    print last_build['startTime'], last_build['status'], last_build['id']
    if is_build_successful(last_build):
      successes.append({
          'name': project,
          'build_id': last_build['id'],
          'finish_time': last_build['finishTime'],
          'success': True,
      })
    else:
      failures.append({
          'name': project,
          'build_id': last_build['id'],
          'finish_time': last_build['finishTime'],
          'success': False,
      })

  upload_status(successes, failures, status_filename)


def main():
  if len(sys.argv) != 2:
    usage()

  projects_dir = sys.argv[1]
  projects = scan_project_names(projects_dir)

  credentials = GoogleCredentials.get_application_default()
  cloudbuild = gcb_build('cloudbuild', 'v1', credentials=credentials)
  builds = get_builds(cloudbuild)

  update_build_status(builds, projects, build_project.FUZZING_BUILD_TAG,
                      status_filename='status.json')
  update_build_status(builds, projects,
                      build_and_run_coverage.COVERAGE_BUILD_TAG,
                      status_filename='status-coverage.json')


if __name__ == '__main__':
  main()