#!/usr/bin/env python2
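"""Build-status uploader for OSS-Fuzz.

Queries Google Cloud Build for the latest build of every project found under
the given projects directory, then renders status.html and status.json and
uploads them to the public status bucket (STATUS_BUCKET).
"""
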
import datetime
import os
import sys
import jinja2
import json
import tempfile
import dateutil.parser
from oauth2client.client import GoogleCredentials
import googleapiclient
from googleapiclient.discovery import build as gcb_build
from google.cloud import logging
from google.cloud import storage
from jinja2 import Environment, FileSystemLoader

STATUS_BUCKET = 'oss-fuzz-build-logs'
LOGS_BUCKET = 'oss-fuzz-gcb-logs'
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))


def usage():
  sys.stderr.write(
      "Usage: " + sys.argv[0] + " <projects_dir>\n")
  exit(1)


def scan_project_names(projects_dir):
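  """Return the sorted list of project names: each directory under
  projects_dir that contains a Dockerfile."""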
  projects = []
  for root, dirs, files in os.walk(projects_dir):
    for f in files:
      if f == "Dockerfile":
        projects.append(os.path.basename(root))
  return sorted(projects)


def upload_status(successes, failures):
  """Upload main status page."""
  env = Environment(loader=FileSystemLoader(os.path.join(SCRIPT_DIR,
                                                         'templates')))
  data = {
      'projects': failures + successes,
      'failures': failures,
      'successes': successes,
      'last_updated': datetime.datetime.utcnow().ctime()
  }

  storage_client = storage.Client()
  bucket = storage_client.get_bucket(STATUS_BUCKET)

  blob = bucket.blob('status.html')
  blob.cache_control = 'no-cache'
  blob.upload_from_string(
      env.get_template('status_template.html').render(data),
      content_type='text/html')

  blob = bucket.blob('status.json')
  blob.cache_control = 'no-cache'
  blob.upload_from_string(
      json.dumps(data),
      content_type='application/json')


def is_build_successful(build):
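  """Return True if the Cloud Build build finished with status SUCCESS."""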
  return build['status'] == 'SUCCESS'


def find_last_build(builds):
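  """Return the first build in builds that finished at least DELAY_MINUTES
  ago, after copying its Cloud Build log into the status bucket; return None
  if no build qualifies yet."""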
  DELAY_MINUTES = 40

  for build in builds:
    if build['status'] == 'WORKING':
      continue

    finish_time = dateutil.parser.parse(build['finishTime'], ignoretz=True)
    if (datetime.datetime.utcnow() - finish_time >=
        datetime.timedelta(minutes=DELAY_MINUTES)):
      storage_client = storage.Client()

      status_bucket = storage_client.get_bucket(STATUS_BUCKET)
      gcb_bucket = storage_client.get_bucket(LOGS_BUCKET)
      log_name = 'log-{0}.txt'.format(build['id'])
      log = gcb_bucket.blob(log_name)
      dest_log = status_bucket.blob(log_name)

      with tempfile.NamedTemporaryFile() as f:
        log.download_to_filename(f.name)
        dest_log.upload_from_filename(f.name, content_type='text/plain')

      return build

  return None


def main():
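  """Query Cloud Build for every project's latest finished build and upload
  the resulting status page."""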
  if len(sys.argv) != 2:
    usage()

  projects_dir = sys.argv[1]

  credentials = GoogleCredentials.get_application_default()
  cloudbuild = gcb_build('cloudbuild', 'v1', credentials=credentials)

  successes = []
  failures = []
  for project in scan_project_names(projects_dir):
    print project
    query_filter = ('images="gcr.io/oss-fuzz/{0}"'.format(project))
    try:
      response = cloudbuild.projects().builds().list(
          projectId='oss-fuzz',
          pageSize=2,
          filter=query_filter).execute()
    except googleapiclient.errors.HttpError as e:
      print >>sys.stderr, 'Failed to list builds for', project, ':', str(e)
      continue

    if 'builds' not in response:
      continue

    builds = response['builds']
    last_build = find_last_build(builds)
    if not last_build:
      print >>sys.stderr, 'Failed to get build for', project
      continue

    print last_build['startTime'], last_build['status'], last_build['id']
    if is_build_successful(last_build):
      successes.append({
          'name': project,
          'build_id': last_build['id'],
          'finish_time': last_build['finishTime'],
          'success': True,
      })
    else:
      failures.append({
          'name': project,
          'build_id': last_build['id'],
          'finish_time': last_build['finishTime'],
          'success': False,
      })

  upload_status(successes, failures)


if __name__ == "__main__":
  main()