[build][infra] Improve build functions. (#6296)

The important functional changes are mostly improvements to
the command-line scripts (this doesn't affect the build infra, only
local use); an example invocation follows the list:

1. Make sure the scripts use the same builder as builds requested by infra; otherwise builds
are very slow and fail for larger projects.
2. Allow users to specify --test-images to use base images with the "-testing" suffix.
3. Allow script users to specify --parallel for parallel builds.
4. Allow script users to specify --testing so that builds are uploaded to testing buckets.
5. Allow script users to specify --branch so that builds use the specified branch instead of master.
6. Clone oss-fuzz with depth 1 for improved speed and space usage.
7. Use logging instead of writing to stderr or using print.
8. Allow scripts to accept multiple projects.
9. Allow the scripts to keep executing after a failure to get build steps.
10. Change scripts to use python3.
11. Add more tags so builds are easier to query.
12. Log the GCB page for each build.
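For local use, a typical invocation exercising the new flags might look like the
following (the script path and project names here are illustrative; adjust them to
your checkout):

    python3 infra/build/functions/build_project.py libpng zlib \
        --testing --test-images --parallel --branch my-test-branch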

Other changes include major refactoring (a short sketch of the new helper classes follows the list):

1. Don't construct image names from scratch using format strings each time they are used.
Provide a helper function for this.
2. Provide a helper function, get_env, instead of constructing the env from scratch each time.
3. Move compile step into its own function: get_compile_step.
4. Move upload steps into their own helper function get_upload_steps.
5. Don't misuse the name image_project when we really mean cloud project.
6. Move cleanup step into its own helper function: get_cleanup_step.
7. Exit with returncode of main function from build_project.
8. Add unittests for build_project.
9. Make request_build share run_build code with build_project.
10. Use proper spacing in comments.
11. Test builds other than libfuzzer-ASAN-x86_64: test other sanitizers, fuzzing engines, and architectures.
12. Make build_and_run_coverage share more code with build_project.
13. Move the tests for build_and_run_coverage.py out of request_coverage_test.py into their own file (build_and_run_coverage_test.py).
14. Use single quotes for strings.
15. Store state for a build in Build object instead of passing it everywhere.
16. Don't abuse project_yaml dict for storing project state. Use a Project object instead.
17. Better variable naming.
18. Use more classes instead of passing around arguments.
19. Use more f-strings.
20. Make scripts share main function.
21. Begin comments with an uppercase letter and end them with a period.
22. Don't import functions or classes, as dictated by the style guide.
23. Share more test code in test_utils.
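A short sketch of how the refactored pieces fit together (run from the functions
directory; the project name and option values are placeholders, and the real entry
point is build_script_main as shown in the diffs below):

    import build_project

    # Command-line options are bundled into a namedtuple instead of loose arguments.
    config = build_project.Config(testing=False, test_images=False,
                                  branch=None, parallel=False)

    # Project wraps project.yaml contents and the Dockerfile; Build holds one
    # (engine, sanitizer, architecture) configuration.
    project_yaml, dockerfile = build_project.get_project_data('example-project')
    project = build_project.Project('example-project', project_yaml, dockerfile,
                                    'oss-fuzz')
    build = build_project.Build('libfuzzer', 'address', 'x86_64')

    # get_env and get_compile_step replace the old hand-built env and compile step.
    env = build_project.get_env(project.fuzzing_language, build)
    compile_step = build_project.get_compile_step(project, build, env,
                                                  config.parallel)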

Related: #6180.
jonathanmetzman 2021-08-25 11:44:52 -07:00 committed by GitHub
parent 0378a92819
commit 370fb73473
14 changed files with 1129 additions and 609 deletions

View File

@@ -32,8 +32,6 @@ BASE_IMAGES = [
BASE_PROJECT = 'oss-fuzz-base'
TAG_PREFIX = f'gcr.io/{BASE_PROJECT}/'
BASE_SANITIZER_LIBS_IMAGE = TAG_PREFIX + 'base-sanitizer-libs-builder'
def _get_base_image_steps(images, tag_prefix=TAG_PREFIX):
"""Returns build steps for given images."""

View File

@@ -13,11 +13,11 @@
# limitations under the License.
#
################################################################################
#!/usr/bin/python2
#!/usr/bin/env python3
"""Starts and runs coverage build on Google Cloud Builder.
Usage: build_and_run_coverage.py <project_dir>
Usage: build_and_run_coverage.py <project>.
"""
import datetime
import json
import logging
import os
@@ -27,117 +27,104 @@ import build_lib
import build_project
SANITIZER = 'coverage'
CONFIGURATION = ['FUZZING_ENGINE=libfuzzer', 'SANITIZER=%s' % SANITIZER]
FUZZING_ENGINE = 'libfuzzer'
ARCHITECTURE = 'x86_64'
PLATFORM = 'linux'
COVERAGE_BUILD_TAG = 'coverage'
COVERAGE_BUILD_TYPE = 'coverage'
# Where code coverage reports need to be uploaded to.
COVERAGE_BUCKET_NAME = 'oss-fuzz-coverage'
# Link to the code coverage report in HTML format.
HTML_REPORT_URL_FORMAT = (build_lib.GCS_URL_BASENAME + COVERAGE_BUCKET_NAME +
'/{project}/reports/{date}/{platform}/index.html')
# This is needed for ClusterFuzz to pick up the most recent reports data.
LATEST_REPORT_INFO_URL = ('/' + COVERAGE_BUCKET_NAME +
'/latest_report_info/{project}.json')
LATEST_REPORT_INFO_CONTENT_TYPE = 'application/json'
# Link where to upload code coverage report files to.
UPLOAD_URL_FORMAT = 'gs://' + COVERAGE_BUCKET_NAME + '/{project}/{type}/{date}'
LATEST_REPORT_INFO_CONTENT_TYPE = 'application/json'
# Languages from project.yaml that have code coverage support.
LANGUAGES_WITH_COVERAGE_SUPPORT = ['c', 'c++', 'go', 'jvm', 'rust']
def usage():
"""Exit with code 1 and display syntax to use this file."""
sys.stderr.write("Usage: " + sys.argv[0] + " <project_dir>\n")
sys.exit(1)
class Bucket: # pylint: disable=too-few-public-methods
"""Class representing the coverage GCS bucket."""
def __init__(self, project, date, platform, testing):
self.coverage_bucket_name = 'oss-fuzz-coverage'
if testing:
self.coverage_bucket_name += '-testing'
self.date = date
self.project = project
self.html_report_url = (
f'{build_lib.GCS_URL_BASENAME}{self.coverage_bucket_name}/{project}'
f'/reports/{date}/{platform}/index.html')
self.latest_report_info_url = (f'/{COVERAGE_BUCKET_NAME}'
f'/latest_report_info/{project}.json')
def get_upload_url(self, upload_type):
"""Returns an upload url for |upload_type|."""
return (f'gs://{self.coverage_bucket_name}/{self.project}'
f'/{upload_type}/{self.date}')
# pylint: disable=too-many-locals
def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
image_project, base_images_project):
def get_build_steps( # pylint: disable=too-many-locals, too-many-arguments
project_name, project_yaml_contents, dockerfile_lines, image_project,
base_images_project, config):
"""Returns build steps for project."""
project_yaml = build_project.load_project_yaml(project_name,
project_yaml_file,
image_project)
if project_yaml['disabled']:
logging.info('Project "%s" is disabled.', project_name)
project = build_project.Project(project_name, project_yaml_contents,
dockerfile_lines, image_project)
if project.disabled:
logging.info('Project "%s" is disabled.', project.name)
return []
if project_yaml['language'] not in LANGUAGES_WITH_COVERAGE_SUPPORT:
if project.fuzzing_language not in LANGUAGES_WITH_COVERAGE_SUPPORT:
logging.info(
'Project "%s" is written in "%s", coverage is not supported yet.',
project_name, project_yaml['language'])
project.name, project.fuzzing_language)
return []
name = project_yaml['name']
image = project_yaml['image']
language = project_yaml['language']
report_date = datetime.datetime.now().strftime('%Y%m%d')
report_date = build_project.get_datetime_now().strftime('%Y%m%d')
bucket = Bucket(project.name, report_date, PLATFORM, config.testing)
build_steps = build_lib.project_image_steps(name, image, language)
build_steps = build_lib.project_image_steps(project.name,
project.image,
project.fuzzing_language,
branch=config.branch,
test_images=config.test_images)
env = CONFIGURATION[:]
out = '/workspace/out/' + SANITIZER
env.append('OUT=' + out)
env.append('FUZZING_LANGUAGE=' + language)
workdir = build_project.workdir_from_dockerfile(dockerfile_lines)
if not workdir:
workdir = '/src'
failure_msg = ('*' * 80 + '\nCoverage build failed.\nTo reproduce, run:\n'
f'python infra/helper.py build_image {name}\n'
'python infra/helper.py build_fuzzers --sanitizer coverage '
f'{name}\n' + '*' * 80)
# Compilation step.
build_steps.append({
'name':
image,
'env':
env,
'args': [
'bash',
'-c',
# Remove /out to make sure there are non instrumented binaries.
# `cd /src && cd {workdir}` (where {workdir} is parsed from the
# Dockerfile). Container Builder overrides our workdir so we need
# to add this step to set it back.
(f'rm -r /out && cd /src && cd {workdir} && mkdir -p {out} && '
f'compile || (echo "{failure_msg}" && false)'),
],
})
download_corpora_steps = build_lib.download_corpora_steps(project_name)
build = build_project.Build('libfuzzer', 'coverage', 'x86_64')
env = build_project.get_env(project.fuzzing_language, build)
build_steps.append(
build_project.get_compile_step(project, build, env, config.parallel))
download_corpora_steps = build_lib.download_corpora_steps(
project.name, testing=config.testing)
if not download_corpora_steps:
logging.info('Skipping code coverage build for %s.', project_name)
logging.info('Skipping code coverage build for %s.', project.name)
return []
build_steps.extend(download_corpora_steps)
failure_msg = ('*' * 80 + '\nCode coverage report generation failed.\n'
'To reproduce, run:\n'
f'python infra/helper.py build_image {name}\n'
f'python infra/helper.py build_image {project.name}\n'
'python infra/helper.py build_fuzzers --sanitizer coverage '
f'{name}\n'
f'python infra/helper.py coverage {name}\n' + '*' * 80)
f'{project.name}\n'
f'python infra/helper.py coverage {project.name}\n' + '*' * 80)
# Unpack the corpus and run coverage script.
coverage_env = env + [
'HTTP_PORT=',
'COVERAGE_EXTRA_ARGS=%s' % project_yaml['coverage_extra_args'].strip(),
f'COVERAGE_EXTRA_ARGS={project.coverage_extra_args.strip()}',
]
if 'dataflow' in project_yaml['fuzzing_engines']:
if 'dataflow' in project.fuzzing_engines:
coverage_env.append('FULL_SUMMARY_PER_TARGET=1')
build_steps.append({
'name': f'gcr.io/{base_images_project}/base-runner',
'env': coverage_env,
'name':
build_project.get_runner_image_name(base_images_project,
config.testing),
'env':
coverage_env,
'args': [
'bash', '-c',
('for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*} || ('
@@ -156,9 +143,7 @@ def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
})
# Upload the report.
upload_report_url = UPLOAD_URL_FORMAT.format(project=project_name,
type='reports',
date=report_date)
upload_report_url = bucket.get_upload_url('reports')
# Delete the existing report as gsutil cannot overwrite it in a useful way due
# to the lack of `-T` option (it creates a subdir in the destination dir).
@@ -170,15 +155,14 @@ def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
'-m',
'cp',
'-r',
os.path.join(out, 'report'),
os.path.join(build.out, 'report'),
upload_report_url,
],
})
# Upload the fuzzer stats. Delete the old ones just in case.
upload_fuzzer_stats_url = UPLOAD_URL_FORMAT.format(project=project_name,
type='fuzzer_stats',
date=report_date)
upload_fuzzer_stats_url = bucket.get_upload_url('fuzzer_stats')
build_steps.append(build_lib.gsutil_rm_rf_step(upload_fuzzer_stats_url))
build_steps.append({
'name':
@@ -187,15 +171,13 @@ def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
'-m',
'cp',
'-r',
os.path.join(out, 'fuzzer_stats'),
os.path.join(build.out, 'fuzzer_stats'),
upload_fuzzer_stats_url,
],
})
# Upload the fuzzer logs. Delete the old ones just in case
upload_fuzzer_logs_url = UPLOAD_URL_FORMAT.format(project=project_name,
type='logs',
date=report_date)
upload_fuzzer_logs_url = bucket.get_upload_url('logs')
build_steps.append(build_lib.gsutil_rm_rf_step(upload_fuzzer_logs_url))
build_steps.append({
'name':
@@ -204,15 +186,13 @@ def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
'-m',
'cp',
'-r',
os.path.join(out, 'logs'),
os.path.join(build.out, 'logs'),
upload_fuzzer_logs_url,
],
})
# Upload srcmap.
srcmap_upload_url = UPLOAD_URL_FORMAT.format(project=project_name,
type='srcmap',
date=report_date)
srcmap_upload_url = bucket.get_upload_url('srcmap')
srcmap_upload_url = srcmap_upload_url.rstrip('/') + '.json'
build_steps.append({
'name': 'gcr.io/cloud-builders/gsutil',
@@ -225,15 +205,13 @@ def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
# Update the latest report information file for ClusterFuzz.
latest_report_info_url = build_lib.get_signed_url(
LATEST_REPORT_INFO_URL.format(project=project_name),
bucket.latest_report_info_url,
content_type=LATEST_REPORT_INFO_CONTENT_TYPE)
latest_report_info_body = json.dumps({
'fuzzer_stats_dir':
upload_fuzzer_stats_url,
'html_report_url':
HTML_REPORT_URL_FORMAT.format(project=project_name,
date=report_date,
platform=PLATFORM),
bucket.html_report_url,
'report_date':
report_date,
'report_summary_path':
@@ -249,25 +227,10 @@ def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
def main():
"""Build and run coverage for projects."""
if len(sys.argv) != 2:
usage()
image_project = 'oss-fuzz'
base_images_project = 'oss-fuzz-base'
project_dir = sys.argv[1].rstrip(os.path.sep)
project_name = os.path.basename(project_dir)
dockerfile_path = os.path.join(project_dir, 'Dockerfile')
project_yaml_path = os.path.join(project_dir, 'project.yaml')
with open(dockerfile_path) as docker_file:
dockerfile_lines = docker_file.readlines()
with open(project_yaml_path) as project_yaml_file:
steps = get_build_steps(project_name, project_yaml_file, dockerfile_lines,
image_project, base_images_project)
build_project.run_build(steps, project_name, COVERAGE_BUILD_TAG)
return build_project.build_script_main(
'Generates coverage report for project.', get_build_steps,
COVERAGE_BUILD_TYPE)
if __name__ == "__main__":
main()
if __name__ == '__main__':
sys.exit(main())

View File

@@ -1,4 +1,4 @@
# Copyright 2020 Google Inc.
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,80 +13,66 @@
# limitations under the License.
#
################################################################################
"""Unit tests for Cloud Function that builds coverage reports."""
"""Unit tests for build_and_run_coverage."""
import json
import datetime
import os
import sys
import unittest
from unittest import mock
from google.cloud import ndb
from pyfakefs import fake_filesystem_unittest
sys.path.append(os.path.dirname(__file__))
FUNCTIONS_DIR = os.path.dirname(__file__)
sys.path.append(FUNCTIONS_DIR)
# pylint: disable=wrong-import-position
from datastore_entities import Project
from build_and_run_coverage import get_build_steps
import build_and_run_coverage
import build_project
import test_utils
# pylint: disable=no-member
class TestRequestCoverageBuilds(unittest.TestCase):
class TestRequestCoverageBuilds(fake_filesystem_unittest.TestCase):
"""Unit tests for sync."""
@classmethod
def setUpClass(cls):
cls.ds_emulator = test_utils.start_datastore_emulator()
test_utils.wait_for_emulator_ready(cls.ds_emulator, 'datastore',
test_utils.DATASTORE_READY_INDICATOR)
test_utils.set_gcp_environment()
def setUp(self):
test_utils.reset_ds_emulator()
self.maxDiff = None # pylint: disable=invalid-name
self.setUpPyfakefs()
@mock.patch('build_lib.get_signed_url', return_value='test_url')
@mock.patch('build_lib.download_corpora_steps',
return_value=[{
'url': 'test_download'
}])
@mock.patch('datetime.datetime')
@mock.patch('build_project.get_datetime_now',
return_value=test_utils.FAKE_DATETIME)
def test_get_coverage_build_steps(self, mock_url, mock_corpora_steps,
mock_time):
mock_get_datetime_now):
"""Test for get_build_steps."""
del mock_url, mock_corpora_steps, mock_time
datetime.datetime = test_utils.SpoofedDatetime
del mock_url, mock_corpora_steps, mock_get_datetime_now
project_yaml_contents = ('language: c++\n'
'sanitizers:\n'
' - address\n'
'architectures:\n'
' - x86_64\n')
dockerfile_contents = 'test line'
image_project = 'oss-fuzz'
base_images_project = 'oss-fuzz-base'
self.fs.create_dir(test_utils.PROJECT_DIR)
test_utils.create_project_data(test_utils.PROJECT, project_yaml_contents)
expected_build_steps_file_path = test_utils.get_test_data_file_path(
'expected_coverage_build_steps.json')
self.fs.add_real_file(expected_build_steps_file_path)
with open(expected_build_steps_file_path) as expected_build_steps_file:
expected_coverage_build_steps = json.load(expected_build_steps_file)
with ndb.Client().context():
Project(name='test-project',
project_yaml_contents=project_yaml_contents,
dockerfile_contents=dockerfile_contents).put()
dockerfile_lines = dockerfile_contents.split('\n')
build_steps = get_build_steps('test-project', project_yaml_contents,
dockerfile_lines, image_project,
base_images_project)
config = build_project.Config(False, False, None, False)
project_yaml, dockerfile = build_project.get_project_data(
test_utils.PROJECT)
build_steps = build_and_run_coverage.get_build_steps(
test_utils.PROJECT, project_yaml, dockerfile, test_utils.IMAGE_PROJECT,
test_utils.BASE_IMAGES_PROJECT, config)
self.assertEqual(build_steps, expected_coverage_build_steps)
@classmethod
def tearDownClass(cls):
test_utils.cleanup_emulator(cls.ds_emulator)
if __name__ == '__main__':
unittest.main(exit=False)

View File

@@ -83,11 +83,22 @@ def get_targets_list_url(bucket, project, sanitizer):
return url
def _get_targets_list(project_name):
def get_upload_bucket(engine, testing=False, architecture='x86_64'):
"""Returns the upload bucket for |engine|. Returns the testing bucket if
|testing|."""
bucket = ENGINE_INFO[engine].upload_bucket
if architecture != 'x86_64':
bucket += '-' + architecture
if testing:
bucket += '-testing'
return bucket
def _get_targets_list(project_name, testing):
"""Returns target list."""
# libFuzzer ASan is the default configuration, get list of targets from it.
url = get_targets_list_url(ENGINE_INFO['libfuzzer'].upload_bucket,
project_name, 'address')
bucket = get_upload_bucket('libfuzzer', testing)
url = get_targets_list_url(bucket, project_name, 'address')
url = urlparse.urljoin(GCS_URL_BASENAME, url)
response = requests.get(url)
@@ -136,10 +147,10 @@ def get_signed_url(path, method='PUT', content_type=''):
return f'https://storage.googleapis.com{path}?{urlparse.urlencode(values)}'
def download_corpora_steps(project_name):
def download_corpora_steps(project_name, testing):
"""Returns GCB steps for downloading corpora backups for the given project.
"""
fuzz_targets = _get_targets_list(project_name)
fuzz_targets = _get_targets_list(project_name, testing)
if not fuzz_targets:
sys.stderr.write('No fuzz targets found for project "%s".\n' % project_name)
return None
@@ -205,15 +216,61 @@ def gsutil_rm_rf_step(url):
return step
def project_image_steps(name, image, language):
def get_pull_test_image_steps():
"""Returns steps to pull testing versions of base-images and tag them so that
they are used in builds."""
images = ['gcr.io/oss-fuzz-base/base-builder']
steps = []
for image in images:
test_image = image + '-testing'
steps.append({
'name': 'gcr.io/cloud-builders/docker',
'args': [
'pull',
test_image,
],
'waitFor': '-' # Start this immediately, don't wait for previous step.
})
# This step is hacky but gives us great flexibility. OSS-Fuzz has hardcoded
# references to gcr.io/oss-fuzz-base/base-builder (in dockerfiles, for
# example) and gcr.io/oss-fuzz-base/base-runner (in this build code). But the
# testing versions of those images are called
# gcr.io/oss-fuzz-base/base-builder-testing and
# gcr.io/oss-fuzz-base/base-runner-testing. How can we get the build to use
# the testing images instead of the real ones? By doing this step: tagging
# the test image with the non-test version, so that the test version is used
# instead of pulling the real one.
steps.append({
'name': 'gcr.io/cloud-builders/docker',
'args': ['tag', test_image, image],
})
return steps
def get_srcmap_step_id():
"""Returns the id for the srcmap step."""
return 'srcmap'
def project_image_steps(name, image, language, branch=None, test_images=False):
"""Returns GCB steps to build OSS-Fuzz project image."""
steps = [{
clone_step = {
'args': [
'clone',
'https://github.com/google/oss-fuzz.git',
'clone', 'https://github.com/google/oss-fuzz.git', '--depth', '1'
],
'name': 'gcr.io/cloud-builders/git',
}, {
}
if branch:
# Do this to support testing other branches.
clone_step['args'].extend(['--branch', branch])
steps = [clone_step]
if test_images:
steps.extend(get_pull_test_image_steps())
srcmap_step_id = get_srcmap_step_id()
steps += [{
'name': 'gcr.io/cloud-builders/docker',
'args': [
'build',
@@ -223,8 +280,7 @@ def project_image_steps(name, image, language):
],
'dir': 'oss-fuzz/projects/' + name,
}, {
'name':
image,
'name': image,
'args': [
'bash', '-c',
'srcmap > /workspace/srcmap.json && cat /workspace/srcmap.json'
@@ -233,6 +289,7 @@ def project_image_steps(name, image, language):
'OSSFUZZ_REVISION=$REVISION_ID',
'FUZZING_LANGUAGE=%s' % language,
],
'id': srcmap_step_id
}]
return steps
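As a usage sketch (not part of the diff; the project and branch names are
placeholders), the new branch and test_images parameters behave roughly like this:

    import build_lib

    steps = build_lib.project_image_steps('example-project',
                                          'gcr.io/oss-fuzz/example-project',
                                          'c++',
                                          branch='my-test-branch',
                                          test_images=True)
    # steps[0] is the shallow clone ('--depth', '1', '--branch', 'my-test-branch'),
    # followed by the pull/tag steps for the '-testing' base images, the docker
    # build step, and the srcmap step whose id is build_lib.get_srcmap_step_id().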

View File

@@ -13,7 +13,7 @@
# limitations under the License.
#
################################################################################
#!/usr/bin/python2
#!/usr/bin/env python3
"""Starts project build on Google Cloud Builder.
Usage: build_project.py <project_dir>
@@ -21,37 +21,27 @@ Usage: build_project.py <project_dir>
from __future__ import print_function
import argparse
import collections
import datetime
import json
import logging
import os
import posixpath
import re
import sys
from googleapiclient.discovery import build as cloud_build
import oauth2client.client
import six
import yaml
from oauth2client.client import GoogleCredentials
from googleapiclient.discovery import build
import build_lib
FUZZING_BUILD_TAG = 'fuzzing'
FUZZING_BUILD_TYPE = 'fuzzing'
GCB_LOGS_BUCKET = 'oss-fuzz-gcb-logs'
CONFIGURATIONS = {
'sanitizer-address': ['SANITIZER=address'],
'sanitizer-dataflow': ['SANITIZER=dataflow'],
'sanitizer-memory': ['SANITIZER=memory'],
'sanitizer-undefined': ['SANITIZER=undefined'],
'engine-libfuzzer': ['FUZZING_ENGINE=libfuzzer'],
'engine-afl': ['FUZZING_ENGINE=afl'],
'engine-honggfuzz': ['FUZZING_ENGINE=honggfuzz'],
'engine-dataflow': ['FUZZING_ENGINE=dataflow'],
'engine-none': ['FUZZING_ENGINE=none'],
}
DEFAULT_ARCHITECTURES = ['x86_64']
DEFAULT_ENGINES = ['libfuzzer', 'afl', 'honggfuzz']
DEFAULT_SANITIZERS = ['address', 'undefined']
@@ -61,18 +51,99 @@ LATEST_VERSION_CONTENT_TYPE = 'text/plain'
QUEUE_TTL_SECONDS = 60 * 60 * 24 # 24 hours.
PROJECTS_DIR = os.path.abspath(
os.path.join(__file__, os.path.pardir, os.path.pardir, os.path.pardir,
os.path.pardir, 'projects'))
def usage():
"""Exit with code 1 and display syntax to use this file."""
sys.stderr.write('Usage: ' + sys.argv[0] + ' <project_dir>\n')
sys.exit(1)
DEFAULT_GCB_OPTIONS = {'machineType': 'N1_HIGHCPU_32'}
Config = collections.namedtuple(
'Config', ['testing', 'test_images', 'branch', 'parallel'])
WORKDIR_REGEX = re.compile(r'\s*WORKDIR\s*([^\s]+)')
def set_yaml_defaults(project_name, project_yaml, image_project):
"""Set project.yaml's default parameters."""
class Build: # pylint: disable=too-few-public-methods
"""Class representing the configuration for a build."""
def __init__(self, fuzzing_engine, sanitizer, architecture):
self.fuzzing_engine = fuzzing_engine
self.sanitizer = sanitizer
self.architecture = architecture
self.targets_list_filename = build_lib.get_targets_list_filename(
self.sanitizer)
@property
def out(self):
"""Returns the out directory for the build."""
return posixpath.join(
'/workspace/out/',
f'{self.fuzzing_engine}-{self.sanitizer}-{self.architecture}')
def get_project_data(project_name):
"""Returns a tuple containing the contents of the project.yaml and Dockerfile
of |project_name|. Raises a FileNotFoundError if there is no Dockerfile for
|project_name|."""
project_dir = os.path.join(PROJECTS_DIR, project_name)
dockerfile_path = os.path.join(project_dir, 'Dockerfile')
try:
with open(dockerfile_path) as dockerfile:
dockerfile = dockerfile.read()
except FileNotFoundError:
logging.error('Project "%s" does not have a dockerfile.', project_name)
raise
project_yaml_path = os.path.join(project_dir, 'project.yaml')
with open(project_yaml_path, 'r') as project_yaml_file_handle:
project_yaml = yaml.safe_load(project_yaml_file_handle)
return project_yaml, dockerfile
class Project: # pylint: disable=too-many-instance-attributes
"""Class representing an OSS-Fuzz project."""
def __init__(self, name, project_yaml, dockerfile, image_project):
self.name = name
self.image_project = image_project
self.workdir = workdir_from_dockerfile(dockerfile)
set_yaml_defaults(project_yaml)
self._sanitizers = project_yaml['sanitizers']
self.disabled = project_yaml['disabled']
self.architectures = project_yaml['architectures']
self.fuzzing_engines = project_yaml['fuzzing_engines']
self.coverage_extra_args = project_yaml['coverage_extra_args']
self.labels = project_yaml['labels']
self.fuzzing_language = project_yaml['language']
self.run_tests = project_yaml['run_tests']
@property
def sanitizers(self):
"""Returns processed sanitizers."""
assert isinstance(self._sanitizers, list)
processed_sanitizers = []
for sanitizer in self._sanitizers:
if isinstance(sanitizer, six.string_types):
processed_sanitizers.append(sanitizer)
elif isinstance(sanitizer, dict):
for key in sanitizer.keys():
processed_sanitizers.append(key)
return processed_sanitizers
@property
def image(self):
"""Returns the docker image for the project."""
return f'gcr.io/{self.image_project}/{self.name}'
def get_last_step_id(steps):
"""Returns the id of the last step in |steps|."""
return steps[-1]['id']
def set_yaml_defaults(project_yaml):
"""Sets project.yaml's default parameters."""
project_yaml.setdefault('disabled', False)
project_yaml.setdefault('name', project_name)
project_yaml.setdefault('image', f'gcr.io/{image_project}/{project_name}')
project_yaml.setdefault('architectures', DEFAULT_ARCHITECTURES)
project_yaml.setdefault('sanitizers', DEFAULT_SANITIZERS)
project_yaml.setdefault('fuzzing_engines', DEFAULT_ENGINES)
@@ -81,273 +152,303 @@ def set_yaml_defaults(project_name, project_yaml, image_project):
project_yaml.setdefault('labels', {})
def is_supported_configuration(fuzzing_engine, sanitizer, architecture):
def is_supported_configuration(build):
"""Check if the given configuration is supported."""
fuzzing_engine_info = build_lib.ENGINE_INFO[fuzzing_engine]
if architecture == 'i386' and sanitizer != 'address':
fuzzing_engine_info = build_lib.ENGINE_INFO[build.fuzzing_engine]
if build.architecture == 'i386' and build.sanitizer != 'address':
return False
return (sanitizer in fuzzing_engine_info.supported_sanitizers and
architecture in fuzzing_engine_info.supported_architectures)
return (build.sanitizer in fuzzing_engine_info.supported_sanitizers and
build.architecture in fuzzing_engine_info.supported_architectures)
def get_sanitizers(project_yaml):
"""Retrieve sanitizers from project.yaml."""
sanitizers = project_yaml['sanitizers']
assert isinstance(sanitizers, list)
processed_sanitizers = []
for sanitizer in sanitizers:
if isinstance(sanitizer, six.string_types):
processed_sanitizers.append(sanitizer)
elif isinstance(sanitizer, dict):
for key in sanitizer.keys():
processed_sanitizers.append(key)
return processed_sanitizers
def workdir_from_dockerfile(dockerfile_lines):
"""Parse WORKDIR from the Dockerfile."""
workdir_regex = re.compile(r'\s*WORKDIR\s*([^\s]+)')
def workdir_from_dockerfile(dockerfile):
"""Parses WORKDIR from the Dockerfile."""
dockerfile_lines = dockerfile.split('\n')
for line in dockerfile_lines:
match = re.match(workdir_regex, line)
match = re.match(WORKDIR_REGEX, line)
if match:
# We need to escape '$' since they're used for substitutions in Container
# Builder builds.
return match.group(1).replace('$', '$$')
return None
return '/src'
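A small usage sketch (the Dockerfile contents here are made up): WORKDIR values
are still escaped for Cloud Build substitutions, and '/src' is now returned
directly when no WORKDIR is present:

    import build_project

    dockerfile = 'FROM gcr.io/oss-fuzz-base/base-builder\nWORKDIR $SRC/example\n'
    print(build_project.workdir_from_dockerfile(dockerfile))        # $$SRC/example
    print(build_project.workdir_from_dockerfile('FROM scratch\n'))  # /src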
def load_project_yaml(project_name, project_yaml_file, image_project):
"""Loads project yaml and sets default values."""
project_yaml = yaml.safe_load(project_yaml_file)
set_yaml_defaults(project_name, project_yaml, image_project)
return project_yaml
def get_datetime_now():
"""Returns datetime.datetime.now(). Used for mocking."""
return datetime.datetime.now()
# pylint: disable=too-many-locals, too-many-statements, too-many-branches
def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
image_project, base_images_project):
def get_env(fuzzing_language, build):
"""Returns an environment for building. The environment is returned as a list
and is suitable for use as the "env" parameter in a GCB build step. The
environment variables are based on the values of |fuzzing_language| and
|build|."""
env_dict = {
'FUZZING_LANGUAGE': fuzzing_language,
'FUZZING_ENGINE': build.fuzzing_engine,
'SANITIZER': build.sanitizer,
'ARCHITECTURE': build.architecture,
# Set HOME so that it doesn't point to a persisted volume (see
# https://github.com/google/oss-fuzz/issues/6035).
'HOME': '/root',
'OUT': build.out,
}
return list(sorted([f'{key}={value}' for key, value in env_dict.items()]))
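For reference, the sorted environment this produces for one configuration looks
roughly like the expected test data further below (a sketch, not part of the diff):

    import build_project

    build = build_project.Build('afl', 'address', 'x86_64')
    print(build_project.get_env('c++', build))
    # ['ARCHITECTURE=x86_64', 'FUZZING_ENGINE=afl', 'FUZZING_LANGUAGE=c++',
    #  'HOME=/root', 'OUT=/workspace/out/afl-address-x86_64', 'SANITIZER=address']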
def get_compile_step(project, build, env, parallel):
"""Returns the GCB step for compiling |projects| fuzzers using |env|. The type
of build is specified by |build|."""
failure_msg = (
'*' * 80 + '\nFailed to build.\nTo reproduce, run:\n'
f'python infra/helper.py build_image {project.name}\n'
'python infra/helper.py build_fuzzers --sanitizer '
f'{build.sanitizer} --engine {build.fuzzing_engine} --architecture '
f'{build.architecture} {project.name}\n' + '*' * 80)
compile_step = {
'name': project.image,
'env': env,
'args': [
'bash',
'-c',
# Remove /out to make sure there are non instrumented binaries.
# `cd /src && cd {workdir}` (where {workdir} is parsed from the
# Dockerfile). Container Builder overrides our workdir so we need
# to add this step to set it back.
(f'rm -r /out && cd /src && cd {project.workdir} && '
f'mkdir -p {build.out} && compile || '
f'(echo "{failure_msg}" && false)'),
],
'id': get_id('compile', build),
}
if parallel:
maybe_add_parallel(compile_step, build_lib.get_srcmap_step_id(), parallel)
return compile_step
def maybe_add_parallel(step, wait_for_id, parallel):
"""Makes |step| run immediately after |wait_for_id| if |parallel|. Mutates
|step|."""
if not parallel:
return
step['waitFor'] = wait_for_id
def get_id(step_type, build):
"""Returns a unique step id based on |step_type| and |build|. Useful for
parallelizing builds."""
return (f'{step_type}-{build.fuzzing_engine}-{build.sanitizer}'
f'-{build.architecture}')
def get_build_steps( # pylint: disable=too-many-locals, too-many-statements, too-many-branches, too-many-arguments
project_name, project_yaml, dockerfile, image_project, base_images_project,
config):
"""Returns build steps for project."""
project_yaml = load_project_yaml(project_name, project_yaml_file,
image_project)
if project_yaml['disabled']:
logging.info('Project "%s" is disabled.', project_name)
project = Project(project_name, project_yaml, dockerfile, image_project)
if project.disabled:
logging.info('Project "%s" is disabled.', project.name)
return []
name = project_yaml['name']
image = project_yaml['image']
language = project_yaml['language']
run_tests = project_yaml['run_tests']
time_stamp = datetime.datetime.now().strftime('%Y%m%d%H%M')
timestamp = get_datetime_now().strftime('%Y%m%d%H%M')
build_steps = build_lib.project_image_steps(name, image, language)
build_steps = build_lib.project_image_steps(project.name,
project.image,
project.fuzzing_language,
branch=config.branch,
test_images=config.test_images)
# Sort engines to make AFL first to test if libFuzzer has an advantage in
# finding bugs first since it is generally built first.
for fuzzing_engine in sorted(project_yaml['fuzzing_engines']):
for sanitizer in get_sanitizers(project_yaml):
for architecture in project_yaml['architectures']:
if not is_supported_configuration(fuzzing_engine, sanitizer,
architecture):
for fuzzing_engine in sorted(project.fuzzing_engines):
for sanitizer in project.sanitizers:
for architecture in project.architectures:
build = Build(fuzzing_engine, sanitizer, architecture)
if not is_supported_configuration(build):
continue
env = CONFIGURATIONS['engine-' + fuzzing_engine][:]
env.extend(CONFIGURATIONS['sanitizer-' + sanitizer])
out = '/workspace/out/' + sanitizer
stamped_name = '-'.join([name, sanitizer, time_stamp])
latest_version_file = '-'.join(
[name, sanitizer, LATEST_VERSION_FILENAME])
zip_file = stamped_name + '.zip'
stamped_srcmap_file = stamped_name + '.srcmap.json'
bucket = build_lib.ENGINE_INFO[fuzzing_engine].upload_bucket
if architecture != 'x86_64':
bucket += '-' + architecture
env = get_env(project.fuzzing_language, build)
compile_step = get_compile_step(project, build, env, config.parallel)
build_steps.append(compile_step)
upload_url = build_lib.get_signed_url(
build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, name, zip_file))
srcmap_url = build_lib.get_signed_url(
build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, name,
stamped_srcmap_file))
latest_version_url = build_lib.GCS_UPLOAD_URL_FORMAT.format(
bucket, name, latest_version_file)
latest_version_url = build_lib.get_signed_url(
latest_version_url, content_type=LATEST_VERSION_CONTENT_TYPE)
targets_list_filename = build_lib.get_targets_list_filename(sanitizer)
targets_list_url = build_lib.get_signed_url(
build_lib.get_targets_list_url(bucket, name, sanitizer))
env.append('OUT=' + out)
env.append('MSAN_LIBS_PATH=/workspace/msan')
env.append('ARCHITECTURE=' + architecture)
env.append('FUZZING_LANGUAGE=' + language)
# Set HOME so that it doesn't point to a persisted volume (see
# https://github.com/google/oss-fuzz/issues/6035).
env.append('HOME=/root')
workdir = workdir_from_dockerfile(dockerfile_lines)
if not workdir:
workdir = '/src'
failure_msg = ('*' * 80 + '\nFailed to build.\nTo reproduce, run:\n'
f'python infra/helper.py build_image {name}\n'
'python infra/helper.py build_fuzzers --sanitizer '
f'{sanitizer} --engine {fuzzing_engine} --architecture '
f'{architecture} {name}\n' + '*' * 80)
build_steps.append(
# compile
{
'name':
image,
'env':
env,
'args': [
'bash',
'-c',
# Remove /out to break loudly when a build script
# incorrectly uses /out instead of $OUT.
# `cd /src && cd {workdir}` (where {workdir} is parsed from
# the Dockerfile). Container Builder overrides our workdir
# so we need to add this step to set it back.
(f'rm -r /out && cd /src && cd {workdir} '
f'&& mkdir -p {out} && compile || (echo "{failure_msg}" '
'&& false)'),
],
})
if sanitizer == 'memory':
# Patch dynamic libraries to use instrumented ones.
build_steps.append({
if project.run_tests:
failure_msg = (
'*' * 80 + '\nBuild checks failed.\n'
'To reproduce, run:\n'
f'python infra/helper.py build_image {project.name}\n'
'python infra/helper.py build_fuzzers --sanitizer '
f'{build.sanitizer} --engine {build.fuzzing_engine} '
f'--architecture {build.architecture} {project.name}\n'
'python infra/helper.py check_build --sanitizer '
f'{build.sanitizer} --engine {build.fuzzing_engine} '
f'--architecture {build.architecture} {project.name}\n' +
'*' * 80)
# Test fuzz targets.
test_step = {
'name':
f'gcr.io/{base_images_project}/msan-libs-builder',
get_runner_image_name(base_images_project, config.testing),
'env':
env,
'args': [
'bash',
'-c',
# TODO(ochang): Replace with just patch_build.py once
# permission in image is fixed.
f'python /usr/local/bin/patch_build.py {out}'
'bash', '-c',
f'test_all.py || (echo "{failure_msg}" && false)'
],
})
'id':
get_id('build-check', build)
}
maybe_add_parallel(test_step, get_last_step_id(build_steps),
config.parallel)
build_steps.append(test_step)
if run_tests:
failure_msg = ('*' * 80 + '\nBuild checks failed.\n'
'To reproduce, run:\n'
f'python infra/helper.py build_image {name}\n'
'python infra/helper.py build_fuzzers --sanitizer '
f'{sanitizer} --engine {fuzzing_engine} '
f'--architecture {architecture} {name}\n'
'python infra/helper.py check_build --sanitizer '
f'{sanitizer} --engine {fuzzing_engine} '
f'--architecture {architecture} {name}\n' + '*' * 80)
build_steps.append(
# test binaries
{
'name':
f'gcr.io/{base_images_project}/base-runner',
'env':
env,
'args': [
'bash', '-c',
f'test_all.py || (echo "{failure_msg}" && false)'
],
})
if project_yaml['labels']:
# write target labels
if project.labels:
# Write target labels.
build_steps.append({
'name':
image,
project.image,
'env':
env,
'args': [
'/usr/local/bin/write_labels.py',
json.dumps(project_yaml['labels']),
out,
json.dumps(project.labels),
build.out,
],
})
if sanitizer == 'dataflow' and fuzzing_engine == 'dataflow':
dataflow_steps = dataflow_post_build_steps(name, env,
base_images_project)
if build.sanitizer == 'dataflow' and build.fuzzing_engine == 'dataflow':
dataflow_steps = dataflow_post_build_steps(project.name, env,
base_images_project,
config.testing)
if dataflow_steps:
build_steps.extend(dataflow_steps)
else:
sys.stderr.write('Skipping dataflow post build steps.\n')
build_steps.extend([
# generate targets list
# Generate targets list.
{
'name':
f'gcr.io/{base_images_project}/base-runner',
get_runner_image_name(base_images_project, config.testing),
'env':
env,
'args': [
'bash', '-c',
f'targets_list > /workspace/{targets_list_filename}'
f'targets_list > /workspace/{build.targets_list_filename}'
],
},
# zip binaries
{
'name': image,
'args': ['bash', '-c', f'cd {out} && zip -r {zip_file} *'],
},
# upload srcmap
{
'name': f'gcr.io/{base_images_project}/uploader',
'args': [
'/workspace/srcmap.json',
srcmap_url,
],
},
# upload binaries
{
'name': f'gcr.io/{base_images_project}/uploader',
'args': [
os.path.join(out, zip_file),
upload_url,
],
},
# upload targets list
{
'name':
f'gcr.io/{base_images_project}/uploader',
'args': [
f'/workspace/{targets_list_filename}',
targets_list_url,
],
},
# upload the latest.version file
build_lib.http_upload_step(zip_file, latest_version_url,
LATEST_VERSION_CONTENT_TYPE),
# cleanup
{
'name': image,
'args': [
'bash',
'-c',
'rm -r ' + out,
],
},
}
])
upload_steps = get_upload_steps(project, build, timestamp,
base_images_project, config.testing)
build_steps.extend(upload_steps)
return build_steps
def dataflow_post_build_steps(project_name, env, base_images_project):
def get_targets_list_upload_step(bucket, project, build, uploader_image):
"""Returns the step to upload targets_list for |build| of |project| to
|bucket|."""
targets_list_url = build_lib.get_signed_url(
build_lib.get_targets_list_url(bucket, project.name, build.sanitizer))
return {
'name': uploader_image,
'args': [
f'/workspace/{build.targets_list_filename}',
targets_list_url,
],
}
def get_uploader_image(base_images_project):
"""Returns the uploader base image in |base_images_project|."""
return f'gcr.io/{base_images_project}/uploader'
def get_upload_steps(project, build, timestamp, base_images_project, testing):
"""Returns the steps for uploading the fuzzer build specified by |project| and
|build|. Uses |timestamp| for naming the uploads. Uses |base_images_project|
and |testing| for determining which image to use for the upload."""
bucket = build_lib.get_upload_bucket(build.fuzzing_engine, testing)
stamped_name = '-'.join([project.name, build.sanitizer, timestamp])
zip_file = stamped_name + '.zip'
upload_url = build_lib.get_signed_url(
build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, project.name, zip_file))
stamped_srcmap_file = stamped_name + '.srcmap.json'
srcmap_url = build_lib.get_signed_url(
build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, project.name,
stamped_srcmap_file))
latest_version_file = '-'.join(
[project.name, build.sanitizer, LATEST_VERSION_FILENAME])
latest_version_url = build_lib.GCS_UPLOAD_URL_FORMAT.format(
bucket, project.name, latest_version_file)
latest_version_url = build_lib.get_signed_url(
latest_version_url, content_type=LATEST_VERSION_CONTENT_TYPE)
uploader_image = get_uploader_image(base_images_project)
upload_steps = [
# Zip binaries.
{
'name': project.image,
'args': ['bash', '-c', f'cd {build.out} && zip -r {zip_file} *'],
},
# Upload srcmap.
{
'name': uploader_image,
'args': [
'/workspace/srcmap.json',
srcmap_url,
],
},
# Upload binaries.
{
'name': uploader_image,
'args': [
os.path.join(build.out, zip_file),
upload_url,
],
},
# Upload targets list.
get_targets_list_upload_step(bucket, project, build, uploader_image),
# Upload the latest.version file.
build_lib.http_upload_step(zip_file, latest_version_url,
LATEST_VERSION_CONTENT_TYPE),
# Cleanup.
get_cleanup_step(project, build),
]
return upload_steps
def get_cleanup_step(project, build):
"""Returns the step for cleaning up after doing |build| of |project|."""
return {
'name': project.image,
'args': [
'bash',
'-c',
'rm -r ' + build.out,
],
}
def get_runner_image_name(base_images_project, testing):
"""Returns the runner image that should be used, based on
|base_images_project|. Returns the testing image if |testing|."""
image = f'gcr.io/{base_images_project}/base-runner'
if testing:
image += '-testing'
return image
def dataflow_post_build_steps(project_name, env, base_images_project, testing):
"""Appends dataflow post build steps."""
steps = build_lib.download_corpora_steps(project_name)
steps = build_lib.download_corpora_steps(project_name, testing)
if not steps:
return None
steps.append({
'name':
f'gcr.io/{base_images_project}/base-runner',
get_runner_image_name(base_images_project, testing),
'env':
env + [
'COLLECT_DFT_TIMEOUT=2h',
@@ -368,63 +469,126 @@ def dataflow_post_build_steps(project_name, env, base_images_project):
return steps
def get_logs_url(build_id, image_project='oss-fuzz'):
def get_logs_url(build_id, cloud_project='oss-fuzz'):
"""Returns url where logs are displayed for the build."""
url_format = ('https://console.developers.google.com/logs/viewer?'
'resource=build%2Fbuild_id%2F{0}&project={1}')
return url_format.format(build_id, image_project)
return ('https://console.cloud.google.com/logs/viewer?'
f'resource=build%2Fbuild_id%2F{build_id}&project={cloud_project}')
def get_gcb_url(build_id, cloud_project='oss-fuzz'):
"""Returns url where logs are displayed for the build."""
return (f'https://console.cloud.google.com/cloud-build/builds/{build_id}'
f'?project={cloud_project}')
# pylint: disable=no-member
def run_build(build_steps, project_name, tag):
"""Run the build for given steps on cloud build."""
def run_build(oss_fuzz_project,
build_steps,
credentials,
build_type,
cloud_project='oss-fuzz'):
"""Run the build for given steps on cloud build. |build_steps| are the steps
to run. |credentials| are used to authenticate to GCB and build in
|cloud_project|. |oss_fuzz_project| and |build_type| are used to tag the build
in GCB so the build can be queried for debugging purposes."""
options = {}
if 'GCB_OPTIONS' in os.environ:
options = yaml.safe_load(os.environ['GCB_OPTIONS'])
else:
options = DEFAULT_GCB_OPTIONS
tags = [oss_fuzz_project + '-' + build_type, build_type, oss_fuzz_project]
build_body = {
'steps': build_steps,
'timeout': str(build_lib.BUILD_TIMEOUT) + 's',
'options': options,
'logsBucket': GCB_LOGS_BUCKET,
'tags': [project_name + '-' + tag,],
'tags': tags,
'queueTtl': str(QUEUE_TTL_SECONDS) + 's',
}
credentials = GoogleCredentials.get_application_default()
cloudbuild = build('cloudbuild',
'v1',
credentials=credentials,
cache_discovery=False)
build_info = cloudbuild.projects().builds().create(projectId='oss-fuzz',
cloudbuild = cloud_build('cloudbuild',
'v1',
credentials=credentials,
cache_discovery=False)
build_info = cloudbuild.projects().builds().create(projectId=cloud_project,
body=build_body).execute()
build_id = build_info['metadata']['build']['id']
print('Logs:', get_logs_url(build_id), file=sys.stderr)
print(build_id)
logging.info('Build ID: %s', build_id)
logging.info('Logs: %s', get_logs_url(build_id, cloud_project))
logging.info('Cloud build page: %s', get_gcb_url(build_id, cloud_project))
return build_id
def get_args(description):
"""Parses command line arguments and returns them. Suitable for a build
script."""
parser = argparse.ArgumentParser(sys.argv[0], description=description)
parser.add_argument('projects', help='Projects.', nargs='+')
parser.add_argument('--testing',
action='store_true',
required=False,
default=False,
help='Upload to testing buckets.')
parser.add_argument('--test-images',
action='store_true',
required=False,
default=False,
help='Use testing base-images.')
parser.add_argument('--branch',
required=False,
default=None,
help='Use specified OSS-Fuzz branch.')
parser.add_argument('--parallel',
action='store_true',
required=False,
default=False,
help='Do builds in parallel.')
return parser.parse_args()
def build_script_main(script_description, get_build_steps_func, build_type):
"""Gets arguments from command line using |script_description| as helpstring
description. Gets build_steps using |get_build_steps_func| and then runs those
steps on GCB, tagging the builds with |build_type|. Returns 0 on success, 1 on
failure."""
args = get_args(script_description)
logging.basicConfig(level=logging.INFO)
image_project = 'oss-fuzz'
base_images_project = 'oss-fuzz-base'
credentials = oauth2client.client.GoogleCredentials.get_application_default()
error = False
config = Config(args.testing, args.test_images, args.branch, args.parallel)
for project_name in args.projects:
logging.info('Getting steps for: "%s".', project_name)
try:
project_yaml_contents, dockerfile_contents = get_project_data(
project_name)
except FileNotFoundError:
logging.error('Couldn\'t get project data. Skipping %s.', project_name)
error = True
continue
steps = get_build_steps_func(project_name, project_yaml_contents,
dockerfile_contents, image_project,
base_images_project, config)
if not steps:
logging.error('No steps. Skipping %s.', project_name)
error = True
continue
run_build(project_name, steps, credentials, build_type)
return 0 if not error else 1
def main():
"""Build and run projects."""
if len(sys.argv) != 2:
usage()
image_project = 'oss-fuzz'
base_images_project = 'oss-fuzz-base'
project_dir = sys.argv[1].rstrip(os.path.sep)
dockerfile_path = os.path.join(project_dir, 'Dockerfile')
project_yaml_path = os.path.join(project_dir, 'project.yaml')
project_name = os.path.basename(project_dir)
with open(dockerfile_path) as dockerfile:
dockerfile_lines = dockerfile.readlines()
with open(project_yaml_path) as project_yaml_file:
steps = get_build_steps(project_name, project_yaml_file, dockerfile_lines,
image_project, base_images_project)
run_build(steps, project_name, FUZZING_BUILD_TAG)
return build_script_main('Builds a project on GCB.', get_build_steps,
FUZZING_BUILD_TYPE)
if __name__ == '__main__':
main()
sys.exit(main())

View File

@@ -0,0 +1,77 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
"""Unit tests for build_project."""
import json
import os
import sys
import unittest
from unittest import mock
from pyfakefs import fake_filesystem_unittest
FUNCTIONS_DIR = os.path.dirname(__file__)
sys.path.append(FUNCTIONS_DIR)
# pylint: disable=wrong-import-position
import build_project
import test_utils
# pylint: disable=no-member
class TestRequestCoverageBuilds(fake_filesystem_unittest.TestCase):
"""Unit tests for sync."""
def setUp(self):
self.maxDiff = None # pylint: disable=invalid-name
self.setUpPyfakefs()
@mock.patch('build_lib.get_signed_url', return_value='test_url')
@mock.patch('build_project.get_datetime_now',
return_value=test_utils.FAKE_DATETIME)
def test_get_build_steps(self, mock_url, mock_get_datetime_now):
"""Test for get_build_steps."""
del mock_url, mock_get_datetime_now
project_yaml_contents = ('language: c++\n'
'sanitizers:\n'
' - address\n'
' - memory\n'
' - undefined\n'
'architectures:\n'
' - x86_64\n'
' - i386\n')
self.fs.create_dir(test_utils.PROJECT_DIR)
test_utils.create_project_data(test_utils.PROJECT, project_yaml_contents)
expected_build_steps_file_path = test_utils.get_test_data_file_path(
'expected_build_steps.json')
self.fs.add_real_file(expected_build_steps_file_path)
with open(expected_build_steps_file_path) as expected_build_steps_file:
expected_build_steps = json.load(expected_build_steps_file)
config = build_project.Config(False, False, None, False)
project_yaml, dockerfile = build_project.get_project_data(
test_utils.PROJECT)
build_steps = build_project.get_build_steps(test_utils.PROJECT,
project_yaml, dockerfile,
test_utils.IMAGE_PROJECT,
test_utils.BASE_IMAGES_PROJECT,
config)
self.assertEqual(build_steps, expected_build_steps)
if __name__ == '__main__':
unittest.main(exit=False)

View File

@@ -94,8 +94,8 @@ def update_scheduler(cloud_scheduler_client, project, schedule, tag):
def delete_project(cloud_scheduler_client, project):
"""Delete the given project."""
logging.info('Deleting project %s', project.name)
for tag in (build_project.FUZZING_BUILD_TAG,
build_and_run_coverage.COVERAGE_BUILD_TAG):
for tag in (build_project.FUZZING_BUILD_TYPE,
build_and_run_coverage.COVERAGE_BUILD_TYPE):
try:
delete_scheduler(cloud_scheduler_client, project.name, tag)
except exceptions.NotFound:
@@ -124,9 +124,9 @@ def sync_projects(cloud_scheduler_client, projects):
try:
create_scheduler(cloud_scheduler_client, project_name,
projects[project_name].schedule,
build_project.FUZZING_BUILD_TAG, FUZZING_BUILD_TOPIC)
build_project.FUZZING_BUILD_TYPE, FUZZING_BUILD_TOPIC)
create_scheduler(cloud_scheduler_client, project_name, COVERAGE_SCHEDULE,
build_and_run_coverage.COVERAGE_BUILD_TAG,
build_and_run_coverage.COVERAGE_BUILD_TYPE,
COVERAGE_BUILD_TOPIC)
project_metadata = projects[project_name]
Project(name=project_name,
@@ -149,7 +149,7 @@ def sync_projects(cloud_scheduler_client, projects):
logging.info('Schedule changed.')
update_scheduler(cloud_scheduler_client, project,
projects[project.name].schedule,
build_project.FUZZING_BUILD_TAG)
build_project.FUZZING_BUILD_TYPE)
project.schedule = project_metadata.schedule
project_changed = True
except exceptions.GoogleAPICallError as error:

View File

@@ -15,13 +15,10 @@
################################################################################
"""Cloud function to request builds."""
import base64
import logging
import google.auth
from googleapiclient.discovery import build
from google.cloud import ndb
import build_lib
import build_project
from datastore_entities import BuildsHistory
from datastore_entities import Project
@@ -56,45 +53,27 @@ def get_project_data(project_name):
if not project:
raise RuntimeError(
f'Project {project_name} not available in cloud datastore')
project_yaml_contents = project.project_yaml_contents
dockerfile_lines = project.dockerfile_contents.split('\n')
return (project_yaml_contents, dockerfile_lines)
return project.project_yaml_contents, project.dockerfile_contents
def get_build_steps(project_name, image_project, base_images_project):
"""Retrieve build steps."""
# TODO(metzman): Figure out if we need this.
project_yaml_contents, dockerfile_lines = get_project_data(project_name)
build_config = build_project.Config(False, False, False, False)
return build_project.get_build_steps(project_name, project_yaml_contents,
dockerfile_lines, image_project,
base_images_project)
base_images_project, build_config)
# pylint: disable=no-member
def run_build(project_name, image_project, build_steps, credentials, tag):
"""Execute build on cloud build."""
build_body = {
'steps': build_steps,
'timeout': str(build_lib.BUILD_TIMEOUT) + 's',
'options': {
'machineType': 'N1_HIGHCPU_32'
},
'logsBucket': build_project.GCB_LOGS_BUCKET,
'tags': [project_name + '-' + tag,],
'queueTtl': str(QUEUE_TTL_SECONDS) + 's',
}
cloudbuild = build('cloudbuild',
'v1',
credentials=credentials,
cache_discovery=False)
build_info = cloudbuild.projects().builds().create(projectId=image_project,
body=build_body).execute()
build_id = build_info['metadata']['build']['id']
update_build_history(project_name, build_id, tag)
logging.info('Build ID: %s', build_id)
logging.info('Logs: %s', build_project.get_logs_url(build_id, image_project))
def run_build(oss_fuzz_project, build_steps, credentials, build_type,
cloud_project):
"""Execute build on cloud build. Wrapper around build_project.py that also
updates the db."""
build_id = build_project.run_build(oss_fuzz_project, build_steps, credentials,
build_type, cloud_project)
update_build_history(oss_fuzz_project, build_id, build_type)
# pylint: disable=no-member
@@ -107,9 +86,14 @@ def request_build(event, context):
raise RuntimeError('Project name missing from payload')
with ndb.Client().context():
credentials, image_project = google.auth.default()
build_steps = get_build_steps(project_name, image_project, BASE_PROJECT)
credentials, cloud_project = google.auth.default()
build_steps = get_build_steps(project_name, cloud_project, BASE_PROJECT)
if not build_steps:
return
run_build(project_name, image_project, build_steps, credentials,
build_project.FUZZING_BUILD_TAG)
run_build(
project_name,
build_steps,
credentials,
build_project.FUZZING_BUILD_TYPE,
cloud_project=cloud_project,
)

View File

@@ -14,23 +14,17 @@
#
################################################################################
"""Unit tests for Cloud Function request builds which builds projects."""
import json
import datetime
import os
import sys
import unittest
from unittest import mock
from google.cloud import ndb
sys.path.append(os.path.dirname(__file__))
# pylint: disable=wrong-import-position
from datastore_entities import BuildsHistory
from datastore_entities import Project
from request_build import get_build_steps
from request_build import get_project_data
from request_build import update_build_history
import datastore_entities
import request_build
import test_utils
# pylint: disable=no-member
@@ -50,65 +44,40 @@ class TestRequestBuilds(unittest.TestCase):
test_utils.reset_ds_emulator()
self.maxDiff = None # pylint: disable=invalid-name
@mock.patch('build_lib.get_signed_url', return_value='test_url')
@mock.patch('datetime.datetime')
def test_get_build_steps(self, mock_url, mock_time):
"""Test for get_build_steps."""
del mock_url, mock_time
datetime.datetime = test_utils.SpoofedDatetime
project_yaml_contents = ('language: c++\n'
'sanitizers:\n'
' - address\n'
'architectures:\n'
' - x86_64\n')
image_project = 'oss-fuzz'
base_images_project = 'oss-fuzz-base'
expected_build_steps_file_path = test_utils.get_test_data_file_path(
'expected_build_steps.json')
with open(expected_build_steps_file_path) as expected_build_steps_file:
expected_build_steps = json.load(expected_build_steps_file)
with ndb.Client().context():
Project(name='test-project',
project_yaml_contents=project_yaml_contents,
dockerfile_contents='test line').put()
build_steps = get_build_steps('test-project', image_project,
base_images_project)
self.assertEqual(build_steps, expected_build_steps)
def test_get_build_steps_no_project(self):
"""Test for when project isn't available in datastore."""
with ndb.Client().context():
self.assertRaises(RuntimeError, get_build_steps, 'test-project',
'oss-fuzz', 'oss-fuzz-base')
self.assertRaises(RuntimeError, request_build.get_build_steps,
'test-project', 'oss-fuzz', 'oss-fuzz-base')
def test_build_history(self):
"""Testing build history."""
with ndb.Client().context():
BuildsHistory(id='test-project-fuzzing',
build_tag='fuzzing',
project='test-project',
build_ids=[str(i) for i in range(1, 65)]).put()
update_build_history('test-project', '65', 'fuzzing')
datastore_entities.BuildsHistory(id='test-project-fuzzing',
build_tag='fuzzing',
project='test-project',
build_ids=[str(i) for i in range(1, 65)
]).put()
request_build.update_build_history('test-project', '65', 'fuzzing')
expected_build_ids = [str(i) for i in range(2, 66)]
self.assertEqual(BuildsHistory.query().get().build_ids,
self.assertEqual(datastore_entities.BuildsHistory.query().get().build_ids,
expected_build_ids)
def test_build_history_no_existing_project(self):
"""Testing build history when build history object is missing."""
with ndb.Client().context():
update_build_history('test-project', '1', 'fuzzing')
request_build.update_build_history('test-project', '1', 'fuzzing')
expected_build_ids = ['1']
self.assertEqual(BuildsHistory.query().get().build_ids,
self.assertEqual(datastore_entities.BuildsHistory.query().get().build_ids,
expected_build_ids)
def test_get_project_data(self):
"""Testing get project data."""
with ndb.Client().context():
self.assertRaises(RuntimeError, get_project_data, 'test-project')
self.assertRaises(RuntimeError, request_build.get_project_data,
'test-project')
@classmethod
def tearDownClass(cls):

View File

@@ -37,17 +37,19 @@ def get_build_steps(project_name, image_project, base_images_project):
def request_coverage_build(event, context):
"""Entry point for coverage build cloud function."""
del context #unused
del context # Unused.
if 'data' in event:
project_name = base64.b64decode(event['data']).decode('utf-8')
else:
raise RuntimeError('Project name missing from payload')
with ndb.Client().context():
credentials, image_project = google.auth.default()
build_steps = get_build_steps(project_name, image_project, BASE_PROJECT)
credentials, cloud_project = google.auth.default()
build_steps = get_build_steps(project_name, cloud_project, BASE_PROJECT)
if not build_steps:
return
request_build.run_build(project_name, image_project, build_steps,
request_build.run_build(project_name,
build_steps,
credentials,
build_and_run_coverage.COVERAGE_BUILD_TAG)
build_and_run_coverage.COVERAGE_BUILD_TYPE,
cloud_project=cloud_project)
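The coverage entry point above receives the project name as a base64-encoded payload under the 'data' key. A minimal sketch of constructing such an event locally, assuming only the payload shape shown in the hunk (the helper name below is illustrative, not part of the source):

import base64


def make_coverage_event(project_name):
  # Builds an event dict shaped like the payload request_coverage_build decodes.
  return {'data': base64.b64encode(project_name.encode('utf-8'))}


event = make_coverage_event('test-project')
# request_coverage_build would decode this back to the project name.
assert base64.b64decode(event['data']).decode('utf-8') == 'test-project'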

View File

@ -2,7 +2,9 @@
{
"args": [
"clone",
"https://github.com/google/oss-fuzz.git"
"https://github.com/google/oss-fuzz.git",
"--depth",
"1"
],
"name": "gcr.io/cloud-builders/git"
},
@ -26,52 +28,52 @@
"env": [
"OSSFUZZ_REVISION=$REVISION_ID",
"FUZZING_LANGUAGE=c++"
]
],
"id": "srcmap"
},
{
"name": "gcr.io/oss-fuzz/test-project",
"env": [
"FUZZING_ENGINE=afl",
"SANITIZER=address",
"OUT=/workspace/out/address",
"MSAN_LIBS_PATH=/workspace/msan",
"ARCHITECTURE=x86_64",
"FUZZING_ENGINE=afl",
"FUZZING_LANGUAGE=c++",
"HOME=/root"
"HOME=/root",
"OUT=/workspace/out/afl-address-x86_64",
"SANITIZER=address"
],
"args": [
"bash",
"-c",
"rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/address && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine afl --architecture x86_64 test-project\n********************************************************************************\" && false)"
]
"rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/afl-address-x86_64 && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine afl --architecture x86_64 test-project\n********************************************************************************\" && false)"
],
"id": "compile-afl-address-x86_64"
},
{
"name": "gcr.io/oss-fuzz-base/base-runner",
"env": [
"FUZZING_ENGINE=afl",
"SANITIZER=address",
"OUT=/workspace/out/address",
"MSAN_LIBS_PATH=/workspace/msan",
"ARCHITECTURE=x86_64",
"FUZZING_ENGINE=afl",
"FUZZING_LANGUAGE=c++",
"HOME=/root"
"HOME=/root",
"OUT=/workspace/out/afl-address-x86_64",
"SANITIZER=address"
],
"args": [
"bash",
"-c",
"test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine afl --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer address --engine afl --architecture x86_64 test-project\n********************************************************************************\" && false)"
]
],
"id": "build-check-afl-address-x86_64"
},
{
"name": "gcr.io/oss-fuzz-base/base-runner",
"env": [
"FUZZING_ENGINE=afl",
"SANITIZER=address",
"OUT=/workspace/out/address",
"MSAN_LIBS_PATH=/workspace/msan",
"ARCHITECTURE=x86_64",
"FUZZING_ENGINE=afl",
"FUZZING_LANGUAGE=c++",
"HOME=/root"
"HOME=/root",
"OUT=/workspace/out/afl-address-x86_64",
"SANITIZER=address"
],
"args": [
"bash",
@ -84,7 +86,7 @@
"args": [
"bash",
"-c",
"cd /workspace/out/address && zip -r test-project-address-202001010000.zip *"
"cd /workspace/out/afl-address-x86_64 && zip -r test-project-address-202001010000.zip *"
]
},
{
@ -97,7 +99,7 @@
{
"name": "gcr.io/oss-fuzz-base/uploader",
"args": [
"/workspace/out/address/test-project-address-202001010000.zip",
"/workspace/out/afl-address-x86_64/test-project-address-202001010000.zip",
"test_url"
]
},
@ -125,53 +127,52 @@
"args": [
"bash",
"-c",
"rm -r /workspace/out/address"
"rm -r /workspace/out/afl-address-x86_64"
]
},
{
"name": "gcr.io/oss-fuzz/test-project",
"env": [
"FUZZING_ENGINE=honggfuzz",
"SANITIZER=address",
"OUT=/workspace/out/address",
"MSAN_LIBS_PATH=/workspace/msan",
"ARCHITECTURE=x86_64",
"FUZZING_ENGINE=honggfuzz",
"FUZZING_LANGUAGE=c++",
"HOME=/root"
"HOME=/root",
"OUT=/workspace/out/honggfuzz-address-x86_64",
"SANITIZER=address"
],
"args": [
"bash",
"-c",
"rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/address && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine honggfuzz --architecture x86_64 test-project\n********************************************************************************\" && false)"
]
"rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/honggfuzz-address-x86_64 && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine honggfuzz --architecture x86_64 test-project\n********************************************************************************\" && false)"
],
"id": "compile-honggfuzz-address-x86_64"
},
{
"name": "gcr.io/oss-fuzz-base/base-runner",
"env": [
"FUZZING_ENGINE=honggfuzz",
"SANITIZER=address",
"OUT=/workspace/out/address",
"MSAN_LIBS_PATH=/workspace/msan",
"ARCHITECTURE=x86_64",
"FUZZING_ENGINE=honggfuzz",
"FUZZING_LANGUAGE=c++",
"HOME=/root"
"HOME=/root",
"OUT=/workspace/out/honggfuzz-address-x86_64",
"SANITIZER=address"
],
"args": [
"bash",
"-c",
"test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine honggfuzz --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer address --engine honggfuzz --architecture x86_64 test-project\n********************************************************************************\" && false)"
]
],
"id": "build-check-honggfuzz-address-x86_64"
},
{
"name": "gcr.io/oss-fuzz-base/base-runner",
"env": [
"FUZZING_ENGINE=honggfuzz",
"SANITIZER=address",
"OUT=/workspace/out/address",
"MSAN_LIBS_PATH=/workspace/msan",
"ARCHITECTURE=x86_64",
"FUZZING_ENGINE=honggfuzz",
"FUZZING_LANGUAGE=c++",
"HOME=/root"
"HOME=/root",
"OUT=/workspace/out/honggfuzz-address-x86_64",
"SANITIZER=address"
],
"args": [
"bash",
@ -184,7 +185,7 @@
"args": [
"bash",
"-c",
"cd /workspace/out/address && zip -r test-project-address-202001010000.zip *"
"cd /workspace/out/honggfuzz-address-x86_64 && zip -r test-project-address-202001010000.zip *"
]
},
{
@ -197,7 +198,7 @@
{
"name": "gcr.io/oss-fuzz-base/uploader",
"args": [
"/workspace/out/address/test-project-address-202001010000.zip",
"/workspace/out/honggfuzz-address-x86_64/test-project-address-202001010000.zip",
"test_url"
]
},
@ -225,53 +226,52 @@
"args": [
"bash",
"-c",
"rm -r /workspace/out/address"
"rm -r /workspace/out/honggfuzz-address-x86_64"
]
},
{
"name": "gcr.io/oss-fuzz/test-project",
"env": [
"FUZZING_ENGINE=libfuzzer",
"SANITIZER=address",
"OUT=/workspace/out/address",
"MSAN_LIBS_PATH=/workspace/msan",
"ARCHITECTURE=x86_64",
"FUZZING_ENGINE=libfuzzer",
"FUZZING_LANGUAGE=c++",
"HOME=/root"
"HOME=/root",
"OUT=/workspace/out/libfuzzer-address-x86_64",
"SANITIZER=address"
],
"args": [
"bash",
"-c",
"rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/address && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
]
"rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/libfuzzer-address-x86_64 && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
],
"id": "compile-libfuzzer-address-x86_64"
},
{
"name": "gcr.io/oss-fuzz-base/base-runner",
"env": [
"FUZZING_ENGINE=libfuzzer",
"SANITIZER=address",
"OUT=/workspace/out/address",
"MSAN_LIBS_PATH=/workspace/msan",
"ARCHITECTURE=x86_64",
"FUZZING_ENGINE=libfuzzer",
"FUZZING_LANGUAGE=c++",
"HOME=/root"
"HOME=/root",
"OUT=/workspace/out/libfuzzer-address-x86_64",
"SANITIZER=address"
],
"args": [
"bash",
"-c",
"test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine libfuzzer --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer address --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
]
],
"id": "build-check-libfuzzer-address-x86_64"
},
{
"name": "gcr.io/oss-fuzz-base/base-runner",
"env": [
"FUZZING_ENGINE=libfuzzer",
"SANITIZER=address",
"OUT=/workspace/out/address",
"MSAN_LIBS_PATH=/workspace/msan",
"ARCHITECTURE=x86_64",
"FUZZING_ENGINE=libfuzzer",
"FUZZING_LANGUAGE=c++",
"HOME=/root"
"HOME=/root",
"OUT=/workspace/out/libfuzzer-address-x86_64",
"SANITIZER=address"
],
"args": [
"bash",
@ -284,7 +284,7 @@
"args": [
"bash",
"-c",
"cd /workspace/out/address && zip -r test-project-address-202001010000.zip *"
"cd /workspace/out/libfuzzer-address-x86_64 && zip -r test-project-address-202001010000.zip *"
]
},
{
@ -297,7 +297,7 @@
{
"name": "gcr.io/oss-fuzz-base/uploader",
"args": [
"/workspace/out/address/test-project-address-202001010000.zip",
"/workspace/out/libfuzzer-address-x86_64/test-project-address-202001010000.zip",
"test_url"
]
},
@ -325,7 +325,304 @@
"args": [
"bash",
"-c",
"rm -r /workspace/out/address"
"rm -r /workspace/out/libfuzzer-address-x86_64"
]
},
{
"name": "gcr.io/oss-fuzz/test-project",
"env": [
"ARCHITECTURE=i386",
"FUZZING_ENGINE=libfuzzer",
"FUZZING_LANGUAGE=c++",
"HOME=/root",
"OUT=/workspace/out/libfuzzer-address-i386",
"SANITIZER=address"
],
"args": [
"bash",
"-c",
"rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/libfuzzer-address-i386 && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine libfuzzer --architecture i386 test-project\n********************************************************************************\" && false)"
],
"id": "compile-libfuzzer-address-i386"
},
{
"name": "gcr.io/oss-fuzz-base/base-runner",
"env": [
"ARCHITECTURE=i386",
"FUZZING_ENGINE=libfuzzer",
"FUZZING_LANGUAGE=c++",
"HOME=/root",
"OUT=/workspace/out/libfuzzer-address-i386",
"SANITIZER=address"
],
"args": [
"bash",
"-c",
"test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine libfuzzer --architecture i386 test-project\npython infra/helper.py check_build --sanitizer address --engine libfuzzer --architecture i386 test-project\n********************************************************************************\" && false)"
],
"id": "build-check-libfuzzer-address-i386"
},
{
"name": "gcr.io/oss-fuzz-base/base-runner",
"env": [
"ARCHITECTURE=i386",
"FUZZING_ENGINE=libfuzzer",
"FUZZING_LANGUAGE=c++",
"HOME=/root",
"OUT=/workspace/out/libfuzzer-address-i386",
"SANITIZER=address"
],
"args": [
"bash",
"-c",
"targets_list > /workspace/targets.list.address"
]
},
{
"name": "gcr.io/oss-fuzz/test-project",
"args": [
"bash",
"-c",
"cd /workspace/out/libfuzzer-address-i386 && zip -r test-project-address-202001010000.zip *"
]
},
{
"name": "gcr.io/oss-fuzz-base/uploader",
"args": [
"/workspace/srcmap.json",
"test_url"
]
},
{
"name": "gcr.io/oss-fuzz-base/uploader",
"args": [
"/workspace/out/libfuzzer-address-i386/test-project-address-202001010000.zip",
"test_url"
]
},
{
"name": "gcr.io/oss-fuzz-base/uploader",
"args": [
"/workspace/targets.list.address",
"test_url"
]
},
{
"name": "gcr.io/cloud-builders/curl",
"args": [
"-H",
"Content-Type: text/plain",
"-X",
"PUT",
"-d",
"test-project-address-202001010000.zip",
"test_url"
]
},
{
"name": "gcr.io/oss-fuzz/test-project",
"args": [
"bash",
"-c",
"rm -r /workspace/out/libfuzzer-address-i386"
]
},
{
"name": "gcr.io/oss-fuzz/test-project",
"env": [
"ARCHITECTURE=x86_64",
"FUZZING_ENGINE=libfuzzer",
"FUZZING_LANGUAGE=c++",
"HOME=/root",
"OUT=/workspace/out/libfuzzer-memory-x86_64",
"SANITIZER=memory"
],
"args": [
"bash",
"-c",
"rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/libfuzzer-memory-x86_64 && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer memory --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
],
"id": "compile-libfuzzer-memory-x86_64"
},
{
"name": "gcr.io/oss-fuzz-base/base-runner",
"env": [
"ARCHITECTURE=x86_64",
"FUZZING_ENGINE=libfuzzer",
"FUZZING_LANGUAGE=c++",
"HOME=/root",
"OUT=/workspace/out/libfuzzer-memory-x86_64",
"SANITIZER=memory"
],
"args": [
"bash",
"-c",
"test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer memory --engine libfuzzer --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer memory --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
],
"id": "build-check-libfuzzer-memory-x86_64"
},
{
"name": "gcr.io/oss-fuzz-base/base-runner",
"env": [
"ARCHITECTURE=x86_64",
"FUZZING_ENGINE=libfuzzer",
"FUZZING_LANGUAGE=c++",
"HOME=/root",
"OUT=/workspace/out/libfuzzer-memory-x86_64",
"SANITIZER=memory"
],
"args": [
"bash",
"-c",
"targets_list > /workspace/targets.list.memory"
]
},
{
"name": "gcr.io/oss-fuzz/test-project",
"args": [
"bash",
"-c",
"cd /workspace/out/libfuzzer-memory-x86_64 && zip -r test-project-memory-202001010000.zip *"
]
},
{
"name": "gcr.io/oss-fuzz-base/uploader",
"args": [
"/workspace/srcmap.json",
"test_url"
]
},
{
"name": "gcr.io/oss-fuzz-base/uploader",
"args": [
"/workspace/out/libfuzzer-memory-x86_64/test-project-memory-202001010000.zip",
"test_url"
]
},
{
"name": "gcr.io/oss-fuzz-base/uploader",
"args": [
"/workspace/targets.list.memory",
"test_url"
]
},
{
"name": "gcr.io/cloud-builders/curl",
"args": [
"-H",
"Content-Type: text/plain",
"-X",
"PUT",
"-d",
"test-project-memory-202001010000.zip",
"test_url"
]
},
{
"name": "gcr.io/oss-fuzz/test-project",
"args": [
"bash",
"-c",
"rm -r /workspace/out/libfuzzer-memory-x86_64"
]
},
{
"name": "gcr.io/oss-fuzz/test-project",
"env": [
"ARCHITECTURE=x86_64",
"FUZZING_ENGINE=libfuzzer",
"FUZZING_LANGUAGE=c++",
"HOME=/root",
"OUT=/workspace/out/libfuzzer-undefined-x86_64",
"SANITIZER=undefined"
],
"args": [
"bash",
"-c",
"rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/libfuzzer-undefined-x86_64 && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer undefined --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
],
"id": "compile-libfuzzer-undefined-x86_64"
},
{
"name": "gcr.io/oss-fuzz-base/base-runner",
"env": [
"ARCHITECTURE=x86_64",
"FUZZING_ENGINE=libfuzzer",
"FUZZING_LANGUAGE=c++",
"HOME=/root",
"OUT=/workspace/out/libfuzzer-undefined-x86_64",
"SANITIZER=undefined"
],
"args": [
"bash",
"-c",
"test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer undefined --engine libfuzzer --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer undefined --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
],
"id": "build-check-libfuzzer-undefined-x86_64"
},
{
"name": "gcr.io/oss-fuzz-base/base-runner",
"env": [
"ARCHITECTURE=x86_64",
"FUZZING_ENGINE=libfuzzer",
"FUZZING_LANGUAGE=c++",
"HOME=/root",
"OUT=/workspace/out/libfuzzer-undefined-x86_64",
"SANITIZER=undefined"
],
"args": [
"bash",
"-c",
"targets_list > /workspace/targets.list.undefined"
]
},
{
"name": "gcr.io/oss-fuzz/test-project",
"args": [
"bash",
"-c",
"cd /workspace/out/libfuzzer-undefined-x86_64 && zip -r test-project-undefined-202001010000.zip *"
]
},
{
"name": "gcr.io/oss-fuzz-base/uploader",
"args": [
"/workspace/srcmap.json",
"test_url"
]
},
{
"name": "gcr.io/oss-fuzz-base/uploader",
"args": [
"/workspace/out/libfuzzer-undefined-x86_64/test-project-undefined-202001010000.zip",
"test_url"
]
},
{
"name": "gcr.io/oss-fuzz-base/uploader",
"args": [
"/workspace/targets.list.undefined",
"test_url"
]
},
{
"name": "gcr.io/cloud-builders/curl",
"args": [
"-H",
"Content-Type: text/plain",
"-X",
"PUT",
"-d",
"test-project-undefined-202001010000.zip",
"test_url"
]
},
{
"name": "gcr.io/oss-fuzz/test-project",
"args": [
"bash",
"-c",
"rm -r /workspace/out/libfuzzer-undefined-x86_64"
]
}
]
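The expected steps above use the new per-build output layout: each configuration compiles into /workspace/out/<engine>-<sanitizer>-<architecture> instead of a shared /workspace/out/<sanitizer> directory. A hypothetical helper capturing that naming, for illustration only (not necessarily how build_project constructs it):

def out_directory(fuzzing_engine, sanitizer, architecture='x86_64'):
  # Mirrors the OUT values seen in the expected steps, e.g. libfuzzer-address-x86_64.
  return f'/workspace/out/{fuzzing_engine}-{sanitizer}-{architecture}'


assert out_directory('libfuzzer', 'address') == '/workspace/out/libfuzzer-address-x86_64'
assert out_directory('libfuzzer', 'address', 'i386') == '/workspace/out/libfuzzer-address-i386'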

View File

@ -2,7 +2,9 @@
{
"args": [
"clone",
"https://github.com/google/oss-fuzz.git"
"https://github.com/google/oss-fuzz.git",
"--depth",
"1"
],
"name": "gcr.io/cloud-builders/git"
},
@ -26,21 +28,25 @@
"env": [
"OSSFUZZ_REVISION=$REVISION_ID",
"FUZZING_LANGUAGE=c++"
]
],
"id": "srcmap"
},
{
"name": "gcr.io/oss-fuzz/test-project",
"env": [
"ARCHITECTURE=x86_64",
"FUZZING_ENGINE=libfuzzer",
"SANITIZER=coverage",
"OUT=/workspace/out/coverage",
"FUZZING_LANGUAGE=c++"
"FUZZING_LANGUAGE=c++",
"HOME=/root",
"OUT=/workspace/out/libfuzzer-coverage-x86_64",
"SANITIZER=coverage"
],
"args": [
"bash",
"-c",
"rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/coverage && compile || (echo \"********************************************************************************\nCoverage build failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer coverage test-project\n********************************************************************************\" && false)"
]
"rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/libfuzzer-coverage-x86_64 && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer coverage --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
],
"id": "compile-libfuzzer-coverage-x86_64"
},
{
"url": "test_download"
@ -48,10 +54,12 @@
{
"name": "gcr.io/oss-fuzz-base/base-runner",
"env": [
"ARCHITECTURE=x86_64",
"FUZZING_ENGINE=libfuzzer",
"SANITIZER=coverage",
"OUT=/workspace/out/coverage",
"FUZZING_LANGUAGE=c++",
"HOME=/root",
"OUT=/workspace/out/libfuzzer-coverage-x86_64",
"SANITIZER=coverage",
"HTTP_PORT=",
"COVERAGE_EXTRA_ARGS="
],
@ -81,7 +89,7 @@
"-m",
"cp",
"-r",
"/workspace/out/coverage/report",
"/workspace/out/libfuzzer-coverage-x86_64/report",
"gs://oss-fuzz-coverage/test-project/reports/20200101"
]
},
@ -99,7 +107,7 @@
"-m",
"cp",
"-r",
"/workspace/out/coverage/fuzzer_stats",
"/workspace/out/libfuzzer-coverage-x86_64/fuzzer_stats",
"gs://oss-fuzz-coverage/test-project/fuzzer_stats/20200101"
]
},
@ -117,7 +125,7 @@
"-m",
"cp",
"-r",
"/workspace/out/coverage/logs",
"/workspace/out/libfuzzer-coverage-x86_64/logs",
"gs://oss-fuzz-coverage/test-project/logs/20200101"
]
},
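The coverage steps above copy reports, fuzzer_stats and logs to a per-project, per-date prefix in the oss-fuzz-coverage bucket. A sketch of that layout; the helper is hypothetical, with the bucket and path segments taken from the expected steps:

def coverage_gcs_path(project, artifact, date_str):
  # e.g. gs://oss-fuzz-coverage/test-project/reports/20200101
  return f'gs://oss-fuzz-coverage/{project}/{artifact}/{date_str}'


assert (coverage_gcs_path('test-project', 'reports', '20200101') ==
        'gs://oss-fuzz-coverage/test-project/reports/20200101')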

View File

@ -24,16 +24,31 @@ import requests
DATASTORE_READY_INDICATOR = b'is now running'
DATASTORE_EMULATOR_PORT = 8432
EMULATOR_TIMEOUT = 20
TEST_PROJECT_ID = 'test-project'
FUNCTIONS_DIR = os.path.dirname(__file__)
OSS_FUZZ_DIR = os.path.dirname(os.path.dirname(os.path.dirname(FUNCTIONS_DIR)))
PROJECTS_DIR = os.path.join(OSS_FUZZ_DIR, 'projects')
FAKE_DATETIME = datetime.datetime(2020, 1, 1, 0, 0, 0)
IMAGE_PROJECT = 'oss-fuzz'
BASE_IMAGES_PROJECT = 'oss-fuzz-base'
PROJECT = 'test-project'
PROJECT_DIR = os.path.join(PROJECTS_DIR, PROJECT)
# pylint: disable=arguments-differ
class SpoofedDatetime(datetime.datetime):
"""Mocking Datetime class for now() function."""
def create_project_data(project,
project_yaml_contents,
dockerfile_contents='test line'):
"""Creates a project.yaml with |project_yaml_contents| and a Dockerfile with
|dockerfile_contents| for |project|."""
project_dir = os.path.join(PROJECTS_DIR, project)
project_yaml_path = os.path.join(project_dir, 'project.yaml')
with open(project_yaml_path, 'w') as project_yaml_handle:
project_yaml_handle.write(project_yaml_contents)
@classmethod
def now(cls):
return datetime.datetime(2020, 1, 1, 0, 0, 0)
dockerfile_path = os.path.join(project_dir, 'Dockerfile')
with open(dockerfile_path, 'w') as dockerfile_handle:
dockerfile_handle.write(dockerfile_contents)
def start_datastore_emulator():
@ -46,7 +61,7 @@ def start_datastore_emulator():
'start',
'--consistency=1.0',
'--host-port=localhost:' + str(DATASTORE_EMULATOR_PORT),
'--project=' + TEST_PROJECT_ID,
'--project=' + PROJECT,
'--no-store-on-disk',
],
stdout=subprocess.PIPE,
@ -96,9 +111,9 @@ def set_gcp_environment():
"""Set environment variables for simulating in google cloud platform."""
os.environ['DATASTORE_EMULATOR_HOST'] = 'localhost:' + str(
DATASTORE_EMULATOR_PORT)
os.environ['GOOGLE_CLOUD_PROJECT'] = TEST_PROJECT_ID
os.environ['DATASTORE_DATASET'] = TEST_PROJECT_ID
os.environ['GCP_PROJECT'] = TEST_PROJECT_ID
os.environ['GOOGLE_CLOUD_PROJECT'] = PROJECT
os.environ['DATASTORE_DATASET'] = PROJECT
os.environ['GCP_PROJECT'] = PROJECT
os.environ['FUNCTION_REGION'] = 'us-central1'
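A short usage sketch for the create_project_data helper added above. PROJECTS_DIR and create_project_data are the names from this file; the YAML below is sample content, and the directory is created first because the helper only writes files into an existing project directory:

import os

project_yaml = ('language: c++\n'
                'sanitizers:\n'
                '  - address\n')
os.makedirs(os.path.join(PROJECTS_DIR, 'test-project'), exist_ok=True)
# Writes projects/test-project/project.yaml and projects/test-project/Dockerfile.
create_project_data('test-project', project_yaml)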

View File

@ -254,10 +254,10 @@ def update_status(event, context):
return
if status_type == 'fuzzing':
tag = build_project.FUZZING_BUILD_TAG
tag = build_project.FUZZING_BUILD_TYPE
status_filename = FUZZING_STATUS_FILENAME
elif status_type == 'coverage':
tag = build_and_run_coverage.COVERAGE_BUILD_TAG
tag = build_and_run_coverage.COVERAGE_BUILD_TYPE
status_filename = COVERAGE_STATUS_FILENAME
else:
raise RuntimeError('Invalid build status type ' + status_type)
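The rename above only changes which constant is forwarded (FUZZING_BUILD_TYPE and COVERAGE_BUILD_TYPE instead of the old *_TAG names); the dispatch itself stays a two-way branch. A sketch of that mapping, where the constant values are assumptions for illustration and only the branching structure mirrors update_status:

# Assumed values; not taken from the source.
FUZZING_BUILD_TYPE = 'fuzzing'
COVERAGE_BUILD_TYPE = 'coverage'
FUZZING_STATUS_FILENAME = 'status.json'
COVERAGE_STATUS_FILENAME = 'status-coverage.json'


def resolve_status_type(status_type):
  """Maps a status type to (build type, status filename), as update_status does."""
  if status_type == 'fuzzing':
    return FUZZING_BUILD_TYPE, FUZZING_STATUS_FILENAME
  if status_type == 'coverage':
    return COVERAGE_BUILD_TYPE, COVERAGE_STATUS_FILENAME
  raise RuntimeError('Invalid build status type ' + status_type)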