mirror of https://github.com/google/oss-fuzz.git
msan_builder: collect and build dependencies.
Use python-apt to get dependencies. Also change source downloading to use this module.
parent 0b3f8b3a29
commit ad476b52db
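
The first hunk below touches the builder Dockerfile (it installs python-apt); the remaining hunks modify the MSan build script and its packages.package helper module. For orientation, here is a minimal sketch of the python-apt calls the new code relies on (apt.Cache(), Version.dependencies and BaseDependency.pre_depend), assuming python-apt is installed; the package name is purely illustrative and does not appear in the commit:

import apt

cache = apt.Cache()
pkg = cache['libpng-dev']  # illustrative package name, not from the commit

# Each entry in dependencies is an "or" group; like _CollectDependencies()
# below, this only looks at the first alternative.
for dependency in pkg.versions[0].dependencies:
  base = dependency[0]
  print('%s (pre-depend: %s)' % (base.name, base.pre_depend))
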
@@ -17,7 +17,7 @@
 FROM gcr.io/oss-fuzz-base/base-clang
 MAINTAINER ochang@google.com
 RUN sed -i -r 's/#\s*deb-src/deb-src/g' /etc/apt/sources.list
-RUN apt-get update && apt-get install -y python dpkg-dev patchelf
+RUN apt-get update && apt-get install -y python dpkg-dev patchelf python-apt
 
 # Take all libraries from lib/msan
 RUN cp -R /usr/msan/lib/* /usr/lib/
@@ -19,10 +19,13 @@ from __future__ import print_function
 import argparse
 import imp
 import os
+import re
 import shutil
 import subprocess
 import tempfile
 
+import apt
+
 from packages import package
 
 SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
@@ -205,6 +208,48 @@ def PatchRpath(path, output_directory):
                          processed_rpath, path])
 
 
+def _CollectDependencies(apt_cache, pkg, cache, dependencies):
+  """Collect dependencies that need to be built."""
+  C_OR_CXX_DEPS = [
+      'libc++1',
+      'libc6',
+      'libc++abi1',
+      'libgcc1',
+      'libstdc++6',
+  ]
+
+  if pkg.name in C_OR_CXX_DEPS:
+    return True
+
+  is_c_or_cxx = False
+  for dependency in pkg.versions[0].dependencies:
+    dependency = dependency[0]
+    if dependency.pre_depend:
+      continue
+
+    if dependency.name in cache:
+      is_c_or_cxx |= cache[dependency.name]
+    else:
+      is_c_or_cxx |= _CollectDependencies(apt_cache, apt_cache[dependency.name],
+                                          cache, dependencies)
+  if is_c_or_cxx:
+    dependencies.append(pkg.name)
+
+  cache[pkg.name] = is_c_or_cxx
+  return is_c_or_cxx
+
+
+def GetBuildList(package_name):
+  """Get list of packages that need to be built including dependencies."""
+  apt_cache = apt.Cache()
+  pkg = apt_cache[package_name]
+
+  dependencies = []
+  _CollectDependencies(apt_cache, pkg, {}, dependencies)
+  return dependencies
+
+
 class MSanBuilder(object):
   """MSan builder."""
 
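
A rough usage sketch of the new helpers: _CollectDependencies() walks the apt dependency graph depth-first, memoizing results in the cache dict so each package is visited only once, and records (in post-order) every package whose transitive dependencies reach one of the C/C++ runtime libraries in C_OR_CXX_DEPS. GetBuildList() seeds that walk from a single package. The module and package names below are assumptions for illustration; the diff does not name the file.

# Hypothetical: assumes the patched script is importable as 'msan_build'
# and is run inside the builder image, where python-apt is available.
import msan_build

# 'libpng12-0' is an illustrative package name, not taken from the commit.
build_list = msan_build.GetBuildList('libpng12-0')
for name in build_list:
  print(name)  # dependencies first, the requested package last
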
@@ -265,18 +310,35 @@ def main():
   parser.add_argument('--debug', action='store_true', help='Enable debug mode.')
   parser.add_argument('--log-path', help='Log path for debugging.')
   parser.add_argument('--work-dir', help='Work directory.')
+  parser.add_argument('--build-deps', action='store_true',
+                      help='Build dependencies as well.')
   args = parser.parse_args()
 
   if not os.path.exists(args.output_dir):
     os.makedirs(args.output_dir)
 
+  if args.build_deps:
+    all_packages = set()
+    package_names = []
+    for package_name in args.package_names:
+      for dep in GetBuildList(package_name):
+        if dep in all_packages:
+          continue
+
+        all_packages.add(dep)
+        package_names.append(dep)
+  else:
+    package_names = args.package_names
+
+  print('Going to build:')
+  for package_name in package_names:
+    print('\t', package_name)
+
   with MSanBuilder(debug=args.debug, log_path=args.log_path,
                    work_dir=args.work_dir) as builder:
-    for package_name in args.package_names:
+    for package_name in package_names:
       builder.Build(package_name, args.output_dir)
 
 
 if __name__ == '__main__':
   main()
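
The --build-deps branch above flattens the per-package build lists while keeping only the first occurrence of each dependency: a set for membership checks plus a list to preserve build order. The same pattern in isolation, with made-up package names:

seen = set()
ordered = []
for name in ['zlib1g', 'libpng12-0', 'zlib1g']:  # made-up names
  if name in seen:
    continue
  seen.add(name)
  ordered.append(name)

assert ordered == ['zlib1g', 'libpng12-0']
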
In the packages.package module (imported above via "from packages import package"):

@@ -1,15 +1,11 @@
 import os
 import subprocess
 
+import apt
+
 SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
 
 
-def FindDirs(directory):
-  """Find sub directories."""
-  return [subdir for subdir in os.listdir(directory)
-          if os.path.isdir(os.path.join(directory, subdir))]
-
-
 def ApplyPatch(source_directory, patch_name):
   """Apply custom patch."""
   subprocess.check_call(['patch', '-p1', '-i',
@@ -46,20 +42,11 @@ class Package(object):
   def DownloadSource(self, download_directory):
     """Download the source for a package."""
     self.PreDownload(download_directory)
-    before = FindDirs(download_directory)
-    subprocess.check_call(
-        ['apt-get', 'source', self.name],
-        stderr=subprocess.STDOUT, cwd=download_directory)
-
-    after = FindDirs(download_directory)
-    new_dirs = [subdir for subdir in after
-                if subdir not in before]
+    apt_cache = apt.Cache()
+    source_directory = apt_cache[self.name].versions[0].fetch_source(
+        download_directory)
 
-    if len(new_dirs) != 1:
-      raise PackageException(
-          'Found more than one new directory after downloading apt-get source.')
-
-    source_directory = os.path.join(download_directory, new_dirs[0])
     self.PostDownload(source_directory)
     return source_directory
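
This last hunk replaces the old "apt-get source" subprocess call and the FindDirs() before/after directory comparison with python-apt's Version.fetch_source(), which downloads and unpacks the source package and returns the path of the extracted tree (the value DownloadSource() now returns). A standalone sketch, assuming deb-src entries are enabled and dpkg-dev is installed as in the builder image; the package name and temporary directory are illustrative:

import tempfile

import apt

download_dir = tempfile.mkdtemp()
cache = apt.Cache()
# fetch_source() fetches the source package into download_dir, unpacks it
# there, and returns the unpacked source directory.
source_directory = cache['zlib1g'].versions[0].fetch_source(download_dir)
print(source_directory)
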