clean up formatting with yapf

Jack O'Connor 2018-10-03 15:10:48 -04:00
parent 42e5dfdc4e
commit f2ab34e5ff
43 changed files with 801 additions and 683 deletions
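
For reference, a minimal sketch of the kind of transformation yapf performs, via its Python API. The 'pep8' style_config here is an assumption; this diff doesn't show the project's actual yapf configuration, if any.

# A hedged sketch (not part of this commit) of the reformatting
# tool in action, using yapf's Python API.
from yapf.yapflib.yapf_api import FormatCode

source = (
    "script_text = TEMPLATE.format(\n"
    "    ep.module_name, ep.attrs[0], '.'.join(ep.attrs),\n"
    "    spec, group, name)\n"
)
# Recent yapf versions return a (formatted_text, changed) tuple.
formatted, changed = FormatCode(source, style_config='pep8')
print(formatted)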

View File

@ -66,9 +66,9 @@ def get_args(cls, dist, header=None):
# ensure_safe_name
if re.search(r'[\\/]', name):
raise ValueError("Path separators not allowed in script names")
script_text = TEMPLATE.format(
ep.module_name, ep.attrs[0], '.'.join(ep.attrs),
spec, group, name)
script_text = TEMPLATE.format(ep.module_name, ep.attrs[0],
'.'.join(ep.attrs), spec, group,
name)
args = cls._get_script_args(type_, name, header, script_text)
for res in args:
yield res
@ -95,8 +95,8 @@ def main():
manifest.seek(0)
manifest_content = manifest.read()
if not 'include fastentrypoints.py' in manifest_content:
manifest.write(('\n' if manifest_content else '')
+ 'include fastentrypoints.py')
manifest.write(('\n' if manifest_content else '') +
'include fastentrypoints.py')
# Insert the import statement to setup.py if not present
with open(setup_path, 'a+') as setup:
@ -107,4 +107,5 @@ def main():
setup.truncate()
setup.write('import fastentrypoints\n' + setup_content)
print(__name__)

View File

@ -94,8 +94,12 @@ async def gather_coalescing_exceptions(coros, display, *, verbose):
return results
async def create_subprocess_with_handle(command, display_handle, *, shell=False, cwd,
**kwargs):
async def create_subprocess_with_handle(command,
display_handle,
*,
shell=False,
cwd,
**kwargs):
'''Writes subprocess output to a display handle as it comes in, and also
returns a copy of it as a string. Throws if the subprocess returns an
error. Note that cwd is a required keyword-only argument, on the theory that
@ -122,11 +126,19 @@ async def create_subprocess_with_handle(command, display_handle, *, shell=False,
stderr = asyncio.subprocess.STDOUT
if shell:
proc = await asyncio.create_subprocess_shell(
command, stdin=stdin, stdout=stdout, stderr=stderr, cwd=cwd,
command,
stdin=stdin,
stdout=stdout,
stderr=stderr,
cwd=cwd,
**kwargs)
else:
proc = await asyncio.create_subprocess_exec(
*command, stdin=stdin, stdout=stdout, stderr=stderr, cwd=cwd,
*command,
stdin=stdin,
stdout=stdout,
stderr=stderr,
cwd=cwd,
**kwargs)
# Read all the output from the subprocess as it comes in.
@ -142,8 +154,8 @@ async def create_subprocess_with_handle(command, display_handle, *, shell=False,
returncode = await proc.wait()
if returncode != 0:
raise subprocess.CalledProcessError(
returncode, command, output_copy.getvalue())
raise subprocess.CalledProcessError(returncode, command,
output_copy.getvalue())
if hasattr(decoder, 'buffer'):
# The utf8 decoder has this attribute, but some others don't.
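
For reference, a minimal usage sketch of the create_subprocess_with_handle helper reformatted above; the handle argument is a stand-in for a display handle (later hunks show these coming from runtime.display.get_handle()).

# A hedged sketch, not part of the commit; assumes the function
# above is importable from this module.
async def demo(handle):
    # cwd is a required keyword-only argument, per the docstring.
    output = await create_subprocess_with_handle(
        ['git', '--version'], handle, cwd='.')
    return output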

View File

@ -13,7 +13,6 @@ from .compat import makedirs
from .error import PrintableError
from .keyval import KeyVal
# git output modes
TEXT_MODE = object()
BINARY_MODE = object()
@ -127,8 +126,8 @@ class GitSession:
paths = ["./" + path for path in paths]
ls_output = await self.git(
'ls-files', '--full-name', '-z', *paths, output_mode=BINARY_MODE)
await self.git('update-index', '--force-remove', '-z', '--stdin',
input=ls_output)
await self.git(
'update-index', '--force-remove', '-z', '--stdin', input=ls_output)
async def merge_tree_into_index(self, tree, prefix):
# The --prefix argument to read-tree chokes on paths that contain dot
@ -150,21 +149,20 @@ class GitSession:
await self.git('read-tree', '-i', '--prefix', prefix_arg, tree)
async def working_copy_matches_index(self):
diff_output = await self.git(
'diff-files', output_mode=BINARY_MODE)
diff_output = await self.git('diff-files', output_mode=BINARY_MODE)
return len(diff_output) == 0
async def get_modified_files_skipping_deletes(self):
# We want to ignore deleted files, so we include every possible value
# of --diff-filter except 'D'.
diff_output = await self.git(
'diff-files', '-z', '--name-only', '--diff-filter=ACMRTUXB')
diff_output = await self.git('diff-files', '-z', '--name-only',
'--diff-filter=ACMRTUXB')
return [name for name in diff_output.split('\x00') if name]
async def get_new_files_in_tree(self, previous_tree, new_tree):
added_files_output = await self.git(
'diff-tree', '--diff-filter=A', '--name-only', '-r', '-z',
previous_tree, new_tree)
added_files_output = await self.git('diff-tree', '--diff-filter=A',
'--name-only', '-r', '-z',
previous_tree, new_tree)
return added_files_output.split('\x00')
async def read_tree_updating_working_copy(self, tree, force):
@ -185,14 +183,14 @@ class GitSession:
async def get_info_for_path(self, tree, path):
# --full-tree makes ls-tree ignore the cwd. As in list_tree_entries,
# prepend ./ to avoid interpreting leading colons in pathspecs.
ls_output = await self.git(
'ls-tree', '--full-tree', '-z', tree, "./" + path)
ls_output = await self.git('ls-tree', '--full-tree', '-z', tree,
"./" + path)
ls_lines = ls_output.strip('\x00').split('\x00')
# Remove empty lines.
ls_lines = list(filter(None, ls_lines))
if len(ls_lines) == 0:
raise FileNotFoundError(
'Path "{}" not found in tree {}.'.format(path, tree))
raise FileNotFoundError('Path "{}" not found in tree {}.'.format(
path, tree))
assert len(ls_lines) == 1
mode, type, sha1, name = ls_lines[0].split()
return mode, type, sha1, name
@ -230,8 +228,8 @@ class GitSession:
entries = {}
for line in output.strip('\x00').split('\x00'):
mode, type, hash, name = re.match(entry_regex, line).groups()
if (recursive and path is not None and
len(name) < len(canonical_path) and type == TREE_TYPE):
if (recursive and path is not None
and len(name) < len(canonical_path) and type == TREE_TYPE):
# In recursive mode, leave out the parents of the target dir.
continue
entries[name] = TreeEntry(mode, type, hash)
@ -239,8 +237,9 @@ class GitSession:
async def make_tree_from_entries(self, entries):
entry_format = '{} {} {}\t{}'
input = '\x00'.join(entry_format.format(mode, type, hash, name)
for name, (mode, type, hash) in entries.items())
input = '\x00'.join(
entry_format.format(mode, type, hash, name)
for name, (mode, type, hash) in entries.items())
tree = await self.git('mktree', '-z', input=input)
return tree
@ -276,8 +275,8 @@ class _Cache:
# that I know can cause problems. We might need to add more
# attributes here in the future. Note that other config files are
# disabled in _git_env below.
attributes_path = os.path.join(
self.trees_path, 'info', 'attributes')
attributes_path = os.path.join(self.trees_path, 'info',
'attributes')
with open(attributes_path, 'w') as attributes:
# Disable the 'text' attribute for all files.
attributes.write('* -text')
@ -323,15 +322,19 @@ class _Cache:
if base_tree:
await session.read_tree_into_index(base_tree)
try:
await session.merge_tree_into_index(
merge_tree, merge_path)
await session.merge_tree_into_index(merge_tree, merge_path)
except GitError as e:
raise MergeConflictError(e.stdout) from e
unified_tree = await session.make_tree_from_index()
return unified_tree
async def export_tree(self, tree, dest, previous_tree=None, *, force=False,
previous_index_file=None):
async def export_tree(self,
tree,
dest,
previous_tree=None,
*,
force=False,
previous_index_file=None):
'''This method is the core of `peru sync`. If the contents of "dest"
match "previous_tree", then export_tree() updates them to match "tree".
If not, it raises an error and doesn't touch any files.
@ -364,18 +367,16 @@ class _Cache:
# have to pay the cost to recreate it.
did_refresh = False
if previous_index_file:
session = GitSession(
self.trees_path, previous_index_file, dest)
session = GitSession(self.trees_path, previous_index_file,
dest)
stack.enter_context(delete_if_error(previous_index_file))
if not os.path.exists(previous_index_file):
did_refresh = True
await session.read_tree_and_stats_into_index(
previous_tree)
await session.read_tree_and_stats_into_index(previous_tree)
else:
session = stack.enter_context(self.clean_git_session(dest))
did_refresh = True
await session.read_tree_and_stats_into_index(
previous_tree)
await session.read_tree_and_stats_into_index(previous_tree)
# The fast path. If the previous tree is the same as the current
# one, and no files have changed at all, short-circuit.
@ -387,8 +388,7 @@ class _Cache:
# the tree has changed, or both. If we didn't refresh the index
# file above, we must do so now.
if not did_refresh:
await session.read_tree_and_stats_into_index(
previous_tree)
await session.read_tree_and_stats_into_index(previous_tree)
modified = await session.get_modified_files_skipping_deletes()
if modified and not force:
raise DirtyWorkingCopyError(
@ -404,8 +404,10 @@ class _Cache:
# are new in `tree` already existed in the working copy.
new_files = await session.get_new_files_in_tree(
previous_tree, tree)
existing_new_files = [f for f in new_files if f and
os.path.exists(os.path.join(dest, f))]
existing_new_files = [
f for f in new_files
if f and os.path.exists(os.path.join(dest, f))
]
existing_new_files.sort()
if existing_new_files:
raise DirtyWorkingCopyError(
@ -426,8 +428,8 @@ class _Cache:
mode, type, sha1, name = await session.get_info_for_path(
tree, path)
if type == 'tree':
raise IsADirectoryError('Path "{}" in tree {} is a directory.'
.format(path, tree))
raise IsADirectoryError(
'Path "{}" in tree {} is a directory.'.format(path, tree))
assert type == 'blob'
return (await session.read_bytes_from_file_hash(sha1))
@ -493,8 +495,8 @@ class _Cache:
subtree_base = None
if name in entries and entries[name].type == TREE_TYPE:
subtree_base = entries[name].hash
new_subtree = await self.modify_tree(
subtree_base, sub_modifications)
new_subtree = await self.modify_tree(subtree_base,
sub_modifications)
if new_subtree != empty_tree:
entries[name] = TreeEntry(TREE_MODE, TREE_TYPE, new_subtree)
# Delete an empty tree if it was actually a tree to begin with.
@ -529,8 +531,8 @@ def _format_file_lines(files):
if len(files) <= LINES_TO_SHOW:
lines = '\n'.join(files)
else:
lines = ('\n'.join(files[:LINES_TO_SHOW-1]) +
'\n...{} total'.format(len(files)))
lines = ('\n'.join(files[:LINES_TO_SHOW - 1]) + '\n...{} total'.format(
len(files)))
return lines
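
For reference, a minimal sketch of the export_tree contract described in its docstring above; the cache object, tree hashes, and dest path are stand-ins.

# A hedged sketch, not part of the commit. Per the docstring:
# update 'dest' from previous_tree to tree, raising instead of
# touching files if 'dest' has drifted from previous_tree
# (unless force=True).
async def sync_sketch(cache, tree, previous_tree, dest):
    await cache.export_tree(tree, dest, previous_tree, force=False)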

View File

@ -1,7 +1,6 @@
import os
import sys
# In Python versions prior to 3.4, __file__ returns a relative path. This path
# is fixed at load time, so if the program later cd's (as we do in tests, at
# least) __file__ is no longer valid. As a workaround, compute the absolute

View File

@ -87,6 +87,7 @@ class VerboseDisplay(BaseDisplay):
'''Waits until jobs are finished and then prints all of their output at
once, to make sure jobs don't get interleaved. We use '===' as a delimiter
to try to separate jobs from one another, and from other output.'''
def _job_started(self, job_id):
print('===', 'started', self.titles[job_id], '===', file=self.output)
@ -101,6 +102,7 @@ class VerboseDisplay(BaseDisplay):
class FancyDisplay(BaseDisplay):
'''Prints a multi-line, real-time display of all the latest output lines
from each job.'''
def __init__(self, *args):
super().__init__(*args)
# Every time we draw we need to erase the lines that were printed

View File

@ -4,8 +4,8 @@ import yaml
def set_module_field_in_file(yaml_file_path, module_name, field_name, new_val):
with open(yaml_file_path) as f:
yaml_text = f.read()
new_yaml_text = set_module_field(yaml_text, module_name,
field_name, new_val)
new_yaml_text = set_module_field(yaml_text, module_name, field_name,
new_val)
with open(yaml_file_path, "w") as f:
f.write(new_yaml_text)
@ -45,8 +45,8 @@ def _maybe_quote(val):
return val
def _append_module_field(yaml_text, yaml_dict, module_name,
field_name, new_val):
def _append_module_field(yaml_text, yaml_dict, module_name, field_name,
new_val):
module_fields = yaml_dict[module_name]
# use the last field to determine position and indentation
assert len(module_fields) > 0, "There aren't any fields here!"
@ -68,8 +68,7 @@ def _append_module_field(yaml_text, yaml_dict, module_name,
new_line_number -= 1
new_line = "{}{}: {}".format(indentation, field_name, new_val)
new_yaml_lines = (yaml_lines[:new_line_number] +
[new_line] +
new_yaml_lines = (yaml_lines[:new_line_number] + [new_line] +
yaml_lines[new_line_number:])
return "\n".join(new_yaml_lines)
@ -89,15 +88,15 @@ def _parse_yaml_text(yaml_text):
def _parse_events_list(events_list):
event = events_list.pop(0)
if (isinstance(event, yaml.StreamStartEvent) or
isinstance(event, yaml.DocumentStartEvent)):
if (isinstance(event, yaml.StreamStartEvent)
or isinstance(event, yaml.DocumentStartEvent)):
ret = _parse_events_list(events_list)
events_list.pop(-1)
return ret
elif (isinstance(event, yaml.ScalarEvent) or
isinstance(event, yaml.AliasEvent) or
isinstance(event, yaml.SequenceEndEvent) or
isinstance(event, yaml.MappingEndEvent)):
elif (isinstance(event, yaml.ScalarEvent)
or isinstance(event, yaml.AliasEvent)
or isinstance(event, yaml.SequenceEndEvent)
or isinstance(event, yaml.MappingEndEvent)):
return event
elif isinstance(event, yaml.SequenceStartEvent):
contents = []
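
For reference, a minimal sketch of the entry point at the top of this file; the file path, module name, and values are hypothetical.

# A hedged sketch, not part of the commit: rewrite one field of
# one module in place, preserving the rest of the YAML file's
# formatting. All argument values here are made up.
set_module_field_in_file('peru.yaml', 'my_module', 'rev', 'abc123')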

View File

@ -3,11 +3,10 @@ import re
from .error import PrintableError
UNESCAPED_STAR_EXPR = (
r'(?<!\\)' # negative lookbehind assertion for more backslashes
r'(?<!\\)' # negative lookbehind assertion for more backslashes
r'(?:\\\\)*' # non-capturing group of an even number of backslashes
r'\*' # literal *
r'\*' # literal *
)
@ -44,10 +43,13 @@ def split_on_stars_interpreting_backslashes(s):
*'s and \'s, and leave them in as literals (to be regex-escaped in the next
step).'''
star_indices = [match.end() - 1 for match in
re.finditer(UNESCAPED_STAR_EXPR, s)]
literalized_parts = [part.replace(r'\*', '*').replace(r'\\', '\\')
for part in _split_on_indices(s, star_indices)]
star_indices = [
match.end() - 1 for match in re.finditer(UNESCAPED_STAR_EXPR, s)
]
literalized_parts = [
part.replace(r'\*', '*').replace(r'\\', '\\')
for part in _split_on_indices(s, star_indices)
]
return literalized_parts
@ -69,8 +71,8 @@ def glob_to_path_regex(glob):
for i, component in enumerate(components):
if component == '**':
if i == len(components) - 1:
raise GlobError(
glob, '** may not be the last component in a path.')
raise GlobError(glob,
'** may not be the last component in a path.')
else:
regex += r'(?:[^/]+/)*'
elif '**' in component:

View File

@ -12,24 +12,25 @@ async def checkout(runtime, scope, imports, path):
last_imports_tree = _get_last_imports(runtime)
index = _last_imports_index(runtime)
await runtime.cache.export_tree(
imports_tree, path, last_imports_tree, force=runtime.force,
imports_tree,
path,
last_imports_tree,
force=runtime.force,
previous_index_file=index)
_set_last_imports(runtime, imports_tree)
async def get_imports_tree(runtime, scope, imports, base_tree=None):
target_trees = await get_trees(runtime, scope, imports.keys())
imports_tree = await merge_imports_tree(
runtime.cache, imports, target_trees, base_tree)
imports_tree = await merge_imports_tree(runtime.cache, imports,
target_trees, base_tree)
return imports_tree
async def get_trees(runtime, scope, targets):
futures = [get_tree(runtime, scope, target) for target in targets]
trees = await gather_coalescing_exceptions(
futures,
runtime.display,
verbose=runtime.verbose)
futures, runtime.display, verbose=runtime.verbose)
return dict(zip(targets, trees))

View File

@ -15,7 +15,6 @@ from . import imports
from . import parser
from .runtime import Runtime
__doc__ = '''\
Usage:
peru [-hqv] [--file=<file>] [--sync-dir=<dir>] [--state-dir=<dir>]
@ -60,6 +59,7 @@ def peru_command(name, doc):
COMMAND_FNS[name] = f
COMMAND_DOCS[name] = doc
return f
return decorator
@ -88,8 +88,8 @@ Options:
''')
async def do_sync(params):
params.runtime.print_overrides()
await imports.checkout(
params.runtime, params.scope, params.imports, params.runtime.sync_dir)
await imports.checkout(params.runtime, params.scope, params.imports,
params.runtime.sync_dir)
params.runtime.warn_unused_overrides()
@ -124,9 +124,7 @@ async def do_reup(params):
modules = params.scope.get_modules_for_reup(names)
futures = [module.reup(params.runtime) for module in modules]
await gather_coalescing_exceptions(
futures,
params.runtime.display,
verbose=params.runtime.verbose)
futures, params.runtime.display, verbose=params.runtime.verbose)
if not params.args['--no-sync']:
# Do an automatic sync. Reparse peru.yaml to get the new revs.
new_scope, new_imports = parser.parse_file(params.runtime.peru_file)
@ -149,8 +147,8 @@ Options:
-v --verbose print everything
''')
async def do_clean(params):
await imports.checkout(
params.runtime, params.scope, {}, params.runtime.sync_dir)
await imports.checkout(params.runtime, params.scope, {},
params.runtime.sync_dir)
@peru_command('copy', '''\
@ -178,8 +176,8 @@ async def do_copy(params):
dest = tempfile.mkdtemp(prefix='peru_copy_')
else:
dest = params.args['<dest>']
tree = await imports.get_tree(
params.runtime, params.scope, params.args['<target>'])
tree = await imports.get_tree(params.runtime, params.scope,
params.args['<target>'])
await params.runtime.cache.export_tree(
tree, dest, force=params.runtime.force)
if not params.args['<dest>']:
@ -221,12 +219,15 @@ async def do_override(params):
del overrides[key]
else:
if params.args['--json']:
print(json.dumps({module: os.path.abspath(overrides[module])
for module in overrides}))
print(
json.dumps({
module: os.path.abspath(overrides[module])
for module in overrides
}))
else:
for module in sorted(overrides):
print('{}: {}'.format(
module, params.runtime.get_override(module)))
print('{}: {}'.format(module,
params.runtime.get_override(module)))
@peru_command('module', '''\
@ -326,8 +327,8 @@ def docopt_parse_args(argv):
return args
CommandParams = collections.namedtuple(
'CommandParams', ['args', 'runtime', 'scope', 'imports'])
CommandParams = collections.namedtuple('CommandParams',
['args', 'runtime', 'scope', 'imports'])
def force_utf8_in_ascii_mode_hack():
@ -337,10 +338,10 @@ def force_utf8_in_ascii_mode_hack():
peru inside of Docker. This is a hack to force emitting UTF8 in that case.
Hopefully it doesn't break anything important.'''
if sys.stdout.encoding == 'ANSI_X3.4-1968':
sys.stdout = open(sys.stdout.fileno(), mode='w', encoding='utf8',
buffering=1)
sys.stderr = open(sys.stderr.fileno(), mode='w', encoding='utf8',
buffering=1)
sys.stdout = open(
sys.stdout.fileno(), mode='w', encoding='utf8', buffering=1)
sys.stderr = open(
sys.stderr.fileno(), mode='w', encoding='utf8', buffering=1)
# Called as a setup.py entry point, or from __main__.py (`python3 -m peru`).

View File

@ -28,16 +28,16 @@ async def merge_imports_tree(cache, imports, target_trees, base_tree=None):
unified_tree, target_trees[target], path)
except MergeConflictError as e:
message = 'Merge conflict in import "{}" at "{}":\n\n{}'
e.message = message.format(
target, path, textwrap.indent(e.message, ' '))
e.message = message.format(target, path,
textwrap.indent(e.message, ' '))
raise
cache.keyval[key] = unified_tree
return unified_tree
def _cache_key(imports, target_trees, base_tree):
tree_paths = tuple((target_trees[target], paths) for
target, paths in imports.items())
tree_paths = tuple(
(target_trees[target], paths) for target, paths in imports.items())
return compute_key({
'base_tree': base_tree,
'tree_paths': tree_paths,

View File

@ -9,7 +9,6 @@ from . import imports
from .plugin import plugin_fetch, plugin_get_reup_fields
from . import scope
recursion_warning = '''\
WARNING: The peru module '{}' doesn't specify the 'recursive' field,
but its contents include a peru.yaml file. Peru's behavior here changed
@ -62,10 +61,9 @@ class Module:
if key in runtime.cache.keyval and not runtime.no_cache:
return runtime.cache.keyval[key]
with runtime.tmp_dir() as tmp_dir:
await plugin_fetch(
runtime.get_plugin_context(), self.type,
self.plugin_fields, tmp_dir,
runtime.display.get_handle(self.name))
await plugin_fetch(runtime.get_plugin_context(), self.type,
self.plugin_fields, tmp_dir,
runtime.display.get_handle(self.name))
tree = await runtime.cache.import_tree(tmp_dir)
# Note that we still *write* to cache even when --no-cache is True.
# That way we avoid confusing results on subsequent syncs.
@ -86,8 +84,8 @@ class Module:
return base_tree
# TODO: Get rid of this with 1.0, and move the self.recursive check up.
if not self.recursion_specified:
runtime.display.print(
'\n'.join(textwrap.wrap(recursion_warning.format(self.name))))
runtime.display.print('\n'.join(
textwrap.wrap(recursion_warning.format(self.name))))
if not self.recursive:
return base_tree
recursive_tree = await imports.get_imports_tree(
@ -126,11 +124,11 @@ class Module:
runtime.display.get_handle(self.name))
output_lines = []
for field, val in reup_fields.items():
if (field not in self.plugin_fields or
val != self.plugin_fields[field]):
if (field not in self.plugin_fields
or val != self.plugin_fields[field]):
output_lines.append(' {}: {}'.format(field, val))
set_module_field_in_file(
runtime.peru_file, self.yaml_name, field, val)
set_module_field_in_file(runtime.peru_file, self.yaml_name,
field, val)
if output_lines and not runtime.quiet:
runtime.display.print('reup ' + self.name)
for line in output_lines:

View File

@ -9,7 +9,6 @@ from .module import Module
from .rule import Rule
from .scope import Scope
DEFAULT_PERU_FILE_NAME = 'peru.yaml'
@ -37,8 +36,7 @@ def _parse_toplevel(blob, name_prefix):
rules = _extract_named_rules(blob, name_prefix)
imports = _extract_multimap_field(blob, 'imports')
if blob:
raise ParserError("Unknown toplevel fields: " +
", ".join(blob.keys()))
raise ParserError("Unknown toplevel fields: " + ", ".join(blob.keys()))
return Scope(modules, rules), imports
@ -63,7 +61,8 @@ def _extract_named_rules(blob, name_prefix):
def _extract_rule(name, blob):
_validate_name(name)
if 'build' in blob:
raise ParserError(textwrap.dedent('''\
raise ParserError(
textwrap.dedent('''\
The "build" field is no longer supported. If you need to
untar/unzip a curl module, use the "unpack" field.'''))
if 'files' in blob:
@ -113,8 +112,8 @@ def _build_module(name, type, blob, yaml_name):
for k, v in plugin_fields.items():
if not isinstance(k, str):
raise ParserError(
'Module field names must be strings. Found "{}".'
.format(repr(k)))
'Module field names must be strings. Found "{}".'.format(
repr(k)))
if isinstance(v, bool):
# Avoid the Python-specific True/False capitalization, to be
# consistent with what people will usually type in YAML.
@ -138,8 +137,8 @@ def _extract_optional_list_field(blob, name):
strings.'''
value = _optional_list(typesafe_pop(blob, name, []))
if value is None:
raise ParserError('"{}" field must be a string or a list.'
.format(name))
raise ParserError(
'"{}" field must be a string or a list.'.format(name))
return value
@ -175,7 +174,7 @@ def _optional_list(value):
produces uniform output for fields that may supply a single value or list
of values, like the `imports` field.'''
if isinstance(value, str):
return (value,)
return (value, )
elif isinstance(value, list):
return tuple(value)
@ -184,8 +183,8 @@ def _optional_list(value):
def typesafe_pop(d, field, default=object()):
if not isinstance(d, dict):
raise ParserError(
'Error parsing peru file: {} is not a map.'.format(repr(d)))
raise ParserError('Error parsing peru file: {} is not a map.'.format(
repr(d)))
if default == typesafe_pop.__defaults__[0]:
return d.pop(field)
else:
@ -194,8 +193,8 @@ def typesafe_pop(d, field, default=object()):
# Code for the duplicate keys warning
DuplicatedKey = collections.namedtuple(
'DuplicatedKey', ['key', 'first_line', 'second_line'])
DuplicatedKey = collections.namedtuple('DuplicatedKey',
['key', 'first_line', 'second_line'])
def _get_line_indentation(line):
@ -233,8 +232,9 @@ def _get_duplicate_keys_approximate(yaml_text):
# Check if the current key is a duplicate.
key = line.split(':')[0].strip()
if key in indent_to_keylines[current_indent]:
duplicates.append(DuplicatedKey(
key, indent_to_keylines[current_indent][key], line_num))
duplicates.append(
DuplicatedKey(key, indent_to_keylines[current_indent][key],
line_num))
# Remember it either way.
indent_to_keylines[current_indent][key] = line_num
return duplicates
@ -250,8 +250,8 @@ def warn_duplicate_keys(file_path):
duplicates = _get_duplicate_keys_approximate(text)
if not duplicates:
return
_warn('WARNING: Duplicate keys found in {}\n'
'These will overwrite each other:',
file_path)
_warn(
'WARNING: Duplicate keys found in {}\n'
'These will overwrite each other:', file_path)
for duplicate in duplicates:
_warn(' "{}" on lines {} and {}', *duplicate)

View File

@ -17,32 +17,31 @@ DEFAULT_PARALLEL_FETCH_LIMIT = 10
DEBUG_PARALLEL_COUNT = 0
DEBUG_PARALLEL_MAX = 0
PluginDefinition = namedtuple(
'PluginDefinition',
['type', 'sync_exe', 'reup_exe', 'fields', 'required_fields',
'optional_fields', 'cache_fields'])
PluginDefinition = namedtuple('PluginDefinition', [
'type', 'sync_exe', 'reup_exe', 'fields', 'required_fields',
'optional_fields', 'cache_fields'
])
PluginContext = namedtuple(
'PluginContext',
['cwd', 'plugin_cache_root', 'parallelism_semaphore', 'plugin_cache_locks',
'tmp_root'])
PluginContext = namedtuple('PluginContext', [
'cwd', 'plugin_cache_root', 'parallelism_semaphore', 'plugin_cache_locks',
'tmp_root'
])
async def plugin_fetch(plugin_context, module_type, module_fields, dest,
display_handle):
display_handle):
env = {'PERU_SYNC_DEST': dest}
await _plugin_job(plugin_context, module_type, module_fields, 'sync',
env, display_handle)
await _plugin_job(plugin_context, module_type, module_fields, 'sync', env,
display_handle)
async def plugin_get_reup_fields(plugin_context, module_type, module_fields,
display_handle):
display_handle):
with tmp_dir(plugin_context) as output_file_dir:
output_path = os.path.join(output_file_dir, 'reup_output')
env = {'PERU_REUP_OUTPUT': output_path}
await _plugin_job(
plugin_context, module_type, module_fields, 'reup', env,
display_handle)
await _plugin_job(plugin_context, module_type, module_fields, 'reup',
env, display_handle)
with open(output_path) as output_file:
fields = yaml.safe_load(output_file) or {}
@ -58,7 +57,7 @@ async def plugin_get_reup_fields(plugin_context, module_type, module_fields,
async def _plugin_job(plugin_context, module_type, module_fields, command, env,
display_handle):
display_handle):
# We take several locks and other context managers in here. Using an
# AsyncExitStack saves us from indentation hell.
async with contextlib.AsyncExitStack() as stack:
@ -70,8 +69,8 @@ async def _plugin_job(plugin_context, module_type, module_fields, command, env,
# as a shell command, rather than exec.
shell_command_line = subprocess.list2cmdline([exe])
complete_env = _plugin_env(
plugin_context, definition, module_fields, command, stack)
complete_env = _plugin_env(plugin_context, definition, module_fields,
command, stack)
complete_env.update(env)
# Use a lock to protect the plugin cache. It would be unsafe for two
@ -80,7 +79,8 @@ async def _plugin_job(plugin_context, module_type, module_fields, command, env,
# fields" as defined by plugin.yaml. For plugins that don't define
# cacheable fields, there is no cache dir (it's set to /dev/null) and
# the cache lock is a no-op.
await stack.enter_async_context(_plugin_cache_lock(plugin_context, definition, module_fields))
await stack.enter_async_context(
_plugin_cache_lock(plugin_context, definition, module_fields))
# Use a semaphore to limit the number of jobs that can run in parallel.
# Most plugin fetches hit the network, and for performance reasons we
@ -98,11 +98,14 @@ async def _plugin_job(plugin_context, module_type, module_fields, command, env,
try:
await create_subprocess_with_handle(
shell_command_line, display_handle, cwd=plugin_context.cwd,
env=complete_env, shell=True)
shell_command_line,
display_handle,
cwd=plugin_context.cwd,
env=complete_env,
shell=True)
except subprocess.CalledProcessError as e:
raise PluginRuntimeError(
module_type, module_fields, e.returncode, e.output)
raise PluginRuntimeError(module_type, module_fields, e.returncode,
e.output)
def _get_plugin_exe(definition, command):
@ -114,10 +117,8 @@ def _get_plugin_exe(definition, command):
raise RuntimeError('Unrecognized command name: ' + repr(command))
if not exe:
raise PluginPermissionsError(
"Module type '{0}' does not support {1}.",
definition.type,
command)
raise PluginPermissionsError("Module type '{0}' does not support {1}.",
definition.type, command)
if not os.path.exists(exe):
raise PluginPermissionsError('Plugin exe is missing: ' + exe)
if not os.access(exe, os.X_OK):
@ -126,28 +127,31 @@ def _get_plugin_exe(definition, command):
def _format_module_fields(module_fields):
return {'PERU_MODULE_{}'.format(name.upper()): value for
name, value in module_fields.items()}
return {
'PERU_MODULE_{}'.format(name.upper()): value
for name, value in module_fields.items()
}
def _validate_plugin_definition(definition, module_fields):
field_names_not_strings = [name for name in definition.fields
if not isinstance(name, str)]
field_names_not_strings = [
name for name in definition.fields if not isinstance(name, str)
]
if field_names_not_strings:
raise PluginModuleFieldError(
'Metadata field names must be strings: ' +
', '.join(repr(name) for name in field_names_not_strings))
raise PluginModuleFieldError('Metadata field names must be strings: ' +
', '.join(
repr(name)
for name in field_names_not_strings))
missing_module_fields = definition.required_fields - module_fields.keys()
if missing_module_fields:
raise PluginModuleFieldError(
'Required module field missing: ' +
', '.join(missing_module_fields))
raise PluginModuleFieldError('Required module field missing: ' +
', '.join(missing_module_fields))
unknown_module_fields = module_fields.keys() - definition.fields
if unknown_module_fields:
raise PluginModuleFieldError(
'Unknown module fields: ' + ', '.join(unknown_module_fields))
raise PluginModuleFieldError('Unknown module fields: ' +
', '.join(unknown_module_fields))
def _plugin_env(plugin_context, plugin_definition, module_fields, command,
@ -200,8 +204,8 @@ def _plugin_cache_path(plugin_context, definition, module_fields):
# This plugin is not cacheable.
return os.devnull
key = _plugin_cache_key(definition, module_fields)
plugin_cache = os.path.join(
plugin_context.plugin_cache_root, definition.type, key)
plugin_cache = os.path.join(plugin_context.plugin_cache_root,
definition.type, key)
makedirs(plugin_cache)
return plugin_cache
@ -210,8 +214,10 @@ def _plugin_cache_key(definition, module_fields):
assert definition.cache_fields, "Can't compute key for uncacheable type."
return cache.compute_key({
'type': definition.type,
'cacheable_fields': {field: module_fields.get(field, None)
for field in definition.cache_fields},
'cacheable_fields': {
field: module_fields.get(field, None)
for field in definition.cache_fields
},
})
@ -226,8 +232,8 @@ def _get_plugin_definition(module_type, module_fields, command):
with open(metadata_path) as metafile:
metadoc = yaml.safe_load(metafile) or {}
sync_exe = os.path.join(root, metadoc.pop('sync exe'))
reup_exe = (None if 'reup exe' not in metadoc
else os.path.join(root, metadoc.pop('reup exe')))
reup_exe = (None if 'reup exe' not in metadoc else os.path.join(
root, metadoc.pop('reup exe')))
required_fields = frozenset(metadoc.pop('required fields'))
optional_fields = frozenset(metadoc.pop('optional fields', []))
cache_fields = frozenset(metadoc.pop('cache fields', []))
@ -238,17 +244,18 @@ def _get_plugin_definition(module_type, module_fields, command):
module_type, metadoc))
overlap = required_fields & optional_fields
if overlap:
raise RuntimeError('Fields in {} are both required and optional: {}'
.format(module_type, overlap))
raise RuntimeError(
'Fields in {} are both required and optional: {}'.format(
module_type, overlap))
invalid = cache_fields - fields
if invalid:
raise RuntimeError(
'"cache fields" must also be either required or optional: ' +
str(invalid))
definition = PluginDefinition(
module_type, sync_exe, reup_exe, fields, required_fields,
optional_fields, cache_fields)
definition = PluginDefinition(module_type, sync_exe, reup_exe, fields,
required_fields, optional_fields,
cache_fields)
_validate_plugin_definition(definition, module_fields)
return definition
@ -265,8 +272,7 @@ def _find_plugin_dir(module_type):
else:
raise PluginCandidateError(
'No plugin found for `{}` module in paths:\n{}'.format(
module_type,
'\n'.join(_get_plugin_install_dirs())))
module_type, '\n'.join(_get_plugin_install_dirs())))
def _get_plugin_install_dirs():
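
For reference, a quick sketch of the _format_module_fields contract shown above; the field values are made up.

# Not part of the commit: module fields become PERU_MODULE_*
# environment variables.
assert _format_module_fields({'url': 'https://example.com'}) == {
    'PERU_MODULE_URL': 'https://example.com'
}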

View File

@ -3,7 +3,6 @@
import distutils.dir_util
import os
distutils.dir_util.copy_tree(
os.environ['PERU_MODULE_PATH'],
os.environ['PERU_SYNC_DEST'],

View File

@ -40,7 +40,7 @@ def format_bytes(num_bytes):
# Truncate floats instead of rounding.
float_str = str(num_bytes / threshold)
decimal_index = float_str.index('.')
truncated_float = float_str[:decimal_index+2]
truncated_float = float_str[:decimal_index + 2]
return truncated_float + unit
return '{}B'.format(num_bytes)
@ -64,8 +64,9 @@ def download_file(request, output_file, stdout=sys.stdout):
if file_size:
percentage = ' {}%'.format(round(100 * bytes_read / file_size))
total_kb = '/' + format_bytes(file_size)
print('downloaded{} {}{}'.format(percentage, kb_downloaded, total_kb),
file=stdout)
print(
'downloaded{} {}{}'.format(percentage, kb_downloaded, total_kb),
file=stdout)
return digest.hexdigest()
@ -88,8 +89,10 @@ def plugin_sync(url, sha1):
digest = download_file(request, output_file)
if sha1 and digest != sha1:
print('Bad checksum!\n url: {}\nexpected: {}\n actual: {}'
.format(url, sha1, digest), file=sys.stderr)
print(
'Bad checksum!\n url: {}\nexpected: {}\n actual: {}'.format(
url, sha1, digest),
file=sys.stderr)
sys.exit(1)
try:

View File

@ -17,7 +17,6 @@ REUP = os.environ['PERU_MODULE_REUP'] or 'master'
# to separate things out by repo url.
CACHE_ROOT = os.environ['PERU_PLUGIN_CACHE']
Result = namedtuple("Result", ["returncode", "output"])
@ -79,8 +78,8 @@ def already_has_rev(repo, rev):
if cat_result.returncode != 0:
return False
# Get the hash for the rev.
parse_result = git('rev-parse', rev, git_dir=repo, checked=False,
capture_output=True)
parse_result = git(
'rev-parse', rev, git_dir=repo, checked=False, capture_output=True)
if parse_result.returncode != 0:
return False
# Only return True for revs that are absolute hashes.
@ -116,8 +115,12 @@ def checkout_submodules(repo_path, rev, work_tree):
sub_relative_path = parser[section]['path']
sub_full_path = os.path.join(work_tree, sub_relative_path)
sub_url = parser[section]['url']
ls_tree = git('ls-tree', rev, sub_relative_path,
git_dir=repo_path, capture_output=True).output
ls_tree = git(
'ls-tree',
rev,
sub_relative_path,
git_dir=repo_path,
capture_output=True).output
# Normally when you run `git submodule add ...`, git puts two things in
# your repo: an entry in .gitmodules, and a commit object at the
# appropriate path inside your repo. However, it's possible for those
@ -140,8 +143,8 @@ def plugin_reup():
reup_output = os.environ['PERU_REUP_OUTPUT']
repo_path = clone_if_needed(URL)
git_fetch(URL, repo_path)
output = git('rev-parse', REUP, git_dir=repo_path,
capture_output=True).output
output = git(
'rev-parse', REUP, git_dir=repo_path, capture_output=True).output
with open(reup_output, 'w') as out_file:
print('rev:', output.strip(), file=out_file)

View File

@ -11,7 +11,6 @@ URL = os.environ['PERU_MODULE_URL']
REV = os.environ['PERU_MODULE_REV'] or 'default'
REUP = os.environ['PERU_MODULE_REUP'] or 'default'
Result = namedtuple("Result", ["returncode", "output"])
@ -27,8 +26,11 @@ def hg(*args, hg_dir=None, capture_output=False, checked=True):
stdout = subprocess.PIPE if capture_output else None
# Always let stderr print to the caller.
process = subprocess.Popen(command, stdin=subprocess.DEVNULL,
stdout=stdout, universal_newlines=True)
process = subprocess.Popen(
command,
stdin=subprocess.DEVNULL,
stdout=stdout,
universal_newlines=True)
output, _ = process.communicate()
if checked and process.returncode != 0:
sys.exit(1)
@ -48,7 +50,8 @@ def configure(repo_path):
# Set configs needed for cached repos.
hgrc_path = os.path.join(repo_path, '.hg', 'hgrc')
with open(hgrc_path, 'a') as f:
f.write(textwrap.dedent('''\
f.write(
textwrap.dedent('''\
[ui]
# prevent 'hg archive' from creating '.hg_archival.txt' files.
archivemeta = false
@ -61,8 +64,14 @@ def hg_pull(url, repo_path):
def already_has_rev(repo, rev):
res = hg('identify', '--debug', '--rev', rev, hg_dir=repo,
capture_output=True, checked=False)
res = hg(
'identify',
'--debug',
'--rev',
rev,
hg_dir=repo,
capture_output=True,
checked=False)
if res.returncode != 0:
return False
@ -88,8 +97,13 @@ def plugin_reup():
clone_if_needed(URL, CACHE_PATH)
hg_pull(URL, CACHE_PATH)
output = hg('identify', '--debug', '--rev', REUP, hg_dir=CACHE_PATH,
capture_output=True).output
output = hg(
'identify',
'--debug',
'--rev',
REUP,
hg_dir=CACHE_PATH,
capture_output=True).output
with open(reup_output, 'w') as output_file:
print('rev:', output.split()[0], file=output_file)

View File

@ -15,7 +15,10 @@ def svn(*args, svn_dir=None, capture_output=False):
stdout = subprocess.PIPE if capture_output else None
# Always let stderr print to the caller.
process = subprocess.Popen(
command, stdin=subprocess.DEVNULL, stdout=stdout, cwd=svn_dir,
command,
stdin=subprocess.DEVNULL,
stdout=stdout,
cwd=svn_dir,
universal_newlines=True)
output, _ = process.communicate()
if process.returncode != 0:
@ -38,13 +41,8 @@ def remote_head_rev(url):
def plugin_sync():
# Just fetch the target revision and strip the metadata.
# Plugin-level caching for Subversion is futile.
svn(
'export',
'--force',
'--revision',
os.environ['PERU_MODULE_REV'] or 'HEAD',
os.environ['PERU_MODULE_URL'],
os.environ['PERU_SYNC_DEST'])
svn('export', '--force', '--revision', os.environ['PERU_MODULE_REV']
or 'HEAD', os.environ['PERU_MODULE_URL'], os.environ['PERU_SYNC_DEST'])
def plugin_reup():

View File

@ -43,16 +43,14 @@ class Rule:
if self.move:
tree = await move_files(runtime.cache, tree, self.move)
if self.drop:
tree = await drop_files(
runtime.cache, tree, self.drop)
tree = await drop_files(runtime.cache, tree, self.drop)
if self.pick:
tree = await pick_files(runtime.cache, tree, self.pick)
if self.executable:
tree = await make_files_executable(
runtime.cache, tree, self.executable)
tree = await make_files_executable(runtime.cache, tree,
self.executable)
if self.export:
tree = await get_export_tree(
runtime.cache, tree, self.export)
tree = await get_export_tree(runtime.cache, tree, self.export)
runtime.cache.keyval[key] = tree
@ -77,15 +75,15 @@ async def _copy_files_modifications(_cache, tree, paths_multimap):
dest_is_dir = (dest_info.type == cache.TREE_TYPE)
adjusted_dest = dest
if dest_is_dir:
adjusted_dest = str(PurePosixPath(dest) /
PurePosixPath(source).name)
adjusted_dest = str(
PurePosixPath(dest) / PurePosixPath(source).name)
modifications[adjusted_dest] = source_info
return modifications
async def copy_files(_cache, tree, paths_multimap):
modifications = await _copy_files_modifications(
_cache, tree, paths_multimap)
modifications = await _copy_files_modifications(_cache, tree,
paths_multimap)
tree = await _cache.modify_tree(tree, modifications)
return tree
@ -94,8 +92,8 @@ async def move_files(_cache, tree, paths_multimap):
# First obtain the copies from the original tree. Moves are not ordered but
# happen all at once, so if you move a->b and b->c, the contents of c will
# always end up being b rather than a.
modifications = await _copy_files_modifications(
_cache, tree, paths_multimap)
modifications = await _copy_files_modifications(_cache, tree,
paths_multimap)
# Now add in deletions, but be careful not to delete a file that just got
# moved. Note that if "a" gets moved into "dir", it will end up at "dir/a",
# even if "dir" is deleted (because modify_tree always modifies parents
@ -156,12 +154,12 @@ async def make_files_executable(_cache, tree, globs_list):
async def get_export_tree(_cache, tree, export_path):
entries = await _cache.ls_tree(tree, export_path)
if not entries:
raise NoMatchingFilesError('Export path "{}" doesn\'t exist.'
.format(export_path))
raise NoMatchingFilesError(
'Export path "{}" doesn\'t exist.'.format(export_path))
entry = list(entries.values())[0]
if entry.type != cache.TREE_TYPE:
raise NoMatchingFilesError('Export path "{}" is not a directory.'
.format(export_path))
raise NoMatchingFilesError(
'Export path "{}" is not a directory.'.format(export_path))
return entry.hash

View File

@ -30,8 +30,8 @@ class _Runtime:
self._tmp_root = os.path.join(self.state_dir, 'tmp')
compat.makedirs(self._tmp_root)
self.overrides = KeyVal(os.path.join(self.state_dir, 'overrides'),
self._tmp_root)
self.overrides = KeyVal(
os.path.join(self.state_dir, 'overrides'), self._tmp_root)
self._used_overrides = set()
self.force = args.get('--force', False)
@ -71,17 +71,16 @@ class _Runtime:
self.peru_file = explicit_peru_file
self.sync_dir = explicit_sync_dir
elif explicit_peru_file or explicit_sync_dir:
raise CommandLineError(
'If the --file or --sync-dir is set, '
'the other must also be set.')
raise CommandLineError('If the --file or --sync-dir is set, '
'the other must also be set.')
else:
basename = explicit_basename or parser.DEFAULT_PERU_FILE_NAME
self.peru_file = find_project_file(os.getcwd(), basename)
self.sync_dir = os.path.dirname(self.peru_file)
self.state_dir = (args['--state-dir'] or
os.path.join(self.sync_dir, '.peru'))
self.cache_dir = (args['--cache-dir'] or env.get('PERU_CACHE_DIR') or
os.path.join(self.state_dir, 'cache'))
self.state_dir = (args['--state-dir']
or os.path.join(self.sync_dir, '.peru'))
self.cache_dir = (args['--cache-dir'] or env.get('PERU_CACHE_DIR')
or os.path.join(self.state_dir, 'cache'))
def tmp_dir(self):
dir = tempfile.TemporaryDirectory(dir=self._tmp_root)
@ -134,8 +133,8 @@ class _Runtime:
return
self.display.print('syncing with overrides:')
for name in names:
self.display.print(' {}: {}'.format(
name, self.get_override(name)))
self.display.print(' {}: {}'.format(name,
self.get_override(name)))
def warn_unused_overrides(self):
if self.quiet or self.no_overrides:

View File

@ -1,6 +1,5 @@
from .error import PrintableError
SCOPE_SEPARATOR = '.'
RULE_SEPARATOR = '|'
@ -27,18 +26,21 @@ class Scope:
rules.append(rule)
return module, tuple(rules)
async def resolve_module(self, runtime, module_str, logging_target_name=None):
async def resolve_module(self,
runtime,
module_str,
logging_target_name=None):
logging_target_name = logging_target_name or module_str
module_names = module_str.split(SCOPE_SEPARATOR)
return (await self._resolve_module_from_names(
runtime, module_names, logging_target_name))
return (await self._resolve_module_from_names(runtime, module_names,
logging_target_name))
async def _resolve_module_from_names(self, runtime, module_names,
logging_target_name):
logging_target_name):
next_module = self._get_module_checked(module_names[0])
for name in module_names[1:]:
next_scope = await _get_scope_or_fail(
runtime, logging_target_name, next_module)
next_scope = await _get_scope_or_fail(runtime, logging_target_name,
next_module)
if name not in next_scope.modules:
_error(logging_target_name, 'module {} not found in {}', name,
next_module.name)
@ -53,8 +55,8 @@ class Scope:
if module_names:
module = await self._resolve_module_from_names(
runtime, module_names, logging_target_name)
scope = await _get_scope_or_fail(
runtime, logging_target_name, module)
scope = await _get_scope_or_fail(runtime, logging_target_name,
module)
location_str = ' in module ' + module.name
if rule_name not in scope.rules:
_error(logging_target_name, 'rule {} not found{}', rule_name,
@ -65,8 +67,8 @@ class Scope:
for name in names:
if SCOPE_SEPARATOR in name:
raise PrintableError(
'Can\'t reup module "{}"; it belongs to another project.'
.format(name))
'Can\'t reup module "{}"; it belongs to another project.'.
format(name))
return [self._get_module_checked(name) for name in names]
def _get_module_checked(self, name):

View File

@ -25,9 +25,10 @@ def get_all_resources_filepaths():
resources_paths = ['VERSION']
resources_dir = os.path.join(module_root, 'resources')
for dirpath, dirnames, filenames in os.walk(resources_dir):
relpaths = [os.path.relpath(os.path.join(dirpath, f),
start=module_root)
for f in filenames]
relpaths = [
os.path.relpath(os.path.join(dirpath, f), start=module_root)
for f in filenames
]
resources_paths.extend(relpaths)
return resources_paths
@ -45,14 +46,12 @@ setuptools.setup(
version=get_version(),
url='https://github.com/buildinspace/peru',
author="Jack O'Connor <oconnor663@gmail.com>, "
"Sean Olson <olson.sean.k@gmail.com>",
"Sean Olson <olson.sean.k@gmail.com>",
license='MIT',
packages=['peru'],
package_data={'peru': get_all_resources_filepaths()},
entry_points={
'console_scripts': [
'peru=peru.main:main',
]
},
entry_points={'console_scripts': [
'peru=peru.main:main',
]},
install_requires=get_install_requires(),
)

test.py (15 changed lines)
View File

@ -9,10 +9,10 @@ TESTS_DIR = os.path.join(REPO_ROOT, 'tests')
def get_untracked_files():
output = subprocess.check_output(
['git', 'ls-files', '--other', '--directory', '--exclude-standard',
'-z'],
cwd=REPO_ROOT)
output = subprocess.check_output([
'git', 'ls-files', '--other', '--directory', '--exclude-standard', '-z'
],
cwd=REPO_ROOT)
return set(f for f in output.split(b'\0') if f)
@ -50,9 +50,10 @@ def main():
new_untracked = get_untracked_files()
if old_untracked != new_untracked:
print('Tests created untracked files:\n' +
'\n'.join(f.decode() for f in new_untracked - old_untracked),
file=sys.stderr)
print(
'Tests created untracked files:\n' + '\n'.join(
f.decode() for f in new_untracked - old_untracked),
file=sys.stderr)
sys.exit(1)
# Run the linter.

View File

@ -15,10 +15,8 @@ from peru.async_helpers import run_task
from peru.compat import makedirs
import peru.main
test_resources = Path(__file__).parent.resolve() / 'resources'
# Colons are a reserved character on Windows, so tests that cover filenames
# with colons need to do something else.
COLON = ':'
@ -31,9 +29,11 @@ def make_synchronous(f):
synchronous code, so for example test methods can be coroutines. It does
NOT let you call coroutines as regular functions *inside* another
coroutine. That will raise an "Event loop is running" error.'''
@functools.wraps(f)
def wrapper(*args, **kwargs):
return run_task(f(*args, **kwargs))
return wrapper
@ -96,16 +96,24 @@ def read_dir(startdir, *, excludes=(), binary=False):
def _format_contents(contents):
return ['{}: {}\n'.format(file, repr(contents[file]))
for file in sorted(contents.keys())]
return [
'{}: {}\n'.format(file, repr(contents[file]))
for file in sorted(contents.keys())
]
def assert_contents(dir, expected_contents, *, message='', excludes=(),
def assert_contents(dir,
expected_contents,
*,
message='',
excludes=(),
binary=False):
dir = Path(dir)
expected_contents = {Path(key): val for key, val
in expected_contents.items()}
actual_contents = read_dir(dir, excludes=excludes, binary=binary)
expected_contents = {
Path(key): val
for key, val in expected_contents.items()
}
actual_contents = read_dir(dir, excludes=excludes, binary=binary)
if expected_contents == actual_contents:
return
# Make sure we didn't exclude files we were checking for.
@ -113,13 +121,15 @@ def assert_contents(dir, expected_contents, *, message='', excludes=(),
excluded_files = full_contents.keys() - actual_contents.keys()
excluded_missing = expected_contents.keys() & excluded_files
if excluded_missing:
raise AssertionError('EXPECTED FILES WERE EXCLUDED FROM THE TEST: {}'
.format(excluded_missing))
raise AssertionError('EXPECTED FILES WERE EXCLUDED FROM THE TEST: {}'.
format(excluded_missing))
# Make a diff against expected and throw.
assertion_msg = "Contents didn't match:\n" + ''.join(
difflib.unified_diff(_format_contents(expected_contents),
_format_contents(actual_contents),
fromfile='expected', tofile='actual')).strip()
difflib.unified_diff(
_format_contents(expected_contents),
_format_contents(actual_contents),
fromfile='expected',
tofile='actual')).strip()
if message:
assertion_msg += '\n' + message
raise AssertionError(assertion_msg)
@ -193,7 +203,8 @@ class HgRepo(Repo):
self.run('hg', 'init')
hgrc_path = os.path.join(content_dir, '.hg', 'hgrc')
with open(hgrc_path, 'a') as f:
f.write(textwrap.dedent('''\
f.write(
textwrap.dedent('''\
[ui]
username = peru <peru>
'''))
@ -208,8 +219,8 @@ class SvnRepo(Repo):
self.url = Path(repo_dir).as_uri()
self.run('svnadmin', 'create', '.')
self.run('svn', 'import', content_dir, self.url,
'-m', 'initial commit')
self.run('svn', 'import', content_dir, self.url, '-m',
'initial commit')
def _check_executable(path, expectation):
@ -217,9 +228,8 @@ def _check_executable(path, expectation):
# Windows doesn't support the executable flag. Skip the check.
return
mode = Path(path).stat().st_mode
is_executable = (mode & stat.S_IXUSR != 0 and
mode & stat.S_IXGRP != 0 and
mode & stat.S_IXOTH != 0)
is_executable = (mode & stat.S_IXUSR != 0 and mode & stat.S_IXGRP != 0
and mode & stat.S_IXOTH != 0)
message = 'Expected {} to be {}executable.'.format(
path, 'not ' if not expectation else '')
assert is_executable == expectation, message
@ -242,10 +252,10 @@ class PeruTest(unittest.TestCase):
super().__init__(*args, **kwargs)
# Complain if it looks like an important test function is a generator.
for name in dir(self):
is_test = (name.startswith('test') or
name in ('setUp', 'tearDown'))
is_test = (name.startswith('test')
or name in ('setUp', 'tearDown'))
is_generator = inspect.isgeneratorfunction(getattr(self, name))
if is_test and is_generator:
raise TypeError("{}() is a generator, which makes it a silent "
"no-op!\nUse @make_synchronous or something."
.format(type(self).__name__ + '.' + name))
"no-op!\nUse @make_synchronous or something.".
format(type(self).__name__ + '.' + name))

View File

@ -7,12 +7,13 @@ from shared import PeruTest, make_synchronous
class AsyncTest(PeruTest):
@make_synchronous
async def test_safe_communicate(self):
# Test safe_communicate with both empty and non-empty input.
cat_command = [sys.executable, "-c",
"import sys; sys.stdout.write(sys.stdin.read())"]
cat_command = [
sys.executable, "-c",
"import sys; sys.stdout.write(sys.stdin.read())"
]
proc_empty = await asyncio.create_subprocess_exec(
*cat_command, stdin=PIPE, stdout=PIPE)
@ -21,8 +22,7 @@ class AsyncTest(PeruTest):
proc_nonempty = await asyncio.create_subprocess_exec(
*cat_command, stdin=PIPE, stdout=PIPE)
stdout, _ = await safe_communicate(
proc_nonempty, b"foo bar baz")
stdout, _ = await safe_communicate(proc_nonempty, b"foo bar baz")
self.assertEqual(stdout, b"foo bar baz")
# And test a case with None input as well.

View File

@ -34,8 +34,7 @@ class CacheTest(PeruTest):
await self.cache.export_tree(self.content_tree, export_dir)
assert_contents(export_dir, dirty_content)
# But it should succeed with the force flag.
await self.cache.export_tree(
self.content_tree, export_dir, force=True)
await self.cache.export_tree(self.content_tree, export_dir, force=True)
assert_contents(export_dir, self.content)
@make_synchronous
@ -50,7 +49,9 @@ class CacheTest(PeruTest):
self.content_tree, export_dir, previous_tree=self.content_tree)
# But it should succeed with the --force flag.
await self.cache.export_tree(
self.content_tree, export_dir, force=True,
self.content_tree,
export_dir,
force=True,
previous_tree=self.content_tree)
assert_contents(export_dir, self.content)
@ -75,14 +76,11 @@ class CacheTest(PeruTest):
async def test_import_with_files(self):
# Include a leading colon, to check that we escape pathspecs correctly
# with a leading ./
all_content = {'foo': '',
'bar': '',
COLON + 'baz/bing': ''}
all_content = {'foo': '', 'bar': '', COLON + 'baz/bing': ''}
test_dir = create_dir(all_content)
tree = await self.cache.import_tree(
test_dir, picks=['foo', COLON + 'baz'])
expected_content = {'foo': '',
COLON + 'baz/bing': ''}
expected_content = {'foo': '', COLON + 'baz/bing': ''}
out_dir = create_dir()
await self.cache.export_tree(tree, out_dir)
assert_contents(out_dir, expected_content)
@ -155,8 +153,10 @@ class CacheTest(PeruTest):
no_conflict_dirty_content['b/c'] += ' dirty'
no_conflict_dirty_dir = create_dir(no_conflict_dirty_content)
with self.assertRaises(peru.cache.DirtyWorkingCopyError):
await self.cache.export_tree(new_tree, no_conflict_dirty_dir,
previous_tree=self.content_tree)
await self.cache.export_tree(
new_tree,
no_conflict_dirty_dir,
previous_tree=self.content_tree)
@make_synchronous
async def test_missing_files_in_previous_tree(self):
@ -175,8 +175,8 @@ class CacheTest(PeruTest):
@make_synchronous
async def test_merge_trees(self):
merged_tree = await self.cache.merge_trees(
self.content_tree, self.content_tree, 'subdir')
merged_tree = await self.cache.merge_trees(self.content_tree,
self.content_tree, 'subdir')
expected_content = dict(self.content)
for path, content in self.content.items():
expected_content[os.path.join('subdir', path)] = content
@ -186,8 +186,8 @@ class CacheTest(PeruTest):
with self.assertRaises(peru.cache.MergeConflictError):
# subdir/ is already populated, so this merge should throw.
await self.cache.merge_trees(
merged_tree, self.content_tree, 'subdir')
await self.cache.merge_trees(merged_tree, self.content_tree,
'subdir')
@make_synchronous
async def test_merge_with_deep_prefix(self):
@ -206,8 +206,7 @@ class CacheTest(PeruTest):
async def test_read_file(self):
# Include a leading colon, to check that we escape pathspecs correctly
# with a leading ./
all_content = {'a': 'foo',
COLON + 'b/c': 'bar'}
all_content = {'a': 'foo', COLON + 'b/c': 'bar'}
test_dir = create_dir(all_content)
tree = await self.cache.import_tree(test_dir)
a_content = await self.cache.read_file(tree, 'a')
@ -235,12 +234,18 @@ class CacheTest(PeruTest):
@make_synchronous
async def test_import_with_specific_dir(self):
await self.do_excludes_and_files_test(
excludes=[], picks=['b'], expected={'b/c': 'bar', 'b/d': 'baz'})
excludes=[], picks=['b'], expected={
'b/c': 'bar',
'b/d': 'baz'
})
@make_synchronous
async def test_import_with_excluded_file(self):
await self.do_excludes_and_files_test(
excludes=['a'], picks=[], expected={'b/c': 'bar', 'b/d': 'baz'})
excludes=['a'], picks=[], expected={
'b/c': 'bar',
'b/d': 'baz'
})
@make_synchronous
async def test_import_with_excluded_dir(self):
@ -256,41 +261,42 @@ class CacheTest(PeruTest):
async def test_ls_tree(self):
# Use the recursive case to get valid entries for each file. We could
# hardcode these, but it would be messy and annoying to maintain.
entries = await self.cache.ls_tree(
self.content_tree, recursive=True)
entries = await self.cache.ls_tree(self.content_tree, recursive=True)
assert entries.keys() == {'a', 'b', 'b/c', 'b/d'}
assert (entries['a'].type == entries['b/c'].type ==
entries['b/d'].type == peru.cache.BLOB_TYPE)
assert (entries['a'].type == entries['b/c'].type == entries['b/d'].type
== peru.cache.BLOB_TYPE)
assert entries['b'].type == peru.cache.TREE_TYPE
# Check the non-recursive, non-path case.
self.assertDictEqual(
{'a': entries['a'], 'b': entries['b']},
(await self.cache.ls_tree(self.content_tree)))
self.assertDictEqual({
'a': entries['a'],
'b': entries['b']
}, (await self.cache.ls_tree(self.content_tree)))
# Check the single file case, and make sure paths are normalized.
self.assertDictEqual(
{'b/c': entries['b/c']},
(await self.cache.ls_tree(self.content_tree, 'b/c//./')))
self.assertDictEqual({
'b/c': entries['b/c']
}, (await self.cache.ls_tree(self.content_tree, 'b/c//./')))
# Check the single dir case. (Trailing slash shouldn't matter, because
# we normalize it, but git will do the wrong thing if we forget
# normalization.)
self.assertDictEqual(
{'b': entries['b']},
(await self.cache.ls_tree(self.content_tree, 'b/')))
self.assertDictEqual({
'b': entries['b']
}, (await self.cache.ls_tree(self.content_tree, 'b/')))
# Check the recursive dir case.
self.assertDictEqual(
{'b': entries['b'], 'b/c': entries['b/c'], 'b/d': entries['b/d']},
(await self.cache.ls_tree(
self.content_tree, 'b', recursive=True)))
self.assertDictEqual({
'b': entries['b'],
'b/c': entries['b/c'],
'b/d': entries['b/d']
}, (await self.cache.ls_tree(self.content_tree, 'b', recursive=True)))
# Make sure that we don't skip over a target file in recursive mode.
self.assertDictEqual(
{'b/c': entries['b/c']},
(await self.cache.ls_tree(
self.content_tree, 'b/c', recursive=True)))
self.assertDictEqual({
'b/c': entries['b/c']
}, (await self.cache.ls_tree(self.content_tree, 'b/c',
recursive=True)))
@make_synchronous
async def test_modify_tree(self):
@ -300,32 +306,50 @@ class CacheTest(PeruTest):
cases = []
# Test regular deletions.
cases.append(({'a': None},
{'b/c': 'bar'}))
cases.append(({'a//./': None}, # Paths should get normalized.
{'b/c': 'bar'}))
cases.append(({'b': None},
{'a': 'foo'}))
cases.append(({'b/c': None},
{'a': 'foo'}))
cases.append(({'x/y/z': None},
{'a': 'foo', 'b/c': 'bar'}))
cases.append(({'b/x': None},
{'a': 'foo', 'b/c': 'bar'}))
cases.append(({'a': None}, {'b/c': 'bar'}))
cases.append((
{
'a//./': None
}, # Paths should get normalized.
{
'b/c': 'bar'
}))
cases.append(({'b': None}, {'a': 'foo'}))
cases.append(({'b/c': None}, {'a': 'foo'}))
cases.append(({'x/y/z': None}, {'a': 'foo', 'b/c': 'bar'}))
cases.append(({'b/x': None}, {'a': 'foo', 'b/c': 'bar'}))
# Test the case where we try to delete below a file.
cases.append(({'a/x': None},
{'a': 'foo', 'b/c': 'bar'}))
cases.append(({'a/x': None}, {'a': 'foo', 'b/c': 'bar'}))
# Test insertions.
cases.append(({'b': entries['a']},
{'a': 'foo', 'b': 'foo'}))
cases.append(({'x': entries['a']},
{'a': 'foo', 'x': 'foo', 'b/c': 'bar'}))
cases.append(({'x': entries['b']},
{'a': 'foo', 'b/c': 'bar', 'x/c': 'bar'}))
cases.append(({'d/e/f': entries['a']},
{'a': 'foo', 'b/c': 'bar', 'd/e/f': 'foo'}))
cases.append(({'d/e/f': entries['b']},
{'a': 'foo', 'b/c': 'bar', 'd/e/f/c': 'bar'}))
cases.append(({'b': entries['a']}, {'a': 'foo', 'b': 'foo'}))
cases.append(({
'x': entries['a']
}, {
'a': 'foo',
'x': 'foo',
'b/c': 'bar'
}))
cases.append(({
'x': entries['b']
}, {
'a': 'foo',
'b/c': 'bar',
'x/c': 'bar'
}))
cases.append(({
'd/e/f': entries['a']
}, {
'a': 'foo',
'b/c': 'bar',
'd/e/f': 'foo'
}))
cases.append(({
'd/e/f': entries['b']
}, {
'a': 'foo',
'b/c': 'bar',
'd/e/f/c': 'bar'
}))
for modifications, result in cases:
modified_tree = await self.cache.modify_tree(
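The hunk cuts off mid-call, but the shape of the cases suggests the rest of the loop: apply each modifications dict (path mapped to a new entry, or None for deletion) to the base tree, then check the resulting contents. A hedged reconstruction, with the arguments assumed rather than taken from the source:

# for modifications, result in cases:
#     modified_tree = await self.cache.modify_tree(
#         self.content_tree, modifications)  # assumed arguments
#     await shared.assert_tree_contents(self.cache, modified_tree, result)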
@@ -386,12 +410,16 @@ class CacheTest(PeruTest):
index_dir = create_dir()
index_file = os.path.join(index_dir, 'test_index_file')
await self.cache.export_tree(
self.content_tree, export_dir, previous_tree=self.content_tree,
self.content_tree,
export_dir,
previous_tree=self.content_tree,
previous_index_file=index_file)
# Finally, touch a again and rerun the export using the cached index.
bump_mtime_one_minute()
await self.cache.export_tree(
self.content_tree, export_dir, previous_tree=self.content_tree,
self.content_tree,
export_dir,
previous_tree=self.content_tree,
previous_index_file=index_file)
@make_synchronous

View File

@@ -5,7 +5,6 @@ import shared
class CompatTest(shared.PeruTest):
def test_makedirs(self):
tmp_dir = shared.tmp_dir()
foo_dir = os.path.join(tmp_dir, "foo")

View File

@@ -7,8 +7,9 @@ import peru
import shared
curl_plugin_path = abspath(
join(dirname(peru.__file__), 'resources', 'plugins', 'curl',
'curl_plugin.py'))
join(
dirname(peru.__file__), 'resources', 'plugins', 'curl',
'curl_plugin.py'))
loader = importlib.machinery.SourceFileLoader("curl_plugin", curl_plugin_path)
curl_plugin = loader.load_module()
@@ -41,29 +42,22 @@ class CurlPluginTest(shared.PeruTest):
self.assertEqual('index.html',
curl_plugin.get_request_filename(request))
request.url = 'http://www.example.com/foo'
self.assertEqual('foo',
curl_plugin.get_request_filename(request))
request._info = {'Content-Disposition':
'attachment; filename=bar'}
self.assertEqual('bar',
curl_plugin.get_request_filename(request))
self.assertEqual('foo', curl_plugin.get_request_filename(request))
request._info = {'Content-Disposition': 'attachment; filename=bar'}
self.assertEqual('bar', curl_plugin.get_request_filename(request))
# Check quoted filenames.
request._info = {'Content-Disposition':
'attachment; filename="bar"'}
self.assertEqual('bar',
curl_plugin.get_request_filename(request))
request._info = {'Content-Disposition': 'attachment; filename="bar"'}
self.assertEqual('bar', curl_plugin.get_request_filename(request))
# Check backslashed quotes in filenames.
request._info = {'Content-Disposition':
'attachment; filename="bar\\""'}
self.assertEqual('bar"',
curl_plugin.get_request_filename(request))
request._info = {
'Content-Disposition': 'attachment; filename="bar\\""'
}
self.assertEqual('bar"', curl_plugin.get_request_filename(request))
def test_download_file_with_length(self):
content = b'xy' * 4096
request = MockRequest(
'some url',
{'Content-Length': len(content)},
content)
request = MockRequest('some url', {'Content-Length': len(content)},
content)
stdout = io.StringIO()
output_file = io.BytesIO()
sha1 = curl_plugin.download_file(request, output_file, stdout)
@@ -79,9 +73,7 @@ class CurlPluginTest(shared.PeruTest):
stdout = io.StringIO()
output_file = io.BytesIO()
sha1 = curl_plugin.download_file(request, output_file, stdout)
self.assertEqual(
'downloaded 3B\n',
stdout.getvalue())
self.assertEqual('downloaded 3B\n', stdout.getvalue())
self.assertEqual(content, output_file.getvalue())
self.assertEqual(hashlib.sha1(content).hexdigest(), sha1)
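Taken together, the two download tests pin down download_file's contract: stream the response into output_file in chunks, write a progress line to stdout, and return the SHA1 hex digest of everything written. A rough sketch of that contract, not the plugin's exact code, and with a simplified progress line:

import hashlib

def download_file_sketch(request, output_file, stdout):
    digest = hashlib.sha1()
    written = 0
    while True:
        chunk = request.read(8192)
        if not chunk:
            break
        digest.update(chunk)
        output_file.write(chunk)
        written += len(chunk)
    stdout.write('downloaded {}B\n'.format(written))  # simplified progress
    return digest.hexdigest()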

View File

@@ -109,6 +109,7 @@ class DisplayTest(shared.PeruTest):
class FakeTerminal:
'''Emulates a terminal by keeping track of a list of lines. Knows how to
interpret the ANSI escape sequences that are used by FancyDisplay.'''
def __init__(self):
self.lines = [io.StringIO()]
self.cursor_line = 0
@@ -117,11 +118,10 @@ class FakeTerminal:
self.flushed = False
def write(self, string):
tokens = [display.ANSI_DISABLE_LINE_WRAP,
display.ANSI_ENABLE_LINE_WRAP,
display.ANSI_CLEAR_LINE,
display.ANSI_CURSOR_UP_ONE_LINE,
'\n']
tokens = [
display.ANSI_DISABLE_LINE_WRAP, display.ANSI_ENABLE_LINE_WRAP,
display.ANSI_CLEAR_LINE, display.ANSI_CURSOR_UP_ONE_LINE, '\n'
]
# The parens make this a capturing expression, so the tokens will be
# included in re.split()'s return list.
token_expr = '(' + '|'.join(re.escape(token) for token in tokens) + ')'
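A self-contained illustration of the re.split() behavior that comment relies on, using stand-in token values rather than the real ANSI constants from the display module:

import re

tokens = ['\x1b[2K', '\n']  # stand-ins, not display's actual constants
token_expr = '(' + '|'.join(re.escape(token) for token in tokens) + ')'
# Because the whole pattern is a capturing group, the delimiters are kept:
print(re.split(token_expr, 'one\ntwo\x1b[2K'))
# -> ['one', '\n', 'two', '\x1b[2K', '']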

View File

@@ -5,7 +5,6 @@ import yaml
from peru import edit_yaml
import shared
yaml_template = dedent("""\
a:
b: [1, 2, 3]
@@ -15,7 +14,6 @@ yaml_template = dedent("""\
class EditYamlTest(shared.PeruTest):
def test_replace(self):
start_yaml = yaml_template.format("foo")
new_yaml = edit_yaml.set_module_field(start_yaml, "a", "c", "bar")
@@ -40,11 +38,13 @@ class EditYamlTest(shared.PeruTest):
new_yaml = edit_yaml.set_module_field(intermediate, 'a', 'd', '.0')
expected_yaml = start_yaml + ' c: "5"\n d: ".0"\n'
self.assertEqual(expected_yaml, new_yaml)
self.assertDictEqual(yaml.safe_load(new_yaml), {'a': {
'b': 'foo',
'c': '5',
'd': '.0',
}})
self.assertDictEqual(
yaml.safe_load(new_yaml),
{'a': {
'b': 'foo',
'c': '5',
'd': '.0',
}})
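The quoting in the expected text is the point of this assertion: left unquoted, YAML would re-parse '5' and '.0' as numbers instead of strings. A quick standalone check with PyYAML:

import yaml

assert yaml.safe_load('x: 5') == {'x': 5}      # parsed as an int
assert yaml.safe_load('x: .0') == {'x': 0.0}   # parsed as a float
assert yaml.safe_load('x: "5"') == {'x': '5'}  # quoted, stays a string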
def test_insert_with_last_field_as_dict(self):
start_yaml = dedent("""\

View File

@@ -23,53 +23,49 @@ class GlobTest(shared.PeruTest):
]
for input, output in cases:
self.assertEqual(
output,
glob.split_on_stars_interpreting_backslashes(input),
output, glob.split_on_stars_interpreting_backslashes(input),
'Failed split for input {}'.format(input))
def test_glob_to_path_regex(self):
Case = collections.namedtuple('Case', ['glob', 'matches', 'excludes'])
cases = [
Case(glob='a/b/c',
matches=['a/b/c'],
excludes=['a/b', 'a/b/c/', '/a/b/c', 'a/b/c/d']),
Case(
glob='a/b/c',
matches=['a/b/c'],
excludes=['a/b', 'a/b/c/', '/a/b/c', 'a/b/c/d']),
# * should be able to match nothing.
Case(glob='a/*b/c',
matches=['a/b/c', 'a/xb/c'],
excludes=['a/x/c', 'a/c', 'a//c']),
Case(
glob='a/*b/c',
matches=['a/b/c', 'a/xb/c'],
excludes=['a/x/c', 'a/c', 'a//c']),
# But * by itself should never match an empty path component.
Case(glob='a/*/c',
matches=['a/b/c', 'a/boooo/c', 'a/*/c'],
excludes=['a/c', 'a/b/d/c', 'a//c']),
Case(
glob='a/*/c',
matches=['a/b/c', 'a/boooo/c', 'a/*/c'],
excludes=['a/c', 'a/b/d/c', 'a//c']),
# Similarly, ** does not match empty path components. It's tempting
# to allow this, but we never want '**/c' to match '/c'.
Case(glob='a/**/c',
matches=['a/b/c', 'a/d/e/f/g/c', 'a/c'],
excludes=['a/b/c/d', 'x/a/c', 'a//c']),
Case(glob='a/**/**/c',
matches=['a/b/c', 'a/d/e/f/g/c', 'a/c'],
excludes=['a/b/c/d', 'x/a/c', 'a//c']),
Case(glob='**/c',
matches=['a/b/c', 'c'],
excludes=['/c', 'c/d']),
Case(glob='**/*/c',
matches=['a/b/c', 'a/c'],
excludes=['c', '/c']),
Case(
glob='a/**/c',
matches=['a/b/c', 'a/d/e/f/g/c', 'a/c'],
excludes=['a/b/c/d', 'x/a/c', 'a//c']),
Case(
glob='a/**/**/c',
matches=['a/b/c', 'a/d/e/f/g/c', 'a/c'],
excludes=['a/b/c/d', 'x/a/c', 'a//c']),
Case(glob='**/c', matches=['a/b/c', 'c'], excludes=['/c', 'c/d']),
Case(
glob='**/*/c', matches=['a/b/c', 'a/c'], excludes=['c', '/c']),
# Leading slashes should be preserved if present.
Case(glob='/a',
matches=['/a'],
excludes=['a']),
Case(glob='/**/c',
matches=['/a/b/c', '/c'],
excludes=['c', 'a/b/c']),
Case(glob='/a', matches=['/a'], excludes=['a']),
Case(
glob='/**/c',
matches=['/a/b/c', '/c'],
excludes=['c', 'a/b/c']),
# Make sure special characters are escaped properly.
Case(glob='a|b',
matches=['a|b'],
excludes=['a', 'b']),
Case(glob='a|b', matches=['a|b'], excludes=['a', 'b']),
# Test escaped * characters.
Case(glob='a\\*',
matches=['a*'],
excludes=['a', 'aa']),
Case(glob='a\\*', matches=['a*'], excludes=['a', 'aa']),
]
for case in cases:
regex = glob.glob_to_path_regex(case.glob)
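The assertion half of the loop is cut off by the hunk boundary. The Case fields make the intent clear; presumably something close to the following, assuming glob_to_path_regex returns an anchored regex string:

# for case in cases:
#     regex = glob.glob_to_path_regex(case.glob)
#     for path in case.matches:
#         assert re.match(regex, path), (case.glob, path)
#     for path in case.excludes:
#         assert not re.match(regex, path), (case.glob, path)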

View File

@@ -4,7 +4,6 @@ from peru.keyval import KeyVal
class KeyValTest(shared.PeruTest):
def test_keyval(self):
root = shared.create_dir()
tmp_dir = shared.create_dir()

View File

@@ -5,7 +5,6 @@ from shared import create_dir, assert_contents, PeruTest, make_synchronous
class MergeTest(PeruTest):
@make_synchronous
async def setUp(self):
self.cache_dir = create_dir()
@@ -18,11 +17,11 @@ class MergeTest(PeruTest):
@make_synchronous
async def test_merge_from_map(self):
imports = {'foo': ('path1',), 'bar': ('path2',)}
imports = {'foo': ('path1', ), 'bar': ('path2', )}
target_trees = {'foo': self.content_tree, 'bar': self.content_tree}
merged_tree = await merge_imports_tree(
self.cache, imports, target_trees)
merged_tree = await merge_imports_tree(self.cache, imports,
target_trees)
merged_dir = create_dir()
await self.cache.export_tree(merged_tree, merged_dir)
@@ -39,8 +38,8 @@ class MergeTest(PeruTest):
imports = {'foo': ('path1', 'path2')}
target_trees = {'foo': self.content_tree}
merged_tree = await merge_imports_tree(
self.cache, imports, target_trees)
merged_tree = await merge_imports_tree(self.cache, imports,
target_trees)
merged_dir = create_dir()
await self.cache.export_tree(merged_tree, merged_dir)
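Read together, the two merge tests pin down the shape of the imports map: each module name maps to a tuple of target paths, and merge_imports_tree lays the same target tree down at every path it's mapped to. In data form:

# one module per path:    {'foo': ('path1', ), 'bar': ('path2', )}
# one module, two paths:  {'foo': ('path1', 'path2')}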

View File

@@ -14,7 +14,6 @@ def assert_parallel(n):
class ParallelismTest(shared.PeruTest):
def setUp(self):
# Make sure nothing is fishy with the jobs counter, and reset the max.
plugin.debug_assert_clean_parallel_count()

View File

@@ -8,7 +8,6 @@ import shared
class ParserTest(shared.PeruTest):
def test_parse_empty_file(self):
scope, imports = parse_string('')
self.assertDictEqual(scope.modules, {})
@@ -39,9 +38,10 @@ class ParserTest(shared.PeruTest):
self.assertIsInstance(module, Module)
self.assertEqual(module.name, "foo")
self.assertEqual(module.type, "sometype")
self.assertDictEqual(module.plugin_fields,
{"url": "http://www.example.com/",
"rev": "abcdefg"})
self.assertDictEqual(module.plugin_fields, {
"url": "http://www.example.com/",
"rev": "abcdefg"
})
def test_parse_module_default_rule(self):
input = dedent("""\
@@ -63,7 +63,7 @@ class ParserTest(shared.PeruTest):
scope, imports = parse_string(input)
self.assertDictEqual(scope.modules, {})
self.assertDictEqual(scope.rules, {})
self.assertEqual(imports, {'foo': ('bar/',)})
self.assertEqual(imports, {'foo': ('bar/', )})
def test_parse_multimap_imports(self):
input = dedent('''\
@@ -74,7 +74,7 @@ class ParserTest(shared.PeruTest):
scope, imports = parse_string(input)
self.assertDictEqual(scope.modules, {})
self.assertDictEqual(scope.rules, {})
self.assertEqual(imports, {'foo': ('bar/',)})
self.assertEqual(imports, {'foo': ('bar/', )})
def test_parse_empty_imports(self):
input = dedent('''\
@@ -95,7 +95,8 @@ class ParserTest(shared.PeruTest):
def test_bad_rule_field_throw(self):
with self.assertRaises(ParserError):
parse_string(dedent("""\
parse_string(
dedent("""\
rule foo:
bad_field: junk
"""))
@@ -208,10 +209,8 @@ class ParserTest(shared.PeruTest):
a: stuff
''')
duplicates = parser._get_duplicate_keys_approximate(yaml)
self.assertEqual(
[
('a', 5, 7),
('a', 1, 8),
('a', 8, 9),
],
duplicates)
self.assertEqual([
('a', 5, 7),
('a', 1, 8),
('a', 8, 9),
], duplicates)

View File

@@ -9,7 +9,6 @@ import shared
class PathsTest(shared.PeruTest):
def setUp(self):
self.test_root = shared.create_dir()
@@ -37,8 +36,9 @@ class PathsTest(shared.PeruTest):
makedirs(self.cwd)
def assert_success(self, sync_dir, state_dir, cache_dir, more_excludes=[]):
shared.assert_contents(sync_dir, {'bar': 'baz'},
excludes=['.peru', 'peru.yaml'] + more_excludes)
shared.assert_contents(
sync_dir, {'bar': 'baz'},
excludes=['.peru', 'peru.yaml'] + more_excludes)
assert os.path.isfile(os.path.join(state_dir, 'lastimports'))
assert os.path.isdir(os.path.join(cache_dir, 'trees'))
@@ -47,34 +47,35 @@ class PathsTest(shared.PeruTest):
self.assert_success(self.project_dir, self.state_dir, self.cache_dir)
def test_peru_file_and_sync_dir_must_be_set_together(self):
for command in [['--sync-dir=junk', 'sync'],
['--file=junk', 'sync']]:
for command in [['--sync-dir=junk', 'sync'], ['--file=junk', 'sync']]:
with self.assertRaises(CommandLineError):
shared.run_peru_command(command, cwd=self.cwd)
def test_file_and_file_basename_incompatible(self):
with self.assertRaises(CommandLineError):
shared.run_peru_command(
['--file=foo', '--sync-dir=bar', '--file-basename=baz',
'sync'],
cwd=self.cwd)
shared.run_peru_command([
'--file=foo', '--sync-dir=bar', '--file-basename=baz', 'sync'
],
cwd=self.cwd)
def test_setting_all_flags(self):
cwd = shared.create_dir()
sync_dir = shared.create_dir()
state_dir = shared.create_dir()
cache_dir = shared.create_dir()
shared.run_peru_command(
['--file', self.peru_file, '--sync-dir', sync_dir,
'--state-dir', state_dir, '--cache-dir', cache_dir, 'sync'],
cwd)
shared.run_peru_command([
'--file', self.peru_file, '--sync-dir', sync_dir, '--state-dir',
state_dir, '--cache-dir', cache_dir, 'sync'
], cwd)
self.assert_success(sync_dir, state_dir, cache_dir)
def test_setting_all_env_vars(self):
cache_dir = shared.create_dir()
shared.run_peru_command(['sync'], self.cwd, env={
'PERU_CACHE_DIR': cache_dir,
})
shared.run_peru_command(['sync'],
self.cwd,
env={
'PERU_CACHE_DIR': cache_dir,
})
self.assert_success(self.project_dir, self.state_dir, cache_dir)
def test_flags_override_vars(self):
@@ -99,5 +100,8 @@ class PathsTest(shared.PeruTest):
shutil.move(self.peru_file, os.path.join(self.project_dir, 'xxx'))
shared.run_peru_command(['--file-basename', 'xxx', 'sync'],
cwd=self.cwd)
self.assert_success(self.project_dir, self.state_dir, self.cache_dir,
more_excludes=['xxx'])
self.assert_success(
self.project_dir,
self.state_dir,
self.cache_dir,
more_excludes=['xxx'])

View File

@@ -32,12 +32,11 @@ def test_plugin_fetch(context, type, fields, dest):
def test_plugin_get_reup_fields(context, type, fields):
handle = TestDisplayHandle()
return run_task(plugin.plugin_get_reup_fields(
context, type, fields, handle))
return run_task(
plugin.plugin_get_reup_fields(context, type, fields, handle))
class PluginsTest(shared.PeruTest):
def setUp(self):
self.content = {"some": "stuff", "foo/bar": "baz"}
self.content_dir = shared.create_dir(self.content)
@@ -54,11 +53,15 @@ class PluginsTest(shared.PeruTest):
def tearDown(self):
plugin.debug_assert_clean_parallel_count()
def do_plugin_test(self, type, plugin_fields, expected_content, *,
def do_plugin_test(self,
type,
plugin_fields,
expected_content,
*,
fetch_dir=None):
fetch_dir = fetch_dir or shared.create_dir()
output = test_plugin_fetch(
self.plugin_context, type, plugin_fields, fetch_dir)
output = test_plugin_fetch(self.plugin_context, type, plugin_fields,
fetch_dir)
assert_contents(fetch_dir, expected_content)
return output
@@ -82,8 +85,8 @@ class PluginsTest(shared.PeruTest):
def test_svn_plugin_reup(self):
repo = SvnRepo(self.content_dir)
plugin_fields = {'url': repo.url}
output = test_plugin_get_reup_fields(
self.plugin_context, 'svn', plugin_fields)
output = test_plugin_get_reup_fields(self.plugin_context, 'svn',
plugin_fields)
self.assertDictEqual({'rev': '1'}, output)
def test_git_plugin_with_submodule(self):
@@ -96,8 +99,8 @@ class PluginsTest(shared.PeruTest):
content_repo.run('git', 'config', 'core.autocrlf', 'false')
submodule_dir = shared.create_dir({'another': 'file'})
submodule_repo = GitRepo(submodule_dir)
content_repo.run(
'git', 'submodule', 'add', '-q', submodule_dir, 'subdir/')
content_repo.run('git', 'submodule', 'add', '-q', submodule_dir,
'subdir/')
content_repo.run('git', 'commit', '-m', 'submodule commit')
expected_content = self.content.copy()
expected_content['subdir/another'] = 'file'
@@ -108,9 +111,8 @@ class PluginsTest(shared.PeruTest):
shared.write_files(submodule_dir, {'more': 'stuff'})
submodule_repo.run('git', 'add', '-A')
submodule_repo.run('git', 'commit', '-m', 'more stuff')
subprocess.check_output(
['git', 'pull', '-q'],
cwd=os.path.join(self.content_dir, 'subdir'))
subprocess.check_output(['git', 'pull', '-q'],
cwd=os.path.join(self.content_dir, 'subdir'))
content_repo.run('git', 'commit', '-am', 'submodule update')
expected_content['subdir/more'] = 'stuff'
self.do_plugin_test('git', {'url': self.content_dir}, expected_content)
@@ -158,9 +160,8 @@ class PluginsTest(shared.PeruTest):
def test_hg_plugin_multiple_fetches(self):
content_repo = HgRepo(self.content_dir)
head = content_repo.run(
'hg', 'identify', '--debug', '-r', '.'
).split()[0]
head = content_repo.run('hg', 'identify', '--debug', '-r',
'.').split()[0]
plugin_fields = {'url': self.content_dir, 'rev': head}
output = self.do_plugin_test('hg', plugin_fields, self.content)
self.assertEqual(output.count('hg clone'), 1)
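Why the split()[0]: with --debug, `hg identify` prints the full 40-character changeset hash as its first token, possibly followed by extras such as a '+' dirty marker or branch and tag names, so taking the first whitespace-separated field isolates the hash. With a made-up hash value:

sample_output = '3e2287a8e26d7e0d8b2f1a4c5d6e7f8091a2b3c4 tip\n'
assert sample_output.split()[0] == '3e2287a8e26d7e0d8b2f1a4c5d6e7f8091a2b3c4'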
@@ -185,8 +186,8 @@ class PluginsTest(shared.PeruTest):
plugin_fields = {'url': self.content_dir}
# By default, the git plugin should reup from master.
expected_output = {'rev': master_head}
output = test_plugin_get_reup_fields(
self.plugin_context, 'git', plugin_fields)
output = test_plugin_get_reup_fields(self.plugin_context, 'git',
plugin_fields)
self.assertDictEqual(expected_output, output)
# Add some new commits and make sure master gets fetched properly.
repo.run('git', 'commit', '--allow-empty', '-m', 'junk')
@@ -194,51 +195,49 @@ class PluginsTest(shared.PeruTest):
repo.run('git', 'commit', '--allow-empty', '-m', 'more junk')
new_master_head = repo.run('git', 'rev-parse', 'master')
expected_output['rev'] = new_master_head
output = test_plugin_get_reup_fields(
self.plugin_context, 'git', plugin_fields)
output = test_plugin_get_reup_fields(self.plugin_context, 'git',
plugin_fields)
self.assertDictEqual(expected_output, output)
# Now specify the reup target explicitly.
newbranch_head = repo.run('git', 'rev-parse', 'newbranch')
plugin_fields['reup'] = 'newbranch'
expected_output['rev'] = newbranch_head
output = test_plugin_get_reup_fields(
self.plugin_context, 'git', plugin_fields)
output = test_plugin_get_reup_fields(self.plugin_context, 'git',
plugin_fields)
self.assertDictEqual(expected_output, output)
def test_hg_plugin_reup(self):
repo = HgRepo(self.content_dir)
default_tip = repo.run(
'hg', 'identify', '--debug', '-r', 'default'
).split()[0]
default_tip = repo.run('hg', 'identify', '--debug', '-r',
'default').split()[0]
plugin_fields = {'url': self.content_dir}
# By default, the hg plugin should reup from default.
expected_output = {'rev': default_tip}
output = test_plugin_get_reup_fields(
self.plugin_context, 'hg', plugin_fields)
output = test_plugin_get_reup_fields(self.plugin_context, 'hg',
plugin_fields)
self.assertDictEqual(expected_output, output)
# Add some new commits and make sure master gets fetched properly.
shared.write_files(self.content_dir, {
'randomfile': "hg doesn't like empty commits"})
shared.write_files(self.content_dir,
{'randomfile': "hg doesn't like empty commits"})
repo.run('hg', 'commit', '-A', '-m', 'junk')
shared.write_files(self.content_dir, {
'randomfile': "hg still doesn't like empty commits"})
shared.write_files(
self.content_dir,
{'randomfile': "hg still doesn't like empty commits"})
repo.run('hg', 'branch', 'newbranch')
repo.run('hg', 'commit', '-A', '-m', 'more junk')
new_default_tip = repo.run(
'hg', 'identify', '--debug', '-r', 'default'
).split()[0]
new_default_tip = repo.run('hg', 'identify', '--debug', '-r',
'default').split()[0]
expected_output['rev'] = new_default_tip
output = test_plugin_get_reup_fields(
self.plugin_context, 'hg', plugin_fields)
output = test_plugin_get_reup_fields(self.plugin_context, 'hg',
plugin_fields)
self.assertDictEqual(expected_output, output)
# Now specify the reup target explicitly.
newbranch_tip = repo.run(
'hg', 'identify', '--debug', '-r', 'tip'
).split()[0]
newbranch_tip = repo.run('hg', 'identify', '--debug', '-r',
'tip').split()[0]
plugin_fields['reup'] = 'newbranch'
expected_output['rev'] = newbranch_tip
output = test_plugin_get_reup_fields(
self.plugin_context, 'hg', plugin_fields)
output = test_plugin_get_reup_fields(self.plugin_context, 'hg',
plugin_fields)
self.assertDictEqual(expected_output, output)
def test_curl_plugin_fetch(self):
@@ -266,10 +265,13 @@ class PluginsTest(shared.PeruTest):
'unpack': type,
}
fetch_dir = shared.create_dir()
self.do_plugin_test('curl', fields, {
'not_exe.txt': 'Not executable.\n',
'exe.sh': 'echo Executable.\n',
}, fetch_dir=fetch_dir)
self.do_plugin_test(
'curl',
fields, {
'not_exe.txt': 'Not executable.\n',
'exe.sh': 'echo Executable.\n',
},
fetch_dir=fetch_dir)
shared.assert_not_executable(
os.path.join(fetch_dir, 'not_exe.txt'))
shared.assert_executable(os.path.join(fetch_dir, 'exe.sh'))
@@ -292,13 +294,13 @@ class PluginsTest(shared.PeruTest):
digest.update(b'content')
real_hash = digest.hexdigest()
fields = {'url': test_url}
output = test_plugin_get_reup_fields(
self.plugin_context, 'curl', fields)
output = test_plugin_get_reup_fields(self.plugin_context, 'curl',
fields)
self.assertDictEqual({'sha1': real_hash}, output)
# Confirm that we get the same thing with a preexisting hash.
fields['sha1'] = 'preexisting junk'
output = test_plugin_get_reup_fields(
self.plugin_context, 'curl', fields)
output = test_plugin_get_reup_fields(self.plugin_context, 'curl',
fields)
self.assertDictEqual({'sha1': real_hash}, output)
def test_cp_plugin(self):
@@ -337,18 +339,21 @@ class PluginsTest(shared.PeruTest):
plugin_yaml_file = plugin_prefix + 'plugin.yaml'
fake_config_dir = shared.create_dir({
fetch_file:
'#! /usr/bin/env python3\nprint("hey there!")\n',
reup_file: textwrap.dedent('''\
'#! /usr/bin/env python3\nprint("hey there!")\n',
reup_file:
textwrap.dedent('''\
#! /usr/bin/env python3
import os
outfile = os.environ['PERU_REUP_OUTPUT']
print("name: val", file=open(outfile, 'w'))
'''),
plugin_yaml_file: textwrap.dedent('''\
plugin_yaml_file:
textwrap.dedent('''\
sync exe: fetch.py
reup exe: reup.py
required fields: []
''')})
''')
})
os.chmod(os.path.join(fake_config_dir, fetch_file), 0o755)
os.chmod(os.path.join(fake_config_dir, reup_file), 0o755)
fetch_dir = shared.create_dir()
@@ -363,17 +368,17 @@ class PluginsTest(shared.PeruTest):
config_path_variable = 'XDG_CONFIG_HOME'
with temporary_environment(config_path_variable, fake_config_dir):
output = test_plugin_fetch(
self.plugin_context, 'footype', {}, fetch_dir)
output = test_plugin_fetch(self.plugin_context, 'footype', {},
fetch_dir)
self.assertEqual('hey there!\n', output)
output = test_plugin_get_reup_fields(
self.plugin_context, 'footype', {})
output = test_plugin_get_reup_fields(self.plugin_context,
'footype', {})
self.assertDictEqual({'name': 'val'}, output)
def test_no_such_plugin(self):
with self.assertRaises(plugin.PluginCandidateError):
test_plugin_fetch(
self.plugin_context, 'nosuchtype!', {}, os.devnull)
test_plugin_fetch(self.plugin_context, 'nosuchtype!', {},
os.devnull)
@contextlib.contextmanager

View File

@@ -17,8 +17,8 @@ class ReupIntegrationTest(shared.PeruTest):
f.write('new')
self.bar_repo.run('git', 'add', '-A')
self.bar_repo.run('git', 'commit', '-m', 'creating barfile')
self.bar_otherbranch = self.bar_repo.run(
'git', 'rev-parse', 'otherbranch')
self.bar_otherbranch = self.bar_repo.run('git', 'rev-parse',
'otherbranch')
def test_single_reup(self):
yaml_without_imports = dedent('''\
@@ -41,8 +41,7 @@ class ReupIntegrationTest(shared.PeruTest):
reup: otherbranch
''').format(self.foo_dir, self.foo_master, self.bar_dir)
run_peru_command(['reup', 'foo'], test_dir)
assert_contents(test_dir, {'peru.yaml': expected},
excludes=['.peru'])
assert_contents(test_dir, {'peru.yaml': expected}, excludes=['.peru'])
def test_reup_sync(self):
yaml_with_imports = dedent('''\
@@ -99,6 +98,10 @@ class ReupIntegrationTest(shared.PeruTest):
self.bar_otherbranch)
run_peru_command(['reup'], test_dir)
# This time we finally pull in barfile.
assert_contents(test_dir,
{'peru.yaml': expected, 'a': 'b', 'barfile': 'new'},
excludes=['.peru'])
assert_contents(
test_dir, {
'peru.yaml': expected,
'a': 'b',
'barfile': 'new'
},
excludes=['.peru'])

View File

@@ -8,7 +8,6 @@ from shared import COLON
class RuleTest(shared.PeruTest):
@shared.make_synchronous
async def setUp(self):
self.cache_dir = shared.create_dir()
@@ -25,16 +24,16 @@ class RuleTest(shared.PeruTest):
# A file copied into a directory should be placed into that directory.
# A directory or file copied into a file should overwrite that file.
copies = {'a': ['x', 'b', 'b/c'], 'b': ['a', 'y']}
tree = await rule.copy_files(
self.cache, self.content_tree, copies)
await shared.assert_tree_contents(self.cache, tree, {
'a/c': 'bar',
'b/a': 'foo',
'b/c': 'foo',
'x': 'foo',
'y/c': 'bar',
COLON + 'd': 'baz',
})
tree = await rule.copy_files(self.cache, self.content_tree, copies)
await shared.assert_tree_contents(
self.cache, tree, {
'a/c': 'bar',
'b/a': 'foo',
'b/c': 'foo',
'x': 'foo',
'y/c': 'bar',
COLON + 'd': 'baz',
})
@shared.make_synchronous
async def test_move(self):
@@ -46,57 +45,59 @@ class RuleTest(shared.PeruTest):
await shared.assert_tree_contents(self.cache, tree, {
'a/c': 'bar',
'b/a': 'foo',
COLON + 'd': 'baz',
COLON + 'd': 'baz',
})
@shared.make_synchronous
async def test_drop(self):
drop_dir = await rule.drop_files(
self.cache, self.content_tree, ['b'])
await shared.assert_tree_contents(
self.cache, drop_dir, {'a': 'foo', COLON + 'd': 'baz'})
drop_dir = await rule.drop_files(self.cache, self.content_tree, ['b'])
await shared.assert_tree_contents(self.cache, drop_dir, {
'a': 'foo',
COLON + 'd': 'baz'
})
drop_file = await rule.drop_files(
self.cache, self.content_tree, ['a'])
await shared.assert_tree_contents(
self.cache, drop_file, {'b/c': 'bar', COLON + 'd': 'baz'})
drop_file = await rule.drop_files(self.cache, self.content_tree, ['a'])
await shared.assert_tree_contents(self.cache, drop_file, {
'b/c': 'bar',
COLON + 'd': 'baz'
})
drop_colon = await rule.drop_files(
self.cache, self.content_tree, [COLON + 'd'])
await shared.assert_tree_contents(
self.cache, drop_colon, {'a': 'foo', 'b/c': 'bar'})
drop_colon = await rule.drop_files(self.cache, self.content_tree,
[COLON + 'd'])
await shared.assert_tree_contents(self.cache, drop_colon, {
'a': 'foo',
'b/c': 'bar'
})
globs = await rule.drop_files(
self.cache, self.content_tree, ['**/c', '**/a'])
await shared.assert_tree_contents(
self.cache, globs, {COLON + 'd': 'baz'})
globs = await rule.drop_files(self.cache, self.content_tree,
['**/c', '**/a'])
await shared.assert_tree_contents(self.cache, globs,
{COLON + 'd': 'baz'})
@shared.make_synchronous
async def test_pick(self):
pick_dir = await rule.pick_files(
self.cache, self.content_tree, ['b'])
await shared.assert_tree_contents(
self.cache, pick_dir, {'b/c': 'bar'})
pick_dir = await rule.pick_files(self.cache, self.content_tree, ['b'])
await shared.assert_tree_contents(self.cache, pick_dir, {'b/c': 'bar'})
pick_file = await rule.pick_files(
self.cache, self.content_tree, ['a'])
await shared.assert_tree_contents(
self.cache, pick_file, {'a': 'foo'})
pick_file = await rule.pick_files(self.cache, self.content_tree, ['a'])
await shared.assert_tree_contents(self.cache, pick_file, {'a': 'foo'})
pick_colon = await rule.pick_files(
self.cache, self.content_tree, [COLON + 'd'])
await shared.assert_tree_contents(
self.cache, pick_colon, {COLON + 'd': 'baz'})
pick_colon = await rule.pick_files(self.cache, self.content_tree,
[COLON + 'd'])
await shared.assert_tree_contents(self.cache, pick_colon,
{COLON + 'd': 'baz'})
globs = await rule.pick_files(
self.cache, self.content_tree, ['**/c', '**/a'])
await shared.assert_tree_contents(
self.cache, globs, {'a': 'foo', 'b/c': 'bar'})
globs = await rule.pick_files(self.cache, self.content_tree,
['**/c', '**/a'])
await shared.assert_tree_contents(self.cache, globs, {
'a': 'foo',
'b/c': 'bar'
})
@shared.make_synchronous
async def test_executable(self):
exe = await rule.make_files_executable(
self.cache, self.content_tree, ['b/*'])
exe = await rule.make_files_executable(self.cache, self.content_tree,
['b/*'])
new_content_dir = shared.create_dir()
await self.cache.export_tree(exe, new_content_dir)
shared.assert_contents(new_content_dir, self.content)
@@ -105,6 +106,5 @@ class RuleTest(shared.PeruTest):
@shared.make_synchronous
async def test_export(self):
b = await rule.get_export_tree(
self.cache, self.content_tree, 'b')
b = await rule.get_export_tree(self.cache, self.content_tree, 'b')
await shared.assert_tree_contents(self.cache, b, {'c': 'bar'})

View File

@@ -12,7 +12,6 @@ class RuntimeTest(shared.PeruTest):
'a/b/c/junk': 'junk',
})
result = runtime.find_project_file(
os.path.join(test_dir, 'a', 'b', 'c'),
'find_me')
os.path.join(test_dir, 'a', 'b', 'c'), 'find_me')
expected = os.path.join(test_dir, 'a', 'find_me')
self.assertEqual(expected, result)

View File

@@ -10,14 +10,16 @@ class ScopeTest(shared.PeruTest):
'a': {
'modules': {
'b': {
'modules': {'c': {}},
'modules': {
'c': {}
},
'rules': ['r'],
}
}
}
}
})
c, (r,) = run_task(scope.parse_target(DummyRuntime(), 'a.b.c|a.b.r'))
c, (r, ) = run_task(scope.parse_target(DummyRuntime(), 'a.b.c|a.b.r'))
assert type(c) is DummyModule and c.name == 'a.b.c'
assert type(r) is DummyRule and r.name == 'a.b.r'
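The unpacking on the left-hand side documents parse_target's return shape: a module plus a tuple of rules, split out of the 'module|rule' target syntax. Reading the target string apart:

# 'a.b.c|a.b.r'
#   module part: 'a.b.c'  -> DummyModule named 'a.b.c'
#   rule part:   'a.b.r'  -> a 1-tuple containing DummyRule named 'a.b.r'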

View File

@@ -22,7 +22,6 @@ PERU_MODULE_ROOT = os.path.abspath(
class SyncTest(shared.PeruTest):
def setUp(self):
self.test_dir = shared.create_dir()
self.peru_dir = os.path.join(self.test_dir, '.peru')
@@ -37,18 +36,25 @@ class SyncTest(shared.PeruTest):
with open(os.path.join(dir, DEFAULT_PERU_FILE_NAME), 'w') as f:
f.write(yaml)
def do_integration_test(self, args, expected, *, cwd=None,
def do_integration_test(self,
args,
expected,
*,
cwd=None,
**peru_cmd_kwargs):
if not cwd:
cwd = self.test_dir
output = run_peru_command(args, cwd, **peru_cmd_kwargs)
assert_contents(self.test_dir, expected,
excludes=[DEFAULT_PERU_FILE_NAME, '.peru'])
assert_contents(
self.test_dir,
expected,
excludes=[DEFAULT_PERU_FILE_NAME, '.peru'])
return output
def test_basic_sync(self):
module_dir = shared.create_dir({'foo': 'bar'})
self.write_yaml('''\
self.write_yaml(
'''\
cp module foo:
path: {}
@@ -67,7 +73,8 @@ class SyncTest(shared.PeruTest):
def test_no_cache_flag(self):
foo_dir = shared.create_dir({'foo': 'bar'})
self.write_yaml('''\
self.write_yaml(
'''\
cp module foo:
path: {}
@@ -88,7 +95,8 @@ class SyncTest(shared.PeruTest):
def test_sync_from_subdir(self):
module_dir = shared.create_dir({'foo': 'bar'})
self.write_yaml('''\
self.write_yaml(
'''\
# Use a relative module path, to make sure it gets resolved
# relative to the project root and not the dir where peru was
# called.
@@ -101,13 +109,15 @@ class SyncTest(shared.PeruTest):
subdir = os.path.join(self.test_dir, 'a', 'b')
peru.compat.makedirs(subdir)
run_peru_command(['sync'], subdir)
self.assertTrue(os.path.isdir(os.path.join(self.test_dir, '.peru')),
msg=".peru dir didn't end up in the right place")
self.assertTrue(
os.path.isdir(os.path.join(self.test_dir, '.peru')),
msg=".peru dir didn't end up in the right place")
assert_contents(os.path.join(self.test_dir, 'subdir'), {'foo': 'bar'})
def test_conflicting_imports(self):
module_dir = shared.create_dir({'foo': 'bar'})
self.write_yaml('''\
self.write_yaml(
'''\
cp module foo:
path: {0}
@ -148,12 +158,16 @@ class SyncTest(shared.PeruTest):
dir_a = shared.create_dir({'afile': 'stuff'})
dir_b = shared.create_dir()
# Create the peru.yaml file for B.
self.write_yaml('''\
self.write_yaml(
'''\
cp module a:
path: {}
''', dir_a, dir=dir_b)
''',
dir_a,
dir=dir_b)
# Now create the peru.yaml file in the actual test project.
self.write_yaml('''\
self.write_yaml(
'''\
imports:
b.a: a_via_b/
@ -162,7 +176,8 @@ class SyncTest(shared.PeruTest):
''', dir_b)
self.do_integration_test(['sync'], {'a_via_b/afile': 'stuff'})
# Test the error message from an invalid module.
self.write_yaml('''\
self.write_yaml(
'''\
imports:
b.missing_module: some_path
@ -181,14 +196,18 @@ class SyncTest(shared.PeruTest):
dir_a = shared.create_dir({'afile': 'aaa'})
dir_b = shared.create_dir({'exports/bfile': 'bbb'})
# Create the peru.yaml file for B.
self.write_yaml('''\
self.write_yaml(
'''\
imports:
a: exports/where_b_put_a
cp module a:
path: {}
''', dir_a, dir=dir_b)
''',
dir_a,
dir=dir_b)
# Now create the peru.yaml file in the actual test project.
self.write_yaml('''\
self.write_yaml(
'''\
imports:
b: where_c_put_b
@ -197,11 +216,11 @@ class SyncTest(shared.PeruTest):
path: {}
export: exports # omit the peru.yaml file from b
''', dir_b)
self.do_integration_test(
['sync'], {'where_c_put_b/bfile': 'bbb'})
self.do_integration_test(['sync'], {'where_c_put_b/bfile': 'bbb'})
# Repeat the same test with explicit 'recursive' settings.
self.write_yaml('''\
self.write_yaml(
'''\
imports:
b: where_c_put_b
@ -211,10 +230,11 @@ class SyncTest(shared.PeruTest):
export: exports # omit the peru.yaml file from b
recursive: true
''', dir_b)
self.do_integration_test(
['sync'], {'where_c_put_b/where_b_put_a/afile': 'aaa'})
self.do_integration_test(['sync'],
{'where_c_put_b/where_b_put_a/afile': 'aaa'})
self.write_yaml('''\
self.write_yaml(
'''\
imports:
b: where_c_put_b
@ -223,8 +243,7 @@ class SyncTest(shared.PeruTest):
export: exports # omit the peru.yaml file from b
recursive: false
''', dir_b)
self.do_integration_test(
['sync'], {'where_c_put_b/bfile': 'bbb'})
self.do_integration_test(['sync'], {'where_c_put_b/bfile': 'bbb'})
def test_recursive_import_error(self):
'''Errors that happen inside recursively-fetched targets should have
@@ -234,15 +253,18 @@ class SyncTest(shared.PeruTest):
# Project NOTABLE_NAME has a BAD_MODULE in it.
dir_notable = shared.create_dir()
# Create the peru.yaml file for NOTABLE_NAME.
self.write_yaml('''\
self.write_yaml(
'''\
imports:
BAD_MODULE: ./
git module BAD_MODULE:
bad_field: stuff
# The error we get here will actually be that `url` is missing.
''', dir=dir_notable)
''',
dir=dir_notable)
# Now make our test project import it.
self.write_yaml('''\
self.write_yaml(
'''\
imports:
NOTABLE_NAME: ./notable
@@ -259,12 +281,16 @@ class SyncTest(shared.PeruTest):
# Project B contains project A
dir_a = shared.create_dir({'afile': 'stuff'})
# Create project B with an unusual YAML filename.
dir_b = shared.create_dir({'alternate.yaml': textwrap.dedent('''\
dir_b = shared.create_dir({
'alternate.yaml':
textwrap.dedent('''\
cp module a:
path: {}
'''.format(dir_a))})
'''.format(dir_a))
})
# Now create the peru.yaml file in the actual test project.
self.write_yaml('''\
self.write_yaml(
'''\
imports:
b.a: a_via_b/
@@ -297,16 +323,14 @@ class SyncTest(shared.PeruTest):
self.do_integration_test(['sync'], {'d': ''})
def test_rule_with_picked_files(self):
content = {name: '' for name in [
'foo',
'bar',
'special',
'baz/bing',
'baz/boo/a',
'baz/boo/b'
]}
content = {
name: ''
for name in
['foo', 'bar', 'special', 'baz/bing', 'baz/boo/a', 'baz/boo/b']
}
module_dir = shared.create_dir(content)
self.write_yaml('''\
self.write_yaml(
'''\
cp module foo:
path: {}
@@ -318,17 +342,21 @@ class SyncTest(shared.PeruTest):
imports:
foo|filter: ./
''', module_dir)
filtered_content = {name: '' for name in [
'foo',
'special',
'baz/boo/a',
'baz/boo/b',
]}
filtered_content = {
name: ''
for name in [
'foo',
'special',
'baz/boo/a',
'baz/boo/b',
]
}
self.do_integration_test(['sync'], filtered_content)
def test_rule_with_picked_files_that_do_not_exist(self):
module_dir = shared.create_dir({'foo': 'bar'})
self.write_yaml('''\
self.write_yaml(
'''\
cp module foo:
path: {}
pick: idontexist
@@ -339,15 +367,13 @@ class SyncTest(shared.PeruTest):
self.do_integration_test(['sync'], {})
def test_rule_with_exported_files_that_are_not_picked(self):
content = {name: '' for name in [
'foo',
'bar',
'baz/bing',
'baz/boo/a',
'baz/boo/b'
]}
content = {
name: ''
for name in ['foo', 'bar', 'baz/bing', 'baz/boo/a', 'baz/boo/b']
}
module_dir = shared.create_dir(content)
self.write_yaml('''\
self.write_yaml(
'''\
cp module foo:
path: {}
pick: foo
@@ -362,7 +388,8 @@ class SyncTest(shared.PeruTest):
def test_rule_with_dropped_files(self):
content = {'foo': 'one', 'bar': 'two'}
module_dir = shared.create_dir(content)
self.write_yaml('''\
self.write_yaml(
'''\
cp module foobar:
path: {}
@@ -383,7 +410,8 @@ class SyncTest(shared.PeruTest):
'''
content = {'foo': 'stuff'}
module_dir = shared.create_dir(content)
self.write_yaml('''\
self.write_yaml(
'''\
cp module foobar:
path: {}
drop: foo
@@ -398,7 +426,8 @@ class SyncTest(shared.PeruTest):
def test_rule_with_executable(self):
contents = {'a.txt': '', 'b.txt': '', 'c.foo': ''}
module_dir = shared.create_dir(contents)
self.write_yaml('''\
self.write_yaml(
'''\
cp module foo:
path: {}
executable: "*.txt"
@@ -411,7 +440,8 @@ class SyncTest(shared.PeruTest):
def test_rule_with_move(self):
module_dir = shared.create_dir({'a': 'foo', 'b/c': 'bar'})
self.write_yaml('''\
self.write_yaml(
'''\
cp module foo:
path: {}
move:
@@ -424,7 +454,8 @@ class SyncTest(shared.PeruTest):
def test_rule_with_move_error(self):
module_dir = shared.create_dir()
self.write_yaml('''\
self.write_yaml(
'''\
cp module foo:
path: {}
move:
@@ -433,17 +464,17 @@ class SyncTest(shared.PeruTest):
foo: ./
''', module_dir)
with raises_gathered(peru.rule.NoMatchingFilesError) as cm:
self.do_integration_test(['sync'],
{'newa': 'foo', 'newb/c': 'bar'})
self.do_integration_test(['sync'], {
'newa': 'foo',
'newb/c': 'bar'
})
assert 'doesntexist' in cm.exception.message
def test_rule_with_copied_files(self):
content = {
'foo': 'foo',
'bar/baz': 'baz'
}
content = {'foo': 'foo', 'bar/baz': 'baz'}
module_dir = shared.create_dir(content)
self.write_yaml('''\
self.write_yaml(
'''\
cp module foo:
path: {}
copy:
@@ -466,7 +497,8 @@ class SyncTest(shared.PeruTest):
def test_alternate_cache(self):
module_dir = shared.create_dir({'foo': 'bar'})
self.write_yaml('''\
self.write_yaml(
'''\
cp module foo:
path: {}
@@ -475,17 +507,16 @@ class SyncTest(shared.PeruTest):
''', module_dir)
cache_dir = shared.create_dir()
env = {'PERU_CACHE_DIR': cache_dir}
self.do_integration_test(['sync'], {'subdir/foo': 'bar'},
env=env)
self.do_integration_test(['sync'], {'subdir/foo': 'bar'}, env=env)
self.assertTrue(os.path.exists(os.path.join(cache_dir, 'plugins')))
self.assertTrue(os.path.exists(os.path.join(cache_dir, 'trees')))
self.assertTrue(os.path.exists(os.path.join(cache_dir, 'keyval')))
self.assertFalse(os.path.exists(
os.path.join(self.peru_dir, 'cache')))
self.assertFalse(os.path.exists(os.path.join(self.peru_dir, 'cache')))
def test_override(self):
module_dir = shared.create_dir({'foo': 'bar'})
self.write_yaml('''\
self.write_yaml(
'''\
cp module foo:
path: {}
@@ -533,7 +564,8 @@ class SyncTest(shared.PeruTest):
def test_override_after_regular_sync(self):
module_dir = shared.create_dir({'foo': 'bar'})
self.write_yaml('''\
self.write_yaml(
'''\
cp module foo:
path: {}
@@ -553,17 +585,21 @@ class SyncTest(shared.PeruTest):
module_a_dir = shared.create_dir({'foo': 'bar'})
# Module B imports module A.
module_b_dir = shared.create_dir()
self.write_yaml('''\
self.write_yaml(
'''\
cp module A:
path: {}
imports:
A: A/
''', module_a_dir, dir=module_b_dir)
''',
module_a_dir,
dir=module_b_dir)
# Module C (in self.test_dir) imports module B, and also directly
# imports module A. When we set an override for module A below, we'll
# want to check that *both* of these imports get overridden.
self.write_yaml('''\
self.write_yaml(
'''\
cp module B:
path: {}
recursive: true
@@ -609,8 +645,7 @@ class SyncTest(shared.PeruTest):
# .peru/overrides/ at the root, so this tests that we resolve the
# stored path properly.
relative_path = os.path.relpath(override_dir, start=subdir)
run_peru_command(['override', 'add', 'foo', relative_path],
subdir)
run_peru_command(['override', 'add', 'foo', relative_path], subdir)
# Confirm that the right path is stored on disk.
expected_stored_path = os.path.relpath(
override_dir, start=self.test_dir)
@@ -630,8 +665,10 @@ class SyncTest(shared.PeruTest):
imports:
foo: ./
''')
override_dir = shared.create_dir(
{'foo': 'override', '.peru/bar': 'baz'})
override_dir = shared.create_dir({
'foo': 'override',
'.peru/bar': 'baz'
})
run_peru_command(['override', 'add', 'foo', override_dir],
self.test_dir)
self.do_integration_test(['sync'], {'foo': 'override'})
@@ -675,7 +712,8 @@ class SyncTest(shared.PeruTest):
def test_copy(self):
module_dir = shared.create_dir({'foo': 'bar'})
self.write_yaml('''\
self.write_yaml(
'''\
cp module foo:
path: {}
''', module_dir)
@@ -693,12 +731,16 @@ class SyncTest(shared.PeruTest):
dir_a = shared.create_dir({'afile': 'stuff'})
dir_b = shared.create_dir()
# Create the peru.yaml file for B.
self.write_yaml('''\
self.write_yaml(
'''\
cp module a:
path: {}
''', dir_a, dir=dir_b)
''',
dir_a,
dir=dir_b)
# Now create the peru.yaml file in the actual test project.
self.write_yaml('''\
self.write_yaml(
'''\
cp module b:
path: {}
''', dir_b)
@@ -706,7 +748,8 @@ class SyncTest(shared.PeruTest):
def test_clean(self):
module_dir = shared.create_dir({'foo': 'bar'})
self.write_yaml('''\
self.write_yaml(
'''\
imports:
foo: ./
cp module foo:
@@ -751,8 +794,8 @@ class SyncTest(shared.PeruTest):
buffer = io.StringIO()
with redirect_stderr(buffer):
run_peru_command(['sync'], self.test_dir)
assert('WARNING' in buffer.getvalue())
assert('git module foo' in buffer.getvalue())
assert ('WARNING' in buffer.getvalue())
assert ('git module foo' in buffer.getvalue())
# Make sure --quiet suppresses the warning.
buffer = io.StringIO()
with redirect_stderr(buffer):
@@ -795,7 +838,8 @@ class SyncTest(shared.PeruTest):
'''A no-op sync should be a single git command. Also check that index
files are deleted after any sync error.'''
module_dir = shared.create_dir({'foo': 'bar'})
self.write_yaml('''\
self.write_yaml(
'''\
cp module foo:
path: {}

View File

@@ -5,7 +5,6 @@ import shared
class SharedTestCodeTest(shared.PeruTest):
def test_create_dir(self):
empty_dir = shared.create_dir()
self.assertListEqual([], os.listdir(empty_dir))
@@ -25,10 +24,12 @@ class SharedTestCodeTest(shared.PeruTest):
test_dir = shared.create_dir(content)
read_content = shared.read_dir(test_dir)
self.assertDictEqual(content, read_content)
self.assertDictEqual({Path('foo'): 'a'},
shared.read_dir(test_dir, excludes=['bar']))
self.assertDictEqual({Path('foo'): 'a'},
shared.read_dir(test_dir, excludes=['bar/baz']))
self.assertDictEqual({
Path('foo'): 'a'
}, shared.read_dir(test_dir, excludes=['bar']))
self.assertDictEqual({
Path('foo'): 'a'
}, shared.read_dir(test_dir, excludes=['bar/baz']))
def test_assert_contents(self):
content = {'foo': 'a', 'bar/baz': 'b'}