blob: 5fd21cb9e0ef716816d4730d4f2470ba37e0d8a8 [file] [log] [blame] [edit]
#!/workspace/recipe_python_virtual_env/bin/python
"""This script allows execution of a recipe without buildbot.
Example:
./run_recipe.py lint --properties-file - <<EOF
{
  "property1": "value1",
  "property2": "value2"
}
EOF
"""
import argparse
import contextlib
import dataclasses
import datetime
import glob
import io
import json
import logging
import os
import pathlib
import pdb
import resource
import shutil
import subprocess
import sys
import tempfile
import time
import traceback
from typing import Dict, Union
import urllib.parse
# Allow imports from cq/scripts folder.
sys.path.insert(
    0, os.path.realpath(os.path.join(os.path.dirname(__file__), os.pardir)))
# Allow imports from the root of continuous-tests
sys.path.insert(
    0,
    os.path.realpath(
        os.path.join(
            os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
            os.pardir)))

# These project imports depend on the sys.path entries inserted above, so
# they must stay below the path manipulation.
# pylint: disable=wrong-import-position
from helpers import data_manager
from helpers import repo_utils
from helpers import signal_utils
from helpers import timeout
from tools import subprocess42
SUCCESS = 0
FAILURE = 1
USAGE = """
%(prog)s <recipe_name> --properties-file <filename> [options]
The properties file should contain a Python dictionary. If the filename
"-" is used, then the dictionary is read from stdin, for example:
%(prog)s recipe_name --properties-file - <<EOF
{
'property1: 'value1',
'property2: 'value2',
}
EOF
The --eureka-root specifies which working eureka repository to run the tests in.
If unspecified, it will run in the current directory.
"""
# buildbot is reducing soft limit to 1024 and recipe inherits it from buildslave
_RLIMIT_NOFILE_SOFT = 8192
_RUNNING_STEP_NAME = None
_REVIEW_MESSAGE_CHAR_LIMIT = 10000
_TRUNCATED_MESSAGE = '...[truncated]'
Properties = Dict['str', Union[str, int]]
class RecipeRunnerError(Exception):
  """Generic exception for recipe runner errors (e.g. missing properties)."""
def get_tmp_dir():
  """Returns the correct tmp dir.

  Preference order: the CQ_TEMP_DIR environment variable, then
  /usr/local/google/tmp on machines that have /usr/local/google, then the
  platform default temporary directory.
  """
  env_override = os.environ.get('CQ_TEMP_DIR')
  if env_override:
    return env_override
  if os.path.isdir('/usr/local/google'):
    return '/usr/local/google/tmp'
  return tempfile.gettempdir()
def load_recipe_module(recipe):
  """Returns a reference to the module for a given recipe.

  Args:
    recipe: name of the recipe (filename in scripts/slave/recipes).

  Returns:
    Handle to the module for the given recipe file.
  """
  sys_path_copy = sys.path[:]
  sys.path.insert(
      0, os.path.realpath(os.path.join(os.path.dirname(__file__), os.pardir)))
  try:
    # __import__ returns the top-level 'slave' package; walk down to the
    # actual recipe module.
    module = __import__('slave.recipe.' + recipe)
    recipe_module = getattr(getattr(module, 'recipe'), recipe)
  finally:
    # Restore sys.path even when the import fails, so a bad recipe name does
    # not permanently leak an extra path entry into the process.
    sys.path = sys_path_copy
  return recipe_module
def load_recipe(recipe, build_name: str, **kwargs):
  """Loads a recipe from a name and properties structure.

  Args:
    recipe: name of the recipe (filename in scripts/slave/recipes).
    build_name: name of the build to execute via this recipe.
    **kwargs: additional named args to pass to the recipe

  Returns:
    A BaseRecipe instance corresponding to the given inputs.

  Raises:
    KeyError: if build_name not found in valid build names for the recipe.
  """
  recipe_module = load_recipe_module(recipe)
  valid_build_names = recipe_module.GetValidBuildNames()
  if build_name not in valid_build_names:
    raise KeyError('Build name "%s" not valid for recipe "%s"' %
                   (build_name, recipe))
  return recipe_module.CreateRecipe(build_name, **kwargs)
def run_recipe(recipe,
               properties: Properties,
               eureka_root: str,
               use_debugger: bool = False):
  """Runs a recipe, mimicking BuildBot as needed.

  Runs the recipe's setup steps, then (only if setup succeeded) its main
  steps, then always its teardown steps, and finally writes the review
  result file from the annotator's accumulated output.

  Args:
    recipe: name of the recipe (filename in scripts/slave/recipes).
    properties: dictionary of properties for the given build; must contain
      'buildername'. A 'manifest_override_path' entry is added here.
    eureka_root: the root directory to run the commands in.
    use_debugger: Boolean param to pass to run_steps().

  Returns:
    one of SUCCESS or FAILURE.
  """
  annotator = Annotator()
  gcs_dir = properties.get('artifacts_dir')
  executor = SubprocessExecutor(gcs_dir)
  assert 'buildername' in properties
  build_name = properties['buildername']
  eureka_root_abs = os.path.abspath(eureka_root)
  # manifest file name refer to http://google3/nest/engprod/devinfra/cast/austin/agent/source.py;l=190-191;rcl=628961120
  manifest_output_path = (
      pathlib.Path(executor.get_gcs_dir()) / repo_utils.MANIFEST_OVERRIDE_XML)
  # Expose the manifest override location both to the recipe (as a property)
  # and to the infrastructure (as an output build property).
  properties['manifest_override_path'] = str(manifest_output_path)
  annotator.set_build_property(
      'manifest_override_path', str(manifest_output_path))
  dm = data_manager.DataManager(executor, properties)
  dm.add_data('recipe_start_time', datetime.datetime.now())
  recipe = load_recipe(
      recipe,
      build_name,
      properties=properties,
      annotator=annotator,
      executor=executor,
      data_manager=dm,
      eureka_root=eureka_root_abs)
  logging.info('Running setup steps.')
  result = run_steps(recipe.get_setup_steps(), eureka_root_abs, annotator)
  if result == SUCCESS:
    logging.info('Running main recipe steps.')
    result = run_steps(
        recipe.get_steps(),
        eureka_root_abs,
        annotator,
        use_debugger=use_debugger)
  # Teardown always runs; its return value intentionally does not affect the
  # overall result.
  logging.info('Running teardown steps.')
  run_steps(recipe.get_teardown_steps(), eureka_root_abs, annotator)
  write_result_file(
      annotator.message,
      annotator.file_comments,
      annotator.output_properties,
  )
  return result
def run_steps(steps, root_dir, annotator, use_debugger=False):
  """Runs a list of steps.

  Args:
    steps: A list of steps to run.
    root_dir: The root directory to reset to prior to each step.
    annotator: An instance of an Annotator to use to run each step.
    use_debugger: If True will re-raise exception

  Returns:
    one of SUCCESS or FAILURE.
  """
  global _RUNNING_STEP_NAME
  result = SUCCESS
  for step in steps:
    # reset cwd to root before each step, allowing steps to change cwd
    os.chdir(root_dir)
    logging.info('Running step: %s', step.get_name())
    exception = None
    step_result = FAILURE
    try:
      # Tag subsequent log output (via JsonIOWrapper) with this step's name.
      _RUNNING_STEP_NAME = step.get_name()
      step_result = annotator.run_step(step)
    except Exception:  # pylint: disable=broad-except
      logging.exception('Encountered an unexpected error while running %s',
                        step.get_name())
      exception = traceback.format_exc()
    finally:
      # Always clear the step tag, even on failure.
      _RUNNING_STEP_NAME = None
      if exception:
        if use_debugger:
          # NOTE(review): this bare raise executes in the finally block after
          # the except handler has completed, so the caught exception may no
          # longer be active here — confirm it re-raises the original error
          # rather than producing "No active exception to re-raise".
          raise  # pylint: disable=misplaced-bare-raise
        return FAILURE  # pylint: disable=lost-exception
    # FAILURE (1) > SUCCESS (0), so max() keeps the worst result seen so far.
    result = max(result, step_result)
    if result != SUCCESS and step.halt_on_failure():
      logging.info('Halting on failure from step %s', step.get_name())
      break
  return result
class ExecutorInvalidKeywordArgsError(Exception):
  """Raised when stdin/stdout/stderr are passed to the executor as kwargs.

  SubprocessExecutor.start_subprocess manages these streams via positional
  parameters and rejects them as keyword arguments.
  """
class ExecutorInvalidArgsError(Exception):
  """Raised when the executor's positional args are not a single command list."""
class SubprocessExecutor(object):
  """Executes subprocess commands.

  Wraps subprocess42.Popen with logging, output capture and bookkeeping of
  started subprocesses, and owns a private temporary directory for the run.
  """

  def __init__(self, gcs_dir):
    """Constructor for SubprocessExecutor. Creates temporary directories.

    Args:
      gcs_dir: directory that will be uploaded to GCS when a recipe finishes
        (see get_gcs_dir).
    """
    tempdir = get_tmp_dir()
    # Private scratch directory under the machine-preferred temp root.
    self._tempdir = tempfile.mkdtemp(dir=tempdir)
    self._gcs_dir = gcs_dir
    # Two directory levels above this file's directory.
    self._cq_root = os.path.abspath(
        os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
    # Popen handles for every blocking subprocess started by this executor.
    self._blocking_subprocesses = []

  def get_cq_root(self):
    """Returns the absolute path to the root of the CQ src."""
    return self._cq_root

  def get_tmp_dir(self):
    """Returns a tempdir available to the executor."""
    return self._tempdir

  def get_gcs_dir(self):
    """This directory will be uploaded to GCS when a recipe finishes."""
    return self._gcs_dir

  def get_new_tmp_file(self):
    """Creates a new temporary file.

    Returns:
      A tuple of newly created temporary file descriptor and absolute path.
    """
    return tempfile.mkstemp(dir=self.get_tmp_dir(), text=True)

  def write_temp_file(self, contents: str):
    """Writes a temporary file.

    Args:
      contents: Text string to write

    Returns:
      The name of the file.
    """
    tmp_file, tmp_file_path = self.get_new_tmp_file()
    os.write(tmp_file, contents.encode('utf-8'))
    os.close(tmp_file)
    return tmp_file_path

  def get_blocking_subprocesses(self):
    """Returns all the blocking subprocesses this executor has started."""
    return self._blocking_subprocesses

  def read_file(self, file_path):
    """Reads a file by path and returns the contents.

    No sanity checking or validation is performed by this method.

    Args:
      file_path: path to a file, either relative from cwd or absolute.

    Returns:
      The contents of the file loaded from file_path.
    """
    with open(file_path) as f:
      return f.read()

  def glob_files(self, glob_expr):
    """Performs a file glob and returns the list of files.

    Args:
      glob_expr: the file glob expression to search for.

    Returns:
      A list, the output of glob.glob(glob_expr).
    """
    assert isinstance(glob_expr, str)
    return glob.glob(glob_expr)

  def exec_subprocess(self,
                      command,
                      check_output=False,
                      cwd=None,
                      *args,
                      **kwargs):  # pylint: disable=keyword-arg-before-vararg
    """Execute a subprocess.

    Args:
      command: The command to execute.
      check_output: Whether or not to raise a CalledProcessError on non-zero
        return code.
      cwd: The current working directory to use for the subprocess.
      *args: passed to self.subprocess
      **kwargs: passed to self.subprocess. This should be used sparingly at
        the recipe level because none of the information about the
        subprocess is captured in any logs.

    Returns:
      Just stdout, if check_output=True, otherwise a tuple of
      (returncode, stdout, stderr).

    Raises:
      subprocess.CalledProcessError: if check_output=True and the command
        exits non-zero; stderr is folded into the error's output attribute.
    """
    returncode, stdout, stderr = self.subprocess(
        command, *args, cwd=cwd, **kwargs)
    if check_output:
      if returncode != 0:
        raise subprocess.CalledProcessError(
            returncode, cmd=command, output=stdout + '\nstderr:\n' + stderr)
      return stdout
    return (returncode, stdout, stderr)

  def subprocess(self, *args, **kwargs):
    """Executes a subprocess using subprocess.Popen.

    Args:
      *args: passed to subprocess.Popen
      **kwargs: passed to subprocess.Popen; optionally, quiet=True can be passed
        as a kwarg, in which case stdout/stderr from the subprocess will not be
        written to sys. This kwarg will not get passed to subprocess.Popen.
        stdin/stdout/stderr may also appear here; they are extracted and
        forwarded positionally to start_subprocess.

    Returns:
      Tuple of (return code, full stdout output, full stderr output)

    Raises:
      ExecutorInvalidArgsError: if |args| is not a single command list.
    """
    if len(args) != 1 or not isinstance(args[0], list):
      raise ExecutorInvalidArgsError(
          'Expected a tuple with a single list element, found args={}'.format(
              args))
    # Default to capturing both output streams through pipes.
    stdin = None
    stdout = subprocess.PIPE
    stderr = subprocess.PIPE
    # Pop stream kwargs: start_subprocess rejects them in **kwargs and takes
    # them positionally instead.
    if 'stdin' in kwargs:
      stdin = kwargs['stdin']
      del kwargs['stdin']
    if 'stdout' in kwargs:
      stdout = kwargs['stdout']
      del kwargs['stdout']
    if 'stderr' in kwargs:
      stderr = kwargs['stderr']
      del kwargs['stderr']
    sp, quiet = (self.start_subprocess(stdin, stdout, stderr, *args, **kwargs))
    self._blocking_subprocesses.append(sp)
    stdout, stderr = self.wait_for_subprocess_completion(sp, quiet)
    logging.info('[%s] Completed command "%s" with return code %d',
                 time.strftime('%Y-%m-%d %H:%M:%S %Z'),
                 ' '.join(map(str, args[0])), sp.returncode)
    return (sp.returncode, stdout, stderr)

  def subprocess_chain(self, subprocess_chain):
    """Executes a chain of subprocesses, piping output between processes.

    Args:
      subprocess_chain: A list of subprocesses, where each item is a tuple of
        (command, **kwargs) to pass to subprocess.Popen. If command is empty, it
        must be []. If **kwargs is empty, it must be {}.
        e.g. [(['ls', '-al'], {'cwd':'path/to/foo'}), ([], {})]
        NOTE(review): an empty chain leaves prev_process as None and fails
        below — confirm callers always pass at least one command.

    Returns:
      Tuple of (return code, full stdout output, full stderr output)
    """
    assert isinstance(subprocess_chain, list)
    logging.info('Starting command pipeline.')
    prev_process = None
    for command, kwargs in subprocess_chain:
      assert isinstance(command, list)
      assert isinstance(kwargs, dict)
      if prev_process:
        # Feed the previous stage's stdout into this stage's stdin.
        sp, quiet = self.start_subprocess(prev_process.stdout, subprocess.PIPE,
                                          subprocess.PIPE, command, **kwargs)
        prev_process.stdout.close()  # Allow prev_process to receive a SIGPIPE
        prev_process = sp
      else:
        # First process starts without piping in any stdin
        prev_process, quiet = (
            self.start_subprocess(None, subprocess.PIPE, subprocess.PIPE,
                                  command, **kwargs))
      self._blocking_subprocesses.append(prev_process)
    stdout, stderr = self.wait_for_subprocess_completion(prev_process, quiet)
    logging.info('Command pipeline finished with returncode %d',
                 prev_process.returncode)
    return (prev_process.returncode, stdout, stderr)

  def non_blocking_subprocess(self, *args, **kwargs):
    """Starts a non blocking subprocess using subprocess.Popen.

    The new subprocess is detached from parent process and created under a new
    process group. Both its stdout and stderr go to the same temp log file.

    Args:
      *args: passed to subprocess.Popen
      **kwargs: passed to subprocess.Popen.

    Returns:
      A tuple of newly created subprocess, log file descriptor and log file
      path.
    """
    kwargs['detached'] = True
    file_descriptor, file_path = self.get_new_tmp_file()
    sp, _ = self.start_subprocess(None, file_descriptor, file_descriptor,
                                  *args, **kwargs)
    logging.info('Started command %s, pid = %d', args, sp.pid)
    return sp, file_descriptor, file_path

  def start_subprocess(self, stdin, stdout, stderr, *args, **kwargs):
    """Executes a subprocess using subprocess.Popen.

    Args:
      stdin: A file stream, subprocess.PIPE, or None to read stdin from.
      stdout: A file stream or subprocess.PIPE to redirect stdout.
      stderr: A file stream or subprocess.PIPE to redirect stderr.
      *args: passed to subprocess.Popen
      **kwargs: passed to subprocess.Popen; optionally, quiet=True can be passed
        as a kwarg, in which case stdout/stderr from the subprocess will not be
        written to file. This kwarg will not get passed to subprocess.Popen.
      text: If stdout and stderr should be treated as text, encoded in
        the default encoding for the platform.

    Returns:
      Tuple of (subprocess, boolean indicating if subprocess output needs to be
      saved.)

    Raises:
      ExecutorInvalidArgsError: if |args| is not a single command list.
      ExecutorInvalidKeywordArgsError: if stdin, stdout, stderr in |kwargs|.
    """
    for kwarg in ['stdin', 'stdout', 'stderr']:
      if kwarg in kwargs:
        raise ExecutorInvalidKeywordArgsError(
            '{} cannot be a kwarg: kwargs={}'.format(kwarg, kwargs))
    if len(args) != 1 or not isinstance(args[0], list):
      raise ExecutorInvalidArgsError(
          'Expected a tuple with a single list element, found args={}'.format(
              args))
    if 'cwd' in kwargs:
      if kwargs['cwd'] is None:
        cwd_message = 'with cwd=None, using {}'.format(os.path.abspath('.'))
      else:
        cwd_message = 'with specified cwd of "{}"'.format(kwargs['cwd'])
    else:
      cwd_message = 'with default cwd of "{}"'.format(os.path.abspath('.'))
    # NOTE(review): 'text' is read with get() but never removed from kwargs,
    # so explicitly passing text= here would reach Popen twice (text=text plus
    # **kwargs) and raise a TypeError — confirm callers rely on the default.
    text = kwargs.get('text', True)
    quiet = not text
    if 'quiet' in kwargs:
      quiet = kwargs['quiet']
      del kwargs['quiet']
    logging.info('[%s] Running command: "%s" %s, with env: %s',
                 time.strftime('%Y-%m-%d %H:%M:%S %Z'),
                 ' '.join(map(str, args[0])), cwd_message,
                 kwargs.get('env', {}))
    if not text and not quiet:
      raise ValueError('quiet and text cannot both be False')
    # Add any extra args from 'env' to a copy of os.environ
    extra_env = kwargs.pop('env', {})
    env = os.environ.copy()
    if extra_env:
      env.update(extra_env)
    # Point the child at our private temp dir unless the caller set one.
    if 'TMPDIR' not in env:
      env['TMPDIR'] = self.get_tmp_dir()
    sp = subprocess42.Popen(
        stdin=stdin,
        stdout=stdout,
        stderr=stderr,
        env=env,
        text=text,
        *args,
        **kwargs)
    return (sp, quiet)

  def wait_for_subprocess_completion(self, sp, quiet):
    """Waits till subprocess is completed.

    Args:
      sp: the subprocess42.Popen object to wait on.
      quiet: If True stdout/stderr from the subprocess will not be written to
        file/console.

    Returns:
      Tuple of strings (stdout, stderr)
    """
    stdout_array = []
    stderr_array = []
    # yield_any() streams (pipe name, chunk) pairs as output arrives.
    for pipe, data in sp.yield_any():
      if pipe == 'stdout':
        if not quiet:
          # Mirror the child's output to our own streams in real time.
          sys.stdout.write(data)
          sys.stdout.flush()
        # Output is always captured for the caller, even when quiet.
        stdout_array.append(data)
      else:
        if not quiet:
          sys.stderr.write(data)
          sys.stderr.flush()
        stderr_array.append(data)
    return ''.join(stdout_array), ''.join(stderr_array)
def print_command(command):
  """Writes one JSON command line to the real stdout, bypassing redirection.

  sys.__stdout__ is used (not sys.stdout) so commands still reach the parent
  process when stdout has been redirected through JsonIOWrapper.

  Args:
    command: a JSON-serializable object describing the command.

  Returns:
    The number of characters written, including the trailing newline.
  """
  serialized = json.dumps(command) + '\n'
  written = sys.__stdout__.write(serialized)
  sys.__stdout__.flush()
  return written
class JsonIOWrapper(io.TextIOBase):
  """A text stream that forwards writes as JSON 'log' commands.

  Text is buffered until a newline arrives (or the buffer grows past 1000
  characters), then emitted via print_command, tagged with the currently
  running step when one is set.
  """

  def __init__(self, channel):
    super().__init__()
    self._channel = channel
    self._line = ''

  def flush(self) -> None:
    # Emit any partially buffered text.
    if self._line:
      self._write_line()

  def write(self, s: str) -> int:
    self._line += s
    if not self._line.endswith('\n') and len(self._line) <= 1000:
      return 0
    return self._write_line()

  def _write_line(self) -> int:
    payload = {
        'channel': self._channel,
        'text': self._line,
    }
    # Reset the buffer before emitting so a failed write cannot double-log.
    self._line = ''
    if _RUNNING_STEP_NAME:
      payload['step_name'] = _RUNNING_STEP_NAME
    return print_command({
        'log': payload,
    })
@dataclasses.dataclass(frozen=True)
class FileCommentRange:
  """A character range within a file for a review comment."""
  # Lines are 1-based and character offsets 0-based (see the minimum-value
  # checks in Annotator.add_comments).
  start_line: int
  start_character: int
  end_line: int
  end_character: int
@dataclasses.dataclass(frozen=True)
class FileComment:
  """A review comment attached to a file."""
  file_path: str
  # Either a 1-based line number, a FileCommentRange, or None for a
  # file-level comment.
  location: Union[int, FileCommentRange, None]
  message: str
class Annotator(object):
  """Prints annotation commands to be interpreted by the buildbot master.

  Also accumulates review output — a free-form message, per-file comments
  and output build properties — which run_recipe writes to the result file
  once the recipe finishes.
  """

  # Caps applied to review output so the result payload stays bounded.
  _MAX_FILE_COMMENTS = 100
  _MAX_COMMENT_MESSAGE_CHARS = 1000
  _MAX_FILE_PATH_CHARS = 200

  def __init__(self):
    self._output_properties = {}
    self._messages = []
    self._file_comments = []

  @property
  def output_properties(self):
    """Build properties to report (name -> JSON-serializable value)."""
    return self._output_properties

  @property
  def message(self):
    """All added messages joined together, capped at the review limit."""
    # Use the module-level constant rather than repeating the magic number,
    # so the limit is defined in exactly one place.
    return truncate('\n\n'.join(self._messages), _REVIEW_MESSAGE_CHAR_LIMIT)

  @property
  def file_comments(self):
    """At most _MAX_FILE_COMMENTS accumulated FileComment objects."""
    return self._file_comments[:self._MAX_FILE_COMMENTS]

  def run_step(self, step):
    """Runs a build step, printing annotations as necessary.

    Arguments:
      step: BaseStep instance to be executed.

    Returns:
      SUCCESS if the step runs to completion, FAILURE otherwise.
    """
    print_command({
        'start_step': {
            'step_name': step.get_name(),
        },
    })
    return_value = SUCCESS
    result = False
    try:
      timeout_sec = step.get_timeout()
      with signal_utils.sigterm_handler():
        with timeout.timeout(timeout_secs=timeout_sec):
          result = step.run()
          assert isinstance(result, bool)
    except timeout.TimeoutException:
      # The step decides whether a timeout still counts as success.
      result = step.result_on_timeout()
      logging.error(
          'Error: Step timed out. The current step timeout '
          'threshold is %s seconds', timeout_sec)
      step.kill_all_blocking_subprocesses()
    except signal_utils.SigtermException:
      logging.error('Error: Received SIGTERM. If this was run by Buildbot, '
                    'that means a step timed out because no logs was '
                    'produced for a specified period of time. We should stop '
                    'and try to clean up everything before Buildbot sends '
                    'a SIGKILL.')
      step.set_halt_on_failure()
      step.kill_all_blocking_subprocesses()
    except Exception:  # pylint: disable=broad-except
      logging.exception('Step %s finished with an exception.', step.get_name())
      raise
    finally:
      # Even when an exception propagates, report the step as finished so
      # the master's step bookkeeping stays consistent.
      if not result:
        return_value = FAILURE
      step.kill_all_non_blocking_subprocesses()
      print_command({
          'finish_step': {
              'step_name': step.get_name(),
              'outcome': 'FAILED' if return_value == FAILURE else 'PASSED'
          },
      })
    return return_value

  def set_build_property(self, name, value):
    """Communicates a new build property to the buildbot master.

    Args:
      name: property name (must be a string)
      value: property value (must be JSON-serializable)
    """
    self._output_properties[name] = value

  def add_message(self, message):
    """Appends a free-form message to the review output."""
    self._messages.append(message)

  def add_comments(self, comments):
    """Validates and records per-file review comments.

    Args:
      comments: mapping of file path -> list of comment dicts. Each comment
        carries a 'message' and optionally either a 'line' (positive int) or
        a 'range' (dict with start_line/start_character/end_line/
        end_character).
    """
    for filename, file_comments in comments.items():
      if len(filename) > self._MAX_FILE_PATH_CHARS:
        logging.warning('Skipping file comment because path is too long')
      else:
        assert isinstance(file_comments, list)
        for comment in file_comments:
          if 'line' in comment:
            location = comment['line']
            assert isinstance(location, int) and location > 0
          elif 'range' in comment:
            range_data = comment['range']
            # Lines are 1-based; character offsets are 0-based.
            fields = [
                ('start_line', 1),
                ('start_character', 0),
                ('end_line', 1),
                ('end_character', 0)]
            for field, minimum_value in fields:
              assert field in range_data
              assert isinstance(range_data[field], int)
              assert range_data[field] >= minimum_value
            location = FileCommentRange(
                start_line=range_data['start_line'],
                start_character=range_data['start_character'],
                end_line=range_data['end_line'],
                end_character=range_data['end_character'],
            )
          else:
            # No anchor given: file-level comment.
            location = None
          self._file_comments.append(FileComment(
              file_path=filename,
              location=location,
              message=truncate(comment['message'],
                               self._MAX_COMMENT_MESSAGE_CHARS),
          ))
def truncate(s, max_length) -> str:
  """Returns s capped at max_length characters, with a truncation marker.

  When s exceeds the limit, the end is replaced with _TRUNCATED_MESSAGE so
  the returned string is exactly max_length characters long.
  """
  if len(s) <= max_length:
    return s
  keep = max_length - len(_TRUNCATED_MESSAGE)
  return s[:keep] + _TRUNCATED_MESSAGE
def postmortem(type_, value, tb):
  """A sys.excepthook for handling exceptions with pdb.

  Prints the full traceback, then opens an interactive post-mortem pdb
  session on the failing frame. Installed by main_logged when --pdb is set.
  """
  traceback.print_exception(type_, value, tb)
  pdb.pm()
def load_recipe_properties(parsed_args) -> 'Properties':
  """Loads recipe properties from file or from params JSON.

  Args:
    parsed_args: argparse namespace with properties_file and properties
      attributes; --properties-file takes precedence.

  Returns:
    The parsed properties dictionary.

  Raises:
    RecipeRunnerError: if neither --properties-file nor --properties is set.
  """
  if parsed_args.properties_file:
    filename = parsed_args.properties_file
    if filename == '-':
      raw = sys.stdin.read()
    else:
      # Use a context manager so the handle is closed promptly; the original
      # left the opened file to be reclaimed by garbage collection.
      with open(filename) as properties_file:
        raw = properties_file.read()
    properties = json.loads(raw)
  elif parsed_args.properties:
    properties = json.loads(parsed_args.properties)
  else:
    raise RecipeRunnerError('properties are required')
  assert isinstance(properties, dict)
  return properties
def amend_recipe_properties(properties):
  """Set defaults and convert special values in recipe properties.

  Overlays environment-derived settings, then derives repo_alias from the
  manifest URL when one is present.
  """
  amended = update_recipe_properties_from_environment(properties)
  manifest_url = amended.get('manifest_url')
  if manifest_url:
    amended['repo_alias'] = repo_utils.manifest_alias(manifest_url)
  return amended
def get_branch_root(properties) -> str:
  """Returns workspace_checkout_path from the properties, creating it.

  Args:
    properties: recipe properties dictionary.

  Returns:
    The path named by the 'workspace_checkout_path' property.

  Raises:
    ValueError: if workspace_checkout_path is missing or empty.
  """
  branch_root = properties.get('workspace_checkout_path')
  if not branch_root:
    raise ValueError('workspace_checkout_path is a required recipe parameter.')
  if not os.path.exists(branch_root):
    logging.info('Creating directory %s', branch_root)
    # exist_ok guards against a concurrent creation between the exists()
    # check above and this call.
    os.makedirs(branch_root, exist_ok=True)
  return branch_root
def write_result_file(message, file_comments, output_properties):
  """Writes the recipe result JSON consumed by the calling infrastructure."""
  payload = {
      'message': message,
      'file_comments': [convert_file_comment(c) for c in file_comments],
      'output_properties': output_properties,
  }
  result_file = pathlib.Path('/workspace/output/result.json')
  result_file.write_text(json.dumps(payload))
def convert_file_comment(file_comment):
  """Converts a FileComment into a JSON-serializable dictionary.

  The location, when present, becomes either a 'line' entry (int) or a
  'range' entry (dict of the FileCommentRange fields).
  """
  result = {
      'file_path': file_comment.file_path,
      'message': file_comment.message,
  }
  location = file_comment.location
  if isinstance(location, int):
    result['line'] = location
  elif isinstance(location, FileCommentRange):
    result['range'] = dataclasses.asdict(location)
  return result
def get_build_system(austin_build_system):
  """Maps the Austin build system name to the recipe build_system value."""
  if austin_build_system == 'presubmit':
    return 'cq'
  return 'catabuilder'
def get_gob_host_id(host_url: str) -> str:
  """Returns the ID of the GoB host. Derived from host_url.

  Recognizes <id>.googlesource.com and <id>.git.corp.google.com hostnames.

  Raises:
    ValueError: if the hostname does not match a known GoB host pattern.
  """
  hostname = get_hostname(host_url)
  if hostname:
    parts = hostname.split('.')
    is_public_host = len(parts) == 3 and hostname.endswith('.googlesource.com')
    is_corp_host = (
        len(parts) == 5 and hostname.endswith('.git.corp.google.com'))
    if is_public_host or is_corp_host:
      return parts[0]
  raise ValueError('Unable to extract GoB host ID from host URL.')
def get_hostname(host_url: str) -> str:
  """Returns the hostname component of host_url (None when absent)."""
  parsed = urllib.parse.urlparse(host_url)
  return parsed.hostname
def get_depends_on(patchset_data):
  """Converts one Austin patchset record into a depends_on dictionary."""
  change_number = int(patchset_data['change_number'])
  patchset_number = int(patchset_data['patch_number'])
  return {
      'change_number': change_number,
      'gob_host': get_gob_host_id(patchset_data['host_url']),
      'patchset_number': patchset_number,
      'project': patchset_data['project'],
      'branch': patchset_data['target_branch'],
  }
def get_patchset_properties():
  """Builds patchset properties from the AUSTIN_PATCHSETS env variable.

  The last entry in the list is the patchset under test; every earlier
  entry becomes a depends_on record.
  """
  patchset_data = json.loads(os.environ['AUSTIN_PATCHSETS'])
  if not patchset_data:
    return {}
  *dependencies, main_patch = patchset_data
  return {
      'depends_on': [get_depends_on(p) for p in dependencies],
      'gerrit': get_gob_host_id(main_patch['host_url']),
      'issue': int(main_patch['change_number']),
      'patch_project': main_patch['project'],
      'patchset': int(main_patch['patch_number']),
      'patch_branch': main_patch['target_branch'],
  }
def update_recipe_properties_from_environment(recipe_properties):
  """Returns a copy of recipe_properties overlaid with Austin settings.

  Values from the AUSTIN_* environment variables (and the derived patchset
  properties) take precedence over entries already present.
  """
  env = os.environ
  amended = dict(recipe_properties)
  amended.update({
      'artifacts_dir': env['AUSTIN_OUTPUT_ARTIFACT_PATH'],
      'basil_build_id': env['AUSTIN_BUILD_ID'],
      'build_env': env['AUSTIN_BUILD_ENVIRONMENT'],
      'build_system': get_build_system(env['AUSTIN_BUILD_SYSTEM']),
      'buildset': env['AUSTIN_BUILDSET_ID'],
      'container_push_url': env['AUSTIN_CONTAINER_PUSH_URL'],
      'manifest_branch': env['AUSTIN_MANIFEST_BRANCH'],
      'manifest_url': env['AUSTIN_MANIFEST_GIT_REPO_URL'],
      'workspace_checkout_path': env['AUSTIN_CHECKOUT_PATH'],
      'rbe_instance': env['AUSTIN_RBE_INSTANCE'],
  })
  amended.update(get_patchset_properties())
  return amended
def main(args):
  """Gets the recipe name and properties and runs an annotated run."""
  print_command({'enable_steps': {}})
  # Route all ordinary stdout/stderr through JSON log commands so the parent
  # can distinguish logs from annotation commands.
  stdout_wrapper = JsonIOWrapper('STDOUT')
  stderr_wrapper = JsonIOWrapper('STDERR')
  with contextlib.redirect_stdout(stdout_wrapper):
    with contextlib.redirect_stderr(stderr_wrapper):
      logging.getLogger().setLevel(level=logging.INFO)
      try:
        return main_logged(args)
      except BaseException:
        logging.exception('An unexpected error occurred')
        return 1
def main_logged(args):
  """Parses arguments, prepares properties, and runs the requested recipe."""
  # Increase NOFILE soft limit to allow bigger builds, see b/140431372
  _, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE)
  new_soft_limit = min(hard_limit, _RLIMIT_NOFILE_SOFT)
  resource.setrlimit(resource.RLIMIT_NOFILE, (new_soft_limit, hard_limit))

  parser = argparse.ArgumentParser(usage=USAGE)
  parser.add_argument('recipe')
  parser.add_argument(
      '--pdb',
      action='store_true',
      help='Drop into a pdb session on unhandled exception')
  parser.add_argument('--properties-file')
  parser.add_argument('--properties')
  parsed_args = parser.parse_args(args)

  if parsed_args.pdb:
    sys.excepthook = postmortem

  properties = load_recipe_properties(parsed_args)
  logging.info(
      'Properties: %s', json.dumps(properties, indent=2, sort_keys=True))
  properties = amend_recipe_properties(properties)
  logging.info(
      'Amended properties: %s',
      json.dumps(properties, indent=2, sort_keys=True),
  )
  branch_root = get_branch_root(properties)
  return run_recipe(
      recipe=parsed_args.recipe,
      properties=properties,
      eureka_root=branch_root,
      use_debugger=parsed_args.pdb)
if __name__ == '__main__':
  # Exit with the recipe result: 0 (SUCCESS) on success, 1 otherwise.
  sys.exit(main(sys.argv[1:]))