- Distinguish from logging and reporting (so we can easily send mail
to users)
- - Certain classes of error will continually fail, so they should
- be put in a different "seen" file which also skips them, unless
- we have some sort of gentle force. These are "soft ignores".
- - If we add soft ignores, you lose information from reports, so there needs
- to be some way to also have the soft ignore report a "cached error". This
- probably means augmenting the serialized set to be a serialized dict.
- Figure out a way of collecting blacklist data from .scripts/blacklisted
and aggregate it together
- Failed migrations should be wired to have wizard commands in them
output summary charts when I increase specificity
- Summary script should do something intelligent when distinguishing
between old-style and new-style installs
+ - Report code in wizard/command/__init__.py is ugly as sin
- Other stuff
- Don't use the scripts heuristics unless we're on scripts with the
import optparse
import errno
import pwd
+import shutil
import wizard
from wizard import util
# # XXX: update last symlink
os.chmod(log_dir, 0o777)
-def open_reports(log_dir, names=('warnings', 'errors')):
class Report(object):
    """Bundle of report file objects.

    Each entry of ``names`` becomes an attribute on the instance bound to
    the corresponding file object in ``fobjs``; ``skip`` is the set of
    deployment indices that processing should pass over.
    """
    #: Set of indices that should be skipped
    skip = None
    def __init__(self, names, fobjs, skip):
        self.skip = skip
        # bind each report name to its file object as an attribute
        self.__dict__.update(zip(names, fobjs))
+
def report_files(log_dir, names):
    """Return the paths of the report files for ``names`` under ``log_dir``.

    Each name ``x`` maps to ``log_dir/x.txt``.
    """
    # was os.path.join(os.path.join(log_dir, "%s.txt" % x)): the outer
    # single-argument join was a redundant no-op wrapper
    return [os.path.join(log_dir, "%s.txt" % name) for name in names]
+
def read_reports(log_dir, names):
    """
    Opens each named report file under ``log_dir`` for reading.

    Returns a :class:`Report` whose attributes (one per entry in
    ``names``) are the open file objects; its ``skip`` set is empty.
    Callers are responsible for closing the file objects.
    """
    handles = []
    for path in report_files(log_dir, names):
        handles.append(open(path, "r"))
    return Report(names, handles, set())
+
def open_reports(log_dir, names=('warnings', 'errors'), redo=False, append_names=()):
    """
    Returns a :class:`Report` object configured appropriately for the
    parameters passed.  This object has attributes names + append_names which
    contain file objects opened as "w".  ``names`` report files are cleared
    unconditionally when they are opened (i.e. are not preserved from run to
    run).  ``append_names`` report files are not cleared unless ``redo`` is
    True, and persist over runs: assuming the convention that [0001] is the
    index of the deployment, the ``skip`` attribute on the returned report
    object contains indexes that should be skipped.
    """
    skip = set()
    if not redo:
        # carry over indices already recorded in the persistent reports
        rr = read_reports(log_dir, append_names)
        for name in append_names:
            fobj = getattr(rr, name)
            # lines are assumed to look like "[0001] ...": chars 1-4 hold
            # the deployment index
            skip |= set(int(line[1:5]) for line in fobj.read().splitlines())
            fobj.close()
    else:
        # redo: treat the persistent reports like ordinary ones so that
        # they get truncated below
        names += append_names
        append_names = ()
    files = report_files(log_dir, names)
    append_files = report_files(log_dir, append_names)
    # backup old reports
    old_reports = os.path.join(log_dir, "old-reports")
    rundir = os.path.join(old_reports, "run")
    if not os.path.exists(old_reports):
        # NOTE(review): the previous code renamed every file onto the
        # rundir path itself, which fails while rundir's parent does not
        # exist and would otherwise keep only the last file; create the
        # directory and move each file under its own basename instead.
        os.makedirs(rundir)
        for f in files:
            if os.path.exists(f):
                os.rename(f, os.path.join(rundir, os.path.basename(f)))
        for f in append_files:
            if os.path.exists(f):
                shutil.copy(f, rundir)
    return Report(names + append_names,
                  [open(f, "w") for f in files] +
                  [open(f, "a") for f in append_files],
                  skip)
class NullLogHandler(logging.Handler):
"""Log handler that doesn't do anything"""
command.create_logdir(options.log_dir)
seen = sset.make(options.seen)
is_root = not os.getuid()
- warnings_report, errors_report = command.open_reports(options.log_dir)
+ report = command.open_reports(options.log_dir)
# loop stuff
errors = {}
i = 0
# we need to make another stack frame so that d and i get specific bindings.
def on_success(stdout, stderr):
    # Child migration succeeded; record any warnings it emitted on stderr
    # in the warnings report, then mark the deployment as handled.
    # NOTE(review): ``report``, ``d``, ``i`` and ``seen`` are free
    # variables bound by the enclosing (unseen) make_on_pair scope.
    if stderr:
        report.warnings.write("%s\n" % d.location)
        logging.warning("Warnings [%04d] %s:\n%s" % (i, d.location, stderr))
    seen.add(d.location)
def on_error(e):
    # Child migration failed: bucket the deployment under the error name
    # for the end-of-run summary, and log both to the logger and to the
    # errors report.
    # NOTE(review): ``name`` is presumably bound from e.name in lines not
    # visible in this chunk — confirm against the full source.
    if name not in errors: errors[name] = []
    errors[name].append(d)
    logging.error("%s in [%04d] %s" % (name, i, d.location))
    report.errors.write("%s\n" % d.location)
# hand both callbacks back to the caller (closing make_on_pair)
return (on_success, on_error)
on_success, on_error = make_on_pair(d, i)
sh.call("wizard", "migrate", d.location, *child_args,
command.create_logdir(options.log_dir)
seen = sset.make(options.seen)
is_root = not os.getuid()
- lookup_report, warnings_report, errors_report, merge_report, verify_report = command.open_reports(options.log_dir, ('lookup', 'warnings', 'errors', 'merge', 'verify'))
+ report = command.open_reports(options.log_dir, ('lookup', 'warnings', 'errors'), options.redo, ('merge', 'verify'))
# loop stuff
errors = {}
i = 0
deploys = deploy.parse_install_lines(app, options.versions_path, user=options.user)
requested_deploys = itertools.islice(deploys, options.limit)
for i, d in enumerate(requested_deploys, 1):
- lookup_report.write("%04d %s\n" % (i, d.location))
+ report.lookup.write("%04d %s\n" % (i, d.location))
# check if we want to punt due to --limit
if d.location in seen:
continue
+ if i in report.skip:
+ continue
if is_root and not command.security_check_homedir(d.location):
continue
# XXX: we may be able to punt based on detected versions from d, which
# we need to make another stack frame so that d and i get specific bindings.
def on_success(stdout, stderr):
    # Child upgrade succeeded; record any warnings it emitted on stderr,
    # then mark the deployment as handled either way.
    if stderr:
        # was report.lookup.write(...): warnings belong in the warnings
        # report (compare the migrate command's on_success); lookup.txt
        # uses the unbracketed "%04d location" format, so the bracketed
        # entry would also corrupt that report.
        report.warnings.write("[%04d] %s\n" % (i, d.location))
        logging.warning("[%04d] Warnings at [%s]:\n%s" % (i, d.location, stderr))
    seen.add(d.location)
def on_error(e):
seen.add(d.location)
tmpdir = e.stdout.rstrip()
logging.warning("[%04d] Merge failed: resolve at [%s], source at [%s]" % (i, tmpdir, d.location))
- merge_report.write("[%04d] %s %s\n" % (i, tmpdir, d.location))
+ report.merge.write("[%04d] %s %s\n" % (i, tmpdir, d.location))
fails['merge'] += 1
else:
name = e.name
# This should actually be a warning, but
# it's a really common error
logging.info("[%04d] Could not verify application at %s" % (i, url))
- verify_report.write("[%04d] %s\n" % (i, url))
+ report.verify.write("[%04d] %s\n" % (i, url))
fails['verify'] += 1
else:
if name not in errors: errors[name] = []
errors[name].append(d)
msg = "[%04d] %s in %s" % (i, name, d.location)
logging.error(msg)
- errors_report.write(msg + "\n")
+ report.errors.write(msg + "\n")
return (on_success, on_error)
on_success, on_error = make_on_pair(d, i)
sh.call("wizard", "upgrade", d.location, *child_args,
baton.push(parser, "user")
parser.add_option("--force", dest="force", action="store_true",
default=False, help="Force running upgrade even if it's already at latest version.")
+ parser.add_option("--redo", dest="redo", action="store_true",
+ default=False, help="Redo failed upgrades; use this if you updated Wizard's code.")
options, args, = parser.parse_all(argv)
if len(args) > 1:
parser.error("too many arguments")