-import optparse
import logging
import os
import os.path
-import pwd
-import hashlib
-import errno
-import time
+import itertools
-import wizard
-from wizard import deploy, util, shell, sset, command
-from wizard.command import migrate
+from wizard import deploy, report, shell, sset, command
def main(argv, baton):
options, args = parse_args(argv, baton)
app = args[0]
base_args = calculate_base_args(options)
- sh = make_shell(options)
- seen = make_serialized_set(options)
+ sh = shell.ParallelShell.make(options.no_parallelize, options.max_processes)
+ command.create_logdir(options.log_dir)
+ seen = sset.make(options.seen)
is_root = not os.getuid()
- warnings_log, errors_log = open_aggregate_logs(options)
+ runtime = report.make_fresh(options.log_dir, "success", "warnings", "errors")
# loop stuff
errors = {}
i = 0
- # [] needed to workaround subtle behavior of frozenset("")
- for d in deploy.parse_install_lines([app], options.versions_path):
+ deploys = deploy.parse_install_lines(app, options.versions_path, user=options.user)
+ requested_deploys = itertools.islice(deploys, options.limit)
+ for i, d in enumerate(requested_deploys, 1):
# check if we want to punt due to --limit
- i += 1
- if options.limit and i > options.limit:
- break
if d.location in seen:
continue
- if is_root and not security_check_homedir(d):
+ if is_root and not command.security_check_homedir(d.location):
continue
+ logging.info("Processing %s" % d.location)
child_args = list(base_args)
# calculate the log file, if a log dir was specified
if options.log_dir:
- log_file = os.path.join(options.log_dir, calculate_log_name(i, d.location))
+ log_file = command.calculate_log_name(options.log_dir, i)
child_args.append("--log-file=" + log_file)
# actual meat
def make_on_pair(d, i):
# we need to make another stack frame so that d and i get specific bindings.
def on_success(stdout, stderr):
if stderr:
- warnings_log.write("%s\n" % d.location)
logging.warning("Warnings [%04d] %s:\n%s" % (i, d.location, stderr))
- seen.add(d.location)
+ runtime.write("warnings", i, d.location)
+ runtime.write("success", i, d.location)
def on_error(e):
if e.name == "wizard.command.migrate.AlreadyMigratedError" or \
e.name == "AlreadyMigratedError":
- seen.add(d.location)
logging.info("Skipped already migrated %s" % d.location)
else:
- name = e.name
- if name not in errors: errors[name] = []
- errors[name].append(d)
- logging.error("%s in [%04d] %s" % (name, i, d.location))
- errors_log.write("%s\n" % d.location)
+ errors.setdefault(e.name, []).append(d)
+ logging.error("%s in [%04d] %s", e.name, i, d.location)
+ runtime.write("errors", i, d.location)
return (on_success, on_error)
on_success, on_error = make_on_pair(d, i)
sh.call("wizard", "migrate", d.location, *child_args,
autoinstalls for a particular application found by parallel-find,
but with advanced reporting.
-When doing an actual run, it is recommended to use --seen to
-be able to resume gracefully (without it, mass-migrate must
-stat every install to find out if it migrated it yet).
-
This command is intended to be run as root on a server with
-the scripts AFS patch. You may run it as an unpriviledged
-user for testing purposes, but then you MUST NOT run this on
-untrusted repositories."""
+the scripts AFS patch."""
parser = command.WizardOptionParser(usage)
baton.push(parser, "log_dir")
- parser.add_option("--seen", dest="seen",
- default=None, help="File to read/write paths of already processed installs."
- "These will be skipped.")
- parser.add_option("--no-parallelize", dest="no_parallelize", action="store_true",
- default=False, help="Turn off parallelization")
- parser.add_option("--dry-run", dest="dry_run", action="store_true",
- default=False, help="Print commands that would be run. Implies --no-parallelize")
- parser.add_option("--max", dest="max",
- default=10, help="Maximum subprocesses to run concurrently")
+ baton.push(parser, "seen")
+ baton.push(parser, "no_parallelize")
+ baton.push(parser, "dry_run")
+ baton.push(parser, "max_processes")
parser.add_option("--force", dest="force", action="store_true",
default=False, help="Force migrations to occur even if .scripts or .git exists.")
- parser.add_option("--limit", dest="limit", type="int",
- default=0, help="Limit the number of autoinstalls to look at.")
+ baton.push(parser, "limit")
baton.push(parser, "versions_path")
baton.push(parser, "srv_path")
+ baton.push(parser, "user")
options, args, = parser.parse_all(argv)
if len(args) > 1:
parser.error("too many arguments")
elif not args:
parser.error("must specify application to migrate")
- if options.dry_run:
- options.no_parallelize = True
return options, args
-def open_aggregate_logs(options):
- warnings_logname = "/tmp/wizard-migrate-warnings.log"
- errors_logname = "/tmp/wizard-migrate-errors.log"
- if options.log_dir:
- # must not be on AFS, since subprocesses won't be
- # able to write to the logfiles do the to the AFS patch.
- try:
- os.mkdir(options.log_dir)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
- if options.force:
- options.log_dir = os.path.join(options.log_dir, str(int(time.time())))
- os.mkdir(options.log_dir) # if fails, be fatal
- os.chmod(options.log_dir, 0o777)
- warnings_logname = os.path.join(options.log_dir, "warnings.log")
- errors_logname = os.path.join(options.log_dir, "errors.log")
- warnings_log = open(warnings_logname, "a")
- errors_log = open(errors_logname, "a")
- return warnings_log, errors_log
-
-def security_check_homedir(d):
- uid = util.get_dir_uid(d.location)
- real = os.path.realpath(d.location)
- try:
- if not real.startswith(pwd.getpwuid(uid).pw_dir + "/"):
- logging.error("Security check failed, owner of deployment and"
- "owner of home directory mismatch for %s" % d.location)
- return False
- except KeyError:
- logging.error("Security check failed, could not look up"
- "owner of %s (uid %d)" % (d.location, uid))
- return False
- return True
-
def calculate_base_args(options):
- return command.makeBaseArgs(options, dry_run="--dry-run", srv_path="--srv-path",
+ return command.make_base_args(options, dry_run="--dry-run", srv_path="--srv-path",
force="--force")
-def calculate_log_name(i, dir):
- return "%04d" % i + dir.replace('/', '-') + ".log"
-
-def make_shell(options):
- if options.no_parallelize:
- return shell.DummyParallelShell()
- else:
- return shell.ParallelShell(max=int(options.max))
-
-def make_serialized_set(options):
- if options.seen:
- return sset.SerializedSet(options.seen)
- else:
- return sset.DummySerializedSet()