Fix bug where php.ini not being rewritten for MediaWiki.
diff --git a/wizard/command/mass_migrate.py b/wizard/command/mass_migrate.py
index cbacff7b11ba62ca453c5f94c3eb13ec9329367d..c4f4389eca5c9551f47ef2b7c56cc04e2f30de91 100644
--- a/wizard/command/mass_migrate.py
+++ b/wizard/command/mass_migrate.py
@@ -1,58 +1,56 @@
-import optparse
 import logging
 import os
 import os.path
-import pwd
+import itertools
 
-import wizard
-from wizard import deploy, util, shell, sset, command
-from wizard.command import migrate
+from wizard import deploy, report, shell, sset, command
 
 def main(argv, baton):
     options, args = parse_args(argv, baton)
     app = args[0]
     base_args = calculate_base_args(options)
-    sh = make_shell(options)
-    seen = make_serialized_set(options)
-    my_uid = os.getuid() # to see if we have root
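+    # ParallelShell.make presumably mirrors the removed make_shell() helper below:
+    # a serial dummy shell under --no-parallelize, otherwise up to max_processes children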
+    sh = shell.ParallelShell.make(options.no_parallelize, options.max_processes)
+    command.create_logdir(options.log_dir)
+    seen = sset.make(options.seen)
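+    # uid 0 means we are root; only then does each install need the home-directory security check below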
+    is_root = not os.getuid()
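+    # per-install outcomes go under the log directory, presumably one report per category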
+    runtime = report.make_fresh(options.log_dir, "success", "warnings", "errors")
     # loop stuff
     errors = {}
-    uid = None
-    for line in deploy.get_install_lines(options.versions_path):
-        # validate and filter the deployments
-        try:
-            d = deploy.Deployment.parse(line)
-        except deploy.DeploymentParseError, deploy.NoSuchApplication:
-            continue
-        name = d.application.name
-        if name != app: continue
+    i = 0
+    deploys = deploy.parse_install_lines(app, options.versions_path, user=options.user)
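+    # islice enforces --limit; a limit of None leaves the iterator uncapped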
+    requested_deploys = itertools.islice(deploys, options.limit)
+    for i, d in enumerate(requested_deploys, 1):
+        # skip installs we have already processed (recorded in the --seen file)
         if d.location in seen:
             continue
-        # security check: see if the user's directory is the prefix of what
-        if not my_uid:
-            uid = util.get_dir_uid(d.location)
-            real = os.path.realpath(d.location)
-            if not real.startswith(pwd.getpwuid(uid).pw_dir + "/"):
-                logging.error("Security check failed, owner of deployment and owner of home directory mismatch for %s" % d.location)
-                continue
+        if is_root and not command.security_check_homedir(d.location):
+            continue
+        logging.info("Processing %s" % d.location)
+        child_args = list(base_args)
+        # calculate the log file, if a log dir was specified
+        if options.log_dir:
+            log_file = command.calculate_log_name(options.log_dir, i)
+            child_args.append("--log-file=" + log_file)
         # actual meat
-        def make_on_pair(d):
+        def make_on_pair(d, i):
+            # extra function scope so each pair of callbacks closes over its own d and i
             def on_success(stdout, stderr):
-                seen.add(d.location)
+                if stderr:
+                    logging.warning("Warnings [%04d] %s:\n%s" % (i, d.location, stderr))
+                    runtime.write("warnings", i, d.location)
+                runtime.write("success", i, d.location)
             def on_error(e):
-                if e.name == "wizard.command.migrate.AlreadyMigratedError":
-                    seen.add(d.location)
+                if e.name == "wizard.command.migrate.AlreadyMigratedError" or \
+                   e.name == "AlreadyMigratedError":
                     logging.info("Skipped already migrated %s" % d.location)
                 else:
-                    name = e.name
-                    if name not in errors: errors[name] = []
-                    errors[name].append(d)
-                    logging.error("%s in %s" % (name, d.location))
+                    errors.setdefault(e.name, []).append(d)
+                    logging.error("%s in [%04d] %s", e.name, i, d.location)
+                    runtime.write("errors", i, d.location)
             return (on_success, on_error)
-        on_success, on_error = make_on_pair(d)
-        sh.wait() # wait for a parallel processing slot to be available
-        sh.callAsUser("wizard", "migrate", d.location, *base_args,
-                      uid=uid, on_success=on_success, on_error=on_error)
+        on_success, on_error = make_on_pair(d, i)
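+        # dispatch the child "wizard migrate" run; the callbacks above fire when it finishes
+        # (the old explicit sh.wait() slot management is presumably folded into call() now)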
+        sh.call("wizard", "migrate", d.location, *child_args,
+                on_success=on_success, on_error=on_error)
     sh.join()
     for name, deploys in errors.items():
         logging.warning("%s from %d installs" % (name, len(deploys)))
@@ -65,50 +63,28 @@ Essentially equivalent to running '%prog migrate' on all
 autoinstalls for a particular application found by parallel-find,
 but with advanced reporting.
 
-When doing an actual run, it is recommended to use --seen to
-be able to resume gracefully (without it, mass-migrate must
-stat every install to find out if it migrated it yet).
-
 This command is intended to be run as root on a server with
-the scripts AFS patch.  You may run it as an unpriviledged
-user for testing purposes, but then you MUST NOT run this on
-untrusted repositories."""
+the scripts AFS patch."""
     parser = command.WizardOptionParser(usage)
-    parser.add_option("--no-parallelize", dest="no_parallelize", action="store_true",
-            default=False, help="Turn off parallelization")
-    parser.add_option("--dry-run", dest="dry_run", action="store_true",
-            default=False, help="Print commands that would be run. Implies --no-parallelize")
-    parser.add_option("--max", dest="max",
-            default=10, help="Maximum subprocesses to run concurrently")
-    parser.add_option("--seen", dest="seen",
-            default=None, help="File to read/write paths of already processed installs. These will be skipped.")
+    baton.push(parser, "log_dir")
+    baton.push(parser, "seen")
+    baton.push(parser, "no_parallelize")
+    baton.push(parser, "dry_run")
+    baton.push(parser, "max_processes")
+    parser.add_option("--force", dest="force", action="store_true",
+            default=False, help="Force migrations to occur even if .wizard or .git exists.")
+    baton.push(parser, "limit")
     baton.push(parser, "versions_path")
     baton.push(parser, "srv_path")
+    baton.push(parser, "user")
     options, args = parser.parse_all(argv)
     if len(args) > 1:
         parser.error("too many arguments")
     elif not args:
         parser.error("must specify application to migrate")
-    if options.dry_run:
-        options.no_parallelize = True
     return options, args
 
 def calculate_base_args(options):
-    base_args = command.makeBaseArgs(options, dry_run="--dry-run", srv_path="--srv-path")
-    if not options.debug:
-        base_args.append("--quiet")
-    return base_args
-
-def make_shell(options):
-    if options.no_parallelize:
-        sh = shell.DummyParallelShell()
-    else:
-        sh = shell.ParallelShell(max=int(options.max))
-    return sh
+    return command.make_base_args(options, dry_run="--dry-run", srv_path="--srv-path",
+            force="--force")
 
-def make_serialized_set(options):
-    if options.seen:
-        seen = sset.SerializedSet(options.seen)
-    else:
-        seen = sset.DummySerializedSet()
-    return seen
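
The two deleted factory helpers above suggest what the library-level
constructors now called at the top of main() presumably look like. A minimal,
hypothetical sketch (not the actual wizard source; the helper names here are
invented, while DummyParallelShell, SerializedSet, and DummySerializedSet are
the classes the removed code referenced):

    from wizard import shell, sset

    def make_parallel_shell(no_parallelize, max_processes):
        # Serial fallback when --no-parallelize is given; otherwise cap the
        # number of concurrent child processes at max_processes.
        if no_parallelize:
            return shell.DummyParallelShell()
        return shell.ParallelShell(max=int(max_processes))

    def make_seen_set(seen_path):
        # Persistent set of already-processed install paths when --seen is
        # given; otherwise presumably an in-memory set that forgets between runs.
        if seen_path:
            return sset.SerializedSet(seen_path)
        return sset.DummySerializedSet()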