- Better error message if daemon/scripts-security-upd
is not on scripts-security-upd list
-- Fix retarded logging mechanism
- - This is interesting because the non-retarded way would be
- to have each subprocess send data through stdout, but
- this runs the risk of filling up the tubes. Maybe just
- create a log directory and give each process different
- files.
-
- The great initial deploy:
- Turn on mediawiki new autoinstaller
- Migrate all mediawiki installs
baton.add("--srv-path", dest="srv_path",
default=getenvpath("WIZARD_SRV_PATH") or "/afs/athena.mit.edu/contrib/scripts/git/autoinstalls",
help="Location of autoinstall Git repositories, such that $REPO_PATH/$APP.git is a repository (for development work). Environment variable is WIZARD_SRV_PATH.")
+ baton.add("--log-dir", dest="log_dir",
+ default=getenvpath("WIZARD_LOG_DIR") or None,
+ help="Log files for Wizard children processes are placed here.")
try:
command_name = args[0]
except IndexError:
app = mediawiki.Application('mediawiki')
d = deploy.Deployment(os.path.join(os.path.dirname(os.path.abspath(__file__)), "mediawiki"))
result = app.extract(d)
- assert result['WIZARD_PROXYKEY'] == '"wizard_proxykey"'
+ assert result['WIZARD_SECRETKEY'] == '"wizard_proxykey"'
assert result['WIZARD_SCRIPTPATH'] == '"/wizard_scriptpath"'
assert result['WIZARD_SESSIONNAME'] == 'wizard_SID'
assert result['WIZARD_DBSERVER'] == '"wizard_dbserver"'
import logging
+import traceback
import os
import sys
import optparse
def makeLogger(options, numeric_args):
global logging_setup
if logging_setup: return logging.getLogger()
- context = " ".join(numeric_args)
logger = logging.getLogger()
logger.setLevel(logging.INFO)
stderr = logging.StreamHandler(sys.stderr)
- stderr.setFormatter(logging.Formatter(" " * int(options.indent) + '%(levelname)s: %(message)s'))
- dateFormat = "%H:%M:%S"
- if options.context:
- logformatter = logging.Formatter("%(asctime)s %(levelname)s -- " + context + ": %(message)s", dateFormat)
- else:
- logformatter = logging.Formatter("%(asctime)s %(levelname)s: %(message)s", dateFormat)
+ stderr.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
if not options.quiet: logger.addHandler(stderr)
else: logger.addHandler(NullLogHandler()) # prevent default
if options.log_file:
file = logging.FileHandler(options.log_file)
+ logformatter = logging.Formatter("%(asctime)s %(levelname)s: %(message)s", "%H:%M:%S")
file.setFormatter(logformatter)
logger.addHandler(file)
- if options.log_file_chmod:
- os.chmod(options.log_file, int(options.log_file_chmod, 8))
if options.debug:
logger.setLevel(logging.DEBUG)
else:
stderr.setLevel(logging.INFO)
if options.log_file:
file.setLevel(logging.INFO)
+ old_excepthook = sys.excepthook
+ def our_excepthook(type, value, tb):
+ logging.info("".join(traceback.format_exception(type,value,tb)))
+        old_excepthook(type, value, tb)
+ sys.excepthook = our_excepthook
logging_setup = True
return logger
"""Takes parsed options, and breaks them back into a command
line string that we can pass into a subcommand"""
args = []
- grab["log_file"]= "--log-file"
grab["debug"] = "--debug"
grab["verbose"] = "--verbose"
- grab["indent"] = "--indent"
grab["quiet"] = "--quiet"
#grab["log_db"] = "--log-db"
for k,flag in grab.items():
value = getattr(options, k)
- if not value and k != "indent": continue
+ if not value: continue
args.append(flag)
if type(value) is not bool:
- if k == "indent":
- value += 4
args.append(str(value))
- args.append("--context") # always have context for a subcommand
return args
class NullLogHandler(logging.Handler):
default=boolish(os.getenv("WIZARD_QUIET")), help="Turns off output to stdout. Environment variable is WIZARD_QUIET")
group.add_option("--log-file", dest="log_file", metavar="FILE",
default=None, help="Logs verbose output to file")
- group.add_option("--log-file-chmod", dest="log_file_chmod", metavar="CHMOD",
- default=None, help="Chmod the log file after opening. Number is octal. You must chmod the log file 666 and place the file in /tmp if subprocesses are running as different users.")
- group.add_option("--indent", dest="indent", metavar="WIDTH",
- default=0, help="Indents stdout, useful for nested calls")
- group.add_option("--context", dest="context", action="store_true",
- default=False, help="Adds context to logs, useful for parallel processing")
self.add_option_group(group)
options, numeric_args = self.parse_args(argv)
makeLogger(options, numeric_args)
import os
import os.path
import pwd
+import hashlib
+import errno
+import time
import wizard
from wizard import deploy, util, shell, sset, command
sh = make_shell(options)
seen = make_serialized_set(options)
my_uid = os.getuid() # to see if we have root
+ if options.log_dir:
+ # must not be on AFS
+ try:
+ os.mkdir(options.log_dir)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+ options.log_dir = os.path.join(options.log_dir, str(int(time.time())))
+ os.mkdir(options.log_dir) # if fails, be fatal
+ os.chmod(options.log_dir, 0o777)
# loop stuff
errors = {}
for line in deploy.get_install_lines(options.versions_path):
+ child_args = list(base_args)
# validate and filter the deployments
try:
d = deploy.Deployment.parse(line)
if not real.startswith(pwd.getpwuid(uid).pw_dir + "/"):
logging.error("Security check failed, owner of deployment and owner of home directory mismatch for %s" % d.location)
continue
+ # calculate the log file, if a log dir was specified
+ if options.log_dir:
+ log_file = os.path.join(options.log_dir, shorten(d.location) + ".log")
+ child_args.append("--log-file=" + log_file)
# actual meat
def make_on_pair(d):
def on_success(stdout, stderr):
return (on_success, on_error)
on_success, on_error = make_on_pair(d)
sh.wait() # wait for a parallel processing slot to be available
- sh.call("wizard", "migrate", d.location, *base_args,
+ sh.call("wizard", "migrate", d.location, *child_args,
on_success=on_success, on_error=on_error)
sh.join()
for name, deploys in errors.items():
default=None, help="File to read/write paths of already processed installs. These will be skipped.")
baton.push(parser, "versions_path")
baton.push(parser, "srv_path")
+ baton.push(parser, "log_dir")
options, args, = parser.parse_all(argv)
if len(args) > 1:
parser.error("too many arguments")
base_args.append("--quiet")
return base_args
+def shorten(dir):
+ hash = hashlib.sha1(dir).hexdigest()[0:7]
+ return hash + dir.replace('/', '~')
+
def make_shell(options):
if options.no_parallelize:
sh = shell.DummyParallelShell()
if len(self.running) < self.max: return
# now, wait for open pids.
try:
- pid, status = os.waitpid(-1, 0)
+ self.reap(*os.waitpid(-1, 0))
except OSError as e:
if e.errno == errno.ECHILD: return
- raise e
+ raise
+ def join(self):
+ """Waits for all of our subprocesses to terminate."""
+ try:
+ while True:
+ self.reap(*os.waitpid(-1, 0))
+ except OSError as e:
+ if e.errno == errno.ECHILD: return
+ raise
+ def reap(self, pid, status):
+ """Reaps a process."""
# ooh, zombie process. reap it
proc, args, python, on_success, on_error = self.running.pop(pid)
# XXX: this is slightly dangerous; should actually use
on_error(eclass(proc.returncode, args, stdout, stderr))
return
on_success(stdout, stderr)
- def join(self):
- """Waits for all of our subprocesses to terminate."""
- try:
- while os.waitpid(-1, 0):
- pass
- except OSError as e:
- if e.errno == errno.ECHILD: return
- raise e
+
class DummyParallelShell(ParallelShell):
"""Same API as :class:`ParallelShell`, but doesn't actually
from dateutil.tz import tzoffset
from datetime import datetime
-from wizard import deploy, log
+from wizard import deploy, old_log
def getTestFile(file):
return os.path.join(os.path.dirname(os.path.abspath(__file__)), file)
def test_deploy_log_load():
# this also is test_deploy_source_parse() and test_application_version_parse()
- dlog = log.DeployLog.load(deploy.Deployment(getTestFile("log_test")))
+ dlog = old_log.DeployLog.load(deploy.Deployment(getTestFile("old_log_test")))
assert dlog[0].datetime == datetime(2006, 3, 23, 10, 7, 40, tzinfo=tzoffset(None, -5 * 60 * 60))
assert dlog[0].user == "unknown"
- assert isinstance(dlog[0].source, log.TarballInstall)
+ assert isinstance(dlog[0].source, old_log.TarballInstall)
assert dlog[0].source.location == "/afs/athena.mit.edu/contrib/scripts/deploy/mediawiki.tar.gz"
assert dlog[0].source.isDev == False
assert dlog[0].version == deploy.applications()["mediawiki"].makeVersion('1.5.6')
assert dlog[1].datetime == datetime(2007, 10, 17, 3, 38, 2, tzinfo=tzoffset(None, -4 * 60 * 60))
assert dlog[1].user == "quentin@QUICHE-LORRAINE.MIT.EDU"
- assert isinstance(dlog[1].source, log.OldUpdate)
+ assert isinstance(dlog[1].source, old_log.OldUpdate)
assert dlog[1].source.isDev == True
assert dlog[1].version == deploy.applications()["mediawiki"].makeVersion('1.5.6')
fully qualified exception name.
"""
lines = output.split("\n")
- for line in lines[1:]: # skip the "traceback" line
+ cue = False
+ for line in lines[1:]:
line = line.rstrip()
- if line[0] == ' ': continue
- if line[-1] == ":":
- return line[:-1]
- else:
- return line
+ if not line: continue
+ if line[0] == ' ':
+ cue = True
+ continue
+ if cue:
+ cue = False
+ if line[-1] == ":":
+ result = line[:-1]
+ else:
+ result = line
+ return result
def get_dir_uid(dir):
"""Finds the uid of the person who owns this directory."""