import errno
import logging
import os
import subprocess
import sys

import wizard
from wizard import util
wizard_bin = sys.argv[0]
"""
This is the path to the wizard executable as specified
by the caller; it lets us recursively invoke wizard.
"""
def is_python(args):
    """Detects whether or not an argument list invokes a Python program.

    :param args: argument list, with ``args[0]`` the executable name.
    :returns: True if the command is ``python`` or ``wizard`` (wizard
        itself is a Python program, so its tracebacks are Python ones).
    """
    return args[0] == "python" or args[0] == "wizard"
def drop_priviledges(dir, log_file):
    """
    Checks if we are running as root.  If we are, attempt to drop
    priviledges to the user who owns ``dir``, by re-calling
    itself using sudo with exec, such that the new process subsumes our
    current one.  If ``log_file`` is passed, the file is chown'ed
    to the user we are dropping priviledges to, so the subprocess
    can write to it.
    """
    # (the "priviledges" spelling is kept for API compatibility)
    if os.getuid():
        # already non-root; nothing to drop
        return
    uid = util.get_dir_uid(dir)
    if not uid:
        return
    # sudo scrubs the environment by default, so re-state the wizard
    # variables as explicit VAR=value arguments.
    args = []
    for k, v in os.environ.items():
        if k.startswith('WIZARD_') or k == "SSH_GSSAPI_NAME":
            args.append("%s=%s" % (k, v))
    args += sys.argv
    logging.debug("Dropping priviledges")
    if log_file:
        # let the unprivileged child keep writing to the log
        os.chown(log_file, uid, -1)
    # exec replaces the current process; nothing runs after this on success
    os.execlp('sudo', 'sudo', '-u', '#' + str(uid), *args)
class Shell(object):
"""
def call(self, *args, **kwargs):
"""
Performs a system call. The actual executable and options should
- be passed as arguments to this function. Several keyword arguments
+ be passed as arguments to this function. It will magically
+ ensure that 'wizard' as a command works. Several keyword arguments
are also supported:
:param python: explicitly marks the subprocess as Python or not Python
for improved error reporting. By default, we use
:func:`is_python` to autodetect this.
:param input: input to feed the subprocess on standard input.
- :returns: a tuple of strings ``(stdout, stderr)``
+ :param interactive: whether or not directly hook up all pipes
+ to the controlling terminal, to allow interaction with subprocess.
+ :param strip: if ``True``, instead of returning a tuple,
+ return the string stdout output of the command with trailing newlines
+ removed. This emulates the behavior of backticks and ``$()`` in Bash.
+ Prefer to use :meth:`eval` instead (you should only need to explicitly
+ specify this if you are using another wrapper around this function).
+ :param log: if True, we log the call as INFO, if False, we log the call
+ as DEBUG, otherwise, we detect based on ``strip``.
+ :param stdout:
+ :param stderr:
+ :param stdin: a file-type object that will be written to or read from as a pipe.
+ :returns: a tuple of strings ``(stdout, stderr)``, or a string ``stdout``
+ if ``strip`` is specified.
>>> sh = Shell()
>>> sh.call("echo", "Foobar")
('Foobar\\n', '')
-
- .. note::
-
- This function does not munge trailing whitespace. A common
- idiom for dealing with this is::
-
- sh.call("echo", "Foobar")[0].rstrip()
+ >>> sh.call("cat", input='Foobar')
+ ('Foobar', '')
"""
+ self._wait()
+ kwargs.setdefault("interactive", False)
+ kwargs.setdefault("strip", False)
kwargs.setdefault("python", None)
- logging.info("Running `" + ' '.join(args) + "`")
+ kwargs.setdefault("log", None)
+ kwargs.setdefault("stdout", subprocess.PIPE)
+ kwargs.setdefault("stdin", subprocess.PIPE)
+ kwargs.setdefault("stderr", subprocess.PIPE)
+ msg = "Running `" + ' '.join(args) + "`"
+ if kwargs["strip"] and not kwargs["log"] is True or kwargs["log"] is False:
+ logging.debug(msg)
+ else:
+ logging.info(msg)
if self.dry:
- return
+ if kwargs["strip"]:
+ return ''
+ return None, None
if kwargs["python"] is None and is_python(args):
kwargs["python"] = True
+ if args[0] == "wizard":
+ args = list(args)
+ args[0] = wizard_bin
+ kwargs.setdefault("input", None)
+ if kwargs["interactive"]:
+ stdout=sys.stdout
+ stdin=sys.stdin
+ stderr=sys.stderr
+ else:
+ stdout=kwargs["stdout"]
+ stdin=kwargs["stdin"]
+ stderr=kwargs["stderr"]
# XXX: There is a possible problem here where we can fill up
# the kernel buffer if we have 64KB of data. This shouldn't
# be a problem, and the fix for such case would be to write to
# ourself, and then setting up a
# SIGCHILD handler to write a single byte to the pipe to get
# us out of select() when a subprocess exits.
- proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
- if hasattr(self, "_async"):
- self._async(proc, args, **kwargs)
+ proc = subprocess.Popen(args, stdout=stdout, stderr=stderr, stdin=stdin)
+ if self._async(proc, args, **kwargs):
return proc
- kwargs.setdefault("input", None)
stdout, stderr = proc.communicate(kwargs["input"])
- self._log(stdout, stderr)
+ # can occur if we were doing interactive communication; i.e.
+ # we didn't pass in PIPE.
+ if stdout is None:
+ stdout = ""
+ if stderr is None:
+ stderr = ""
+ if not kwargs["interactive"]:
+ if kwargs["strip"]:
+ self._log(None, stderr)
+ else:
+ self._log(stdout, stderr)
if proc.returncode:
if kwargs["python"]: eclass = PythonCallError
else: eclass = CallError
raise eclass(proc.returncode, args, stdout, stderr)
+ if kwargs["strip"]:
+ return str(stdout).rstrip("\n")
return (stdout, stderr)
def _log(self, stdout, stderr):
"""Logs the standard output and standard input from a command."""
logging.debug("STDOUT:\n" + stdout)
if stderr:
logging.debug("STDERR:\n" + stderr)
+ def _wait(self):
+ pass
+ def _async(self, *args, **kwargs):
+ return False
def callAsUser(self, *args, **kwargs):
"""
Performs a system call as a different user. This is only possible
uid = kwargs.pop("uid", None)
kwargs.setdefault("python", is_python(args))
if not user and not uid: return self.call(*args, **kwargs)
- if wizard.util.get_operator_name():
+ if os.getenv("SSH_GSSAPI_NAME"):
# This might be generalized as "preserve some environment"
- args.insert(0, "SSH_GSSAPI_NAME=" + wizard.util.get_operator_name())
+ args = list(args)
+ args.insert(0, "SSH_GSSAPI_NAME=" + os.getenv("SSH_GSSAPI_NAME"))
if uid: return self.call("sudo", "-u", "#" + str(uid), *args, **kwargs)
if user: return self.call("sudo", "-u", user, *args, **kwargs)
def safeCall(self, *args, **kwargs):
return self.callAsUser(*args, **kwargs)
else:
return self.call(*args, **kwargs)
+ def eval(self, *args, **kwargs):
+ """
+ Evaluates a command and returns its output, with trailing newlines
+ stripped (like backticks in Bash). This is a convenience method for
+ calling :meth:`call` with ``strip``.
+
+ >>> sh = Shell()
+ >>> sh.eval("echo", "Foobar")
+ 'Foobar'
+ """
+ kwargs["strip"] = True
+ return self.call(*args, **kwargs)
class ParallelShell(Shell):
"""
with ``max`` subprocesses, and result in callback execution
when they finish.
- Before enqueueing a system call with :meth:`call` or :meth:`callAsUser`,
- you should wait for an open slot using :meth:`wait`; otherwise,
- ``max`` rate limiting will have no effect. For example::
-
- sh = ParallelShell()
- for command in commands_to_execute_in_parallel:
- sh.wait()
- sh.call(*command)
- sh.join()
-
.. method:: call(*args, **kwargs)
- Enqueues a system call for parallel processing. Keyword arguments
+ Enqueues a system call for parallel processing. If there are
+ no openings in the queue, this will block. Keyword arguments
are the same as :meth:`Shell.call` with the following additions:
:param on_success: Callback function for success (zero exit status).
processing. Keyword arguments are the same as
:meth:`Shell.callAsUser` with the additions of keyword
arguments from :meth:`call`.
+
+ .. method:: safeCall(*args, **kwargs)
+
+ Enqueues a "safe" call for parallel processing. Keyword
+ arguments are the same as :meth:`Shell.safeCall` with the
+ additions of keyword arguments from :meth:`call`.
+
+ .. method:: eval(*args, **kwargs)
+
+ No difference from :meth:`call`. Consider having a
+ non-parallel shell if the program you are shelling out
+ to is fast.
+
"""
def __init__(self, dry = False, max = 10):
super(ParallelShell, self).__init__(dry=dry)
self.running = {}
self.max = max # maximum of commands to run in parallel
- def _async(self, proc, args, python, on_success, on_error):
+ @staticmethod
+ def make(no_parallelize, max):
+ """Convenience method oriented towards command modules."""
+ if no_parallelize:
+ return DummyParallelShell()
+ else:
+ return ParallelShell(max=max)
+ def _async(self, proc, args, python, on_success, on_error, **kwargs):
"""
Gets handed a :class:`subprocess.Proc` object from our deferred
execution. See :meth:`Shell.call` source code for details.
"""
self.running[proc.pid] = (proc, args, python, on_success, on_error)
- def wait(self):
+ return True # so that the parent function returns
+ def _wait(self):
"""
- Blocking call that waits for an open subprocess slot. You should
- call this before enqueuing.
-
- .. note::
-
- This method may become unnecessary in the future.
+ Blocking call that waits for an open subprocess slot. This is
+ automatically called by :meth:`Shell.call`.
"""
# XXX: This API sucks; the actuall call/callAsUser call should
# probably block automatically (unless I have a good reason not to)
if len(self.running) < self.max: return
# now, wait for open pids.
try:
- pid, status = os.waitpid(-1, 0)
+ self.reap(*os.waitpid(-1, 0))
+ except OSError as e:
+ if e.errno == errno.ECHILD: return
+ raise
+ def join(self):
+ """Waits for all of our subprocesses to terminate."""
+ try:
+ while True:
+ self.reap(*os.waitpid(-1, 0))
except OSError as e:
if e.errno == errno.ECHILD: return
- raise e
+ raise
+ def reap(self, pid, status):
+ """Reaps a process."""
# ooh, zombie process. reap it
proc, args, python, on_success, on_error = self.running.pop(pid)
# XXX: this is slightly dangerous; should actually use
on_error(eclass(proc.returncode, args, stdout, stderr))
return
on_success(stdout, stderr)

# Set up a convenience global instance, so callers can use
# shell.call(...) / call(...) etc. without constructing a Shell.
shell = Shell()
call = shell.call
callAsUser = shell.callAsUser
safeCall = shell.safeCall
eval = shell.eval  # NOTE: deliberately shadows the builtin eval at module scope
class DummyParallelShell(ParallelShell):
"""Same API as :class:`ParallelShell`, but doesn't actually
self.stdout = stdout
self.stderr = stderr
def __str__(self):
- return "CallError [%d]" % self.code
+ compact = self.stderr.rstrip().split("\n")[-1]
+ return "%s (exited with %d)\n%s" % (compact, self.code, self.stderr)
class PythonCallError(CallError):
"""
#: Name of the uncaught exception.
name = None
def __init__(self, code, args, stdout, stderr):
- self.name = wizard.util.get_exception_name(stderr)
+ if stderr: self.name = util.get_exception_name(stderr)
CallError.__init__(self, code, args, stdout, stderr)
def __str__(self):
- return "PythonCallError [%s]" % self.name
+ if self.name:
+ return "PythonCallError [%s]\n%s" % (self.name, self.stderr)
+ else:
+ return "PythonCallError\n%s" % self.stderr