"""Detects whether or not an argument list invokes a Python program."""
return args[0] == "python" or args[0] == "wizard"
-def drop_priviledges():
+def drop_priviledges(dir, log_file):
"""
Checks if we are running as root. If we are, attempt to drop
- priviledges to the user who owns the current directory, by re-calling
+    privileges to the user who owns ``dir``, by re-calling
     itself using sudo with exec, such that the new process subsumes our
-    current one.
+    current one. If ``log_file`` is passed, the file is chown'ed
+    to the user we are dropping privileges to, so the subprocess
+    can write to it.
"""
if os.getuid():
return
- uid = util.get_dir_uid('.')
+ uid = util.get_dir_uid(dir)
if not uid:
return
args = []
for k,v in os.environ.items():
- if k.startswith('WIZARD_'):
+ if k.startswith('WIZARD_') or k == "SSH_GSSAPI_NAME":
args.append("%s=%s" % (k,v))
args += sys.argv
- sys.stderr.write("Dropping priviledges\n")
+ logging.debug("Dropping priviledges")
+ if log_file: os.chown(log_file, uid, -1)
os.execlp('sudo', 'sudo', '-u', '#' + str(uid), *args)
class Shell(object):
removed. This emulates the behavior of backticks and ``$()`` in Bash.
Prefer to use :meth:`eval` instead (you should only need to explicitly
specify this if you are using another wrapper around this function).
+    :param log: if True, we log the call as INFO; if False, we log the call
+        as DEBUG; otherwise, we detect based on ``strip``.
+ :param stdout:
+ :param stderr:
+    :param stdin: a file-like object that will be written to or read from as a pipe.
:returns: a tuple of strings ``(stdout, stderr)``, or a string ``stdout``
if ``strip`` is specified.
>>> sh.call("cat", input='Foobar')
('Foobar', '')
"""
+ if hasattr(self, "_wait"):
+ self._wait()
+ kwargs.setdefault("interactive", False)
+ kwargs.setdefault("strip", False)
kwargs.setdefault("python", None)
- logging.info("Running `" + ' '.join(args) + "`")
+ kwargs.setdefault("log", None)
+ kwargs.setdefault("stdout", subprocess.PIPE)
+ kwargs.setdefault("stdin", subprocess.PIPE)
+ kwargs.setdefault("stderr", subprocess.PIPE)
+ msg = "Running `" + ' '.join(args) + "`"
+ if kwargs["strip"] and not kwargs["log"] is True or kwargs["log"] is False:
+ logging.debug(msg)
+ else:
+ logging.info(msg)
if self.dry:
- return
+ if kwargs["strip"]:
+ return ''
+ return None, None
if kwargs["python"] is None and is_python(args):
kwargs["python"] = True
if args[0] == "wizard":
args = list(args)
args[0] = wizard_bin
kwargs.setdefault("input", None)
- kwargs.setdefault("interactive", False)
- kwargs.setdefault("strip", False)
if kwargs["interactive"]:
stdout=sys.stdout
stdin=sys.stdin
stderr=sys.stderr
else:
- stdout=subprocess.PIPE
- stdin=subprocess.PIPE
- stderr=subprocess.PIPE
+ stdout=kwargs["stdout"]
+ stdin=kwargs["stdin"]
+ stderr=kwargs["stderr"]
# XXX: There is a possible problem here where we can fill up
# the kernel buffer if we have 64KB of data. This shouldn't
# be a problem, and the fix for such case would be to write to
return proc
stdout, stderr = proc.communicate(kwargs["input"])
if not kwargs["interactive"]:
- self._log(stdout, stderr)
+ if kwargs["strip"]:
+ self._log(None, stderr)
+ else:
+ self._log(stdout, stderr)
if proc.returncode:
if kwargs["python"]: eclass = PythonCallError
else: eclass = CallError
with ``max`` subprocesses, and result in callback execution
when they finish.
- Before enqueueing a system call with :meth:`call` or :meth:`callAsUser`,
- you should wait for an open slot using :meth:`wait`; otherwise,
- ``max`` rate limiting will have no effect. For example::
-
- sh = ParallelShell()
- for command in commands_to_execute_in_parallel:
- sh.wait()
- sh.call(*command)
- sh.join()
-
.. method:: call(*args, **kwargs)
- Enqueues a system call for parallel processing. Keyword arguments
+ Enqueues a system call for parallel processing. If there are
+ no openings in the queue, this will block. Keyword arguments
are the same as :meth:`Shell.call` with the following additions:
:param on_success: Callback function for success (zero exit status).
super(ParallelShell, self).__init__(dry=dry)
self.running = {}
self.max = max # maximum of commands to run in parallel
+ @staticmethod
+ def make(no_parallelize, max):
+ """Convenience method oriented towards command modules."""
+ if no_parallelize:
+ return DummyParallelShell()
+ else:
+ return ParallelShell(max=max)
def _async(self, proc, args, python, on_success, on_error, **kwargs):
"""
Gets handed a :class:`subprocess.Proc` object from our deferred
execution. See :meth:`Shell.call` source code for details.
"""
self.running[proc.pid] = (proc, args, python, on_success, on_error)
- def wait(self):
+ def _wait(self):
"""
- Blocking call that waits for an open subprocess slot. You should
- call this before enqueuing.
-
- .. note::
-
- This method may become unnecessary in the future.
+ Blocking call that waits for an open subprocess slot. This is
+ automatically called by :meth:`Shell.call`.
"""
# XXX: This API sucks; the actuall call/callAsUser call should
# probably block automatically (unless I have a good reason not to)
self.stdout = stdout
self.stderr = stderr
def __str__(self):
- return "CallError [%d]" % self.code
+ compact = self.stderr.rstrip().split("\n")[-1]
+ return "%s (exited with %d)\n%s" % (compact, self.code, self.stderr)
class PythonCallError(CallError):
"""
CallError.__init__(self, code, args, stdout, stderr)
def __str__(self):
if self.name:
- return "PythonCallError [%s]" % self.name
+ return "PythonCallError [%s]\n%s" % (self.name, self.stderr)
else:
- return "PythonCallError"
+ return "PythonCallError\n%s" % self.stderr