[Branch,~linaro-validation/lava-dispatcher/trunk] Rev 544: add an --output-dir argument for the dispatcher to put structured data in

Message ID 20130127214711.1835.17407.launchpad@ackee.canonical.com
State Accepted

Commit Message

Michael Hudson-Doyle Jan. 27, 2013, 9:47 p.m. UTC
Merge authors:
  Michael Hudson-Doyle (mwhudson)
Related merge proposals:
  https://code.launchpad.net/~mwhudson/lava-dispatcher/output-dir/+merge/144603
  proposed by: Michael Hudson-Doyle (mwhudson)
------------------------------------------------------------
revno: 544 [merge]
committer: Michael Hudson-Doyle <michael.hudson@linaro.org>
branch nick: trunk
timestamp: Mon 2013-01-28 10:46:16 +1300
message:
  add an --output-dir argument for the dispatcher to put structured data in
modified:
  lava/dispatcher/commands.py
  lava_dispatcher/actions/launch_control.py
  lava_dispatcher/context.py
  lava_dispatcher/job.py


--
lp:lava-dispatcher
https://code.launchpad.net/~linaro-validation/lava-dispatcher/trunk

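As context for the diff below, a minimal sketch (not part of the patch) of reading the structured data the dispatcher leaves behind when started with --output-dir; the directory path is a placeholder, and the file names output.txt and result-bundle are the ones introduced by this change:

    import os

    output_dir = '/tmp/lava-run'  # whatever was passed to --output-dir (placeholder)

    # Combined serial and dispatcher log output, written by the new Outputter.
    with open(os.path.join(output_dir, 'output.txt')) as f:
        console_log = f.read()

    # Result of the dashboard put_ex() call, written in launch_control.py.
    with open(os.path.join(output_dir, 'result-bundle')) as f:
        bundle_result = f.read()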

Patch

=== modified file 'lava/dispatcher/commands.py'
--- lava/dispatcher/commands.py	2013-01-04 14:35:51 +0000
+++ lava/dispatcher/commands.py	2013-01-23 22:28:49 +0000
@@ -53,6 +53,10 @@ 
             type=int,
             help="Used internally by LAVA scheduler.")
         parser.add_argument(
+            "--output-dir",
+            default=None,
+            help="Directory to put structured output in.")
+        parser.add_argument(
             "--validate", action='store_true',
             help="Just validate the job file, do not execute any steps.")
         parser.add_argument(
@@ -111,7 +115,9 @@ 
         else:
             json_jobdata['target'] = self.args.target
             jobdata = json.dumps(json_jobdata)
-        job = LavaTestJob(jobdata, oob_file, config)
+        if self.args.output_dir and not os.path.isdir(self.args.output_dir):
+            os.makedirs(self.args.output_dir)
+        job = LavaTestJob(jobdata, oob_file, config, self.args.output_dir)
 
         #FIXME Return status
         if self.args.validate:

=== modified file 'lava_dispatcher/actions/launch_control.py'
--- lava_dispatcher/actions/launch_control.py	2012-11-20 21:22:17 +0000
+++ lava_dispatcher/actions/launch_control.py	2013-01-23 22:37:05 +0000
@@ -236,6 +236,7 @@ 
         try:
             result = dashboard.put_ex(json_bundle, job_name, stream)
             print >> self.context.oob_file, 'dashboard-put-result:', result
+            self.context.output.write_named_data('result-bundle', result)
             logging.info("Dashboard : %s" % result)
         except xmlrpclib.Fault, err:
             logging.warning("xmlrpclib.Fault occurred")

=== modified file 'lava_dispatcher/context.py'
--- lava_dispatcher/context.py	2013-01-16 23:35:53 +0000
+++ lava_dispatcher/context.py	2013-01-23 23:10:37 +0000
@@ -19,6 +19,7 @@ 
 # with this program; if not, see <http://www.gnu.org/licenses>.
 
 import atexit
+import logging
 import os
 import sys
 import tempfile
@@ -29,14 +30,66 @@ 
 from lava_dispatcher.utils import rmtree
 
 
+def _write_and_flush(fobj, data):
+    fobj.write(data)
+    fobj.flush()
+
+
+class _Forwarder(object):
+    """A file-like object that just forwards data written to it to a callable.
+    """
+
+    def __init__(self, callback):
+        self.callback = callback
+
+    def write(self, data):
+        self.callback(data)
+
+    def flush(self):
+        pass
+
+
+class Outputter(object):
+
+    def __init__(self, output_dir):
+        self.output_dir = output_dir
+        if output_dir:
+            self.output_txt = open(os.path.join(output_dir, 'output.txt'), 'w')
+        else:
+            self.output_txt = None
+
+        self.logfile_read = _Forwarder(self.serial_output)
+
+        self._log_handler = logging.StreamHandler(_Forwarder(self.log_output))
+        FORMAT = '<LAVA_DISPATCHER>%(asctime)s %(levelname)s: %(message)s'
+        DATEFMT = '%Y-%m-%d %I:%M:%S %p'
+        self._log_handler.setFormatter(
+            logging.Formatter(fmt=FORMAT, datefmt=DATEFMT))
+        del logging.root.handlers[:]
+        del logging.root.filters[:]
+        logging.root.addHandler(self._log_handler)
+
+    def serial_output(self, data):
+        _write_and_flush(sys.stdout, data)
+        if self.output_txt is not None:
+            _write_and_flush(self.output_txt, data)
+
+    # Currently all output is treated the same way.
+    log_output = serial_output
+
+    def write_named_data(self, name, data):
+        if self.output_dir is None:
+            return
+        with open(os.path.join(self.output_dir, name), 'w') as outf:
+            outf.write(data)
+
+
 class LavaContext(object):
-    def __init__(self, target, dispatcher_config, oob_file, job_data):
+    def __init__(self, target, dispatcher_config, oob_file, job_data, output_dir):
         self.config = dispatcher_config
         self.job_data = job_data
-        # This is the file-like object to send serial output from the device
-        # to.  We just send it to stdout for now, but soon we'll do something
-        # cleverer.
-        self.logfile_read = sys.stdout
+        self.output = Outputter(output_dir)
+        self.logfile_read = self.output.logfile_read
         device_config = get_device_config(
             target, dispatcher_config.config_dir)
         self._client = TargetBasedClient(self, device_config)

=== modified file 'lava_dispatcher/job.py'
--- lava_dispatcher/job.py	2013-01-16 23:27:28 +0000
+++ lava_dispatcher/job.py	2013-01-23 22:37:53 +0000
@@ -111,11 +111,11 @@ 
 
 
 class LavaTestJob(object):
-    def __init__(self, job_json, oob_file, config):
+    def __init__(self, job_json, oob_file, config, output_dir):
         self.job_status = 'pass'
         self.load_job_data(job_json)
         self.context = LavaContext(
-            self.target, config, oob_file, self.job_data)
+            self.target, config, oob_file, self.job_data, output_dir)
 
     def load_job_data(self, job_json):
         self.job_data = json.loads(job_json)
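
For illustration, a minimal sketch (not part of the patch) exercising the Outputter class added to lava_dispatcher/context.py; the directory path is a placeholder and is assumed to exist already, as commands.py now ensures before constructing the job:

    from lava_dispatcher.context import Outputter

    out = Outputter('/tmp/lava-run')  # placeholder path, assumed to exist

    # Data written to logfile_read is forwarded to stdout and, since an output
    # directory was given, to /tmp/lava-run/output.txt as well.
    out.logfile_read.write('Uncompressing Linux... done, booting the kernel.\n')

    # Named artifacts end up as individual files inside the output directory.
    out.write_named_data('result-bundle', 'http://validation.example/bundle/1234\n')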