=== modified file 'lava_dispatcher/actions/launch_control.py'
@@ -21,17 +21,25 @@
import json
import os
-import shutil
-import tarfile
import logging
+import tempfile
import urlparse
+import xmlrpclib
+
+import lava_dispatcher.utils as utils
from lava_tool.authtoken import AuthenticatingServerProxy, MemoryAuthBackend
from lava_dispatcher.actions import BaseAction
from lava_dispatcher.client.base import OperationFailed
-import xmlrpclib
-import traceback
+from lava_dispatcher.test_data import create_attachment
+
+
+class GatherResultsError(Exception):
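+    """Raised when gathering result bundles fails.
+
+    Any bundles collected before the failure are available via ``bundles``
+    so they can still be submitted.
+    """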
+    def __init__(self, msg, bundles=None):
+        super(GatherResultsError, self).__init__(msg)
+        self.bundles = bundles if bundles is not None else []
+
def _get_dashboard(server, token):
if not server.endswith("/"):
@@ -52,7 +60,8 @@
if parsed_server.port:
userless_server += ':' + str(parsed_server.port)
userless_server += parsed_server.path
- auth_backend = MemoryAuthBackend([(parsed_server.username, userless_server, token)])
+ auth_backend = MemoryAuthBackend(
+ [(parsed_server.username, userless_server, token)])
else:
logging.warn(
"specifying a user without a token is unlikely to work")
@@ -77,7 +86,6 @@
return dashboard
-
class cmd_submit_results(BaseAction):
parameters_schema = {
@@ -91,64 +99,103 @@
'additionalProperties': False,
}
+ def _get_bundles(self, files):
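+        """Parse the given files as JSON result bundles.
+
+        Files without a .bundle extension are skipped. Bundles that fail to
+        parse are attached to the test run for later inspection, and any
+        errors are reported via GatherResultsError after all files have been
+        processed.
+        """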
+ bundles = []
+ errors = []
+ for fname in files:
+ if os.path.splitext(fname)[1] != ".bundle":
+ continue
+ content = None
+ try:
+ with open(fname, 'r') as f:
+ content = f.read()
+ bundles.append(json.loads(content))
+ except ValueError:
+ msg = 'Error adding result bundle %s' % fname
+ errors.append(msg)
+ logging.exception(msg)
+ if content:
+ logging.info('Adding bundle as attachment')
+ attachment = create_attachment(fname, content)
+ self.context.test_data.add_attachments([attachment])
+ except:
+                msg = 'Unknown error processing bundle %s' % fname
+ logging.exception(msg)
+ errors.append(msg)
+
+ if len(errors) > 0:
+ msg = ' '.join(errors)
+ raise GatherResultsError(msg, bundles)
+ return bundles
+
def _get_bundles_from_device(self, result_disk):
- err_msg = ''
- status = 'fail'
- device_bundles = []
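+        """Fetch the results tarball from the device and parse its bundles."""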
+ bundles = []
try:
- status, err_msg, result_path = self.client.retrieve_results(
- result_disk)
+ result_path = self.client.retrieve_results(result_disk)
if result_path is not None:
- try:
- tar = tarfile.open(result_path)
- for tarinfo in tar:
- if os.path.splitext(tarinfo.name)[1] == ".bundle":
- f = tar.extractfile(tarinfo)
- content = f.read()
- f.close()
- device_bundles.append(json.loads(content))
- tar.close()
- except:
- logging.warning(traceback.format_exc())
- status = 'fail'
- err_msg = err_msg + " Some test case result appending failed."
- logging.warning(err_msg)
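+                # unpack with the system tar (utils.extract_targz) rather than
+                # Python's tarfile, which has unicode issues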
+ d = tempfile.mkdtemp(dir=self.client.target_device.scratch_dir)
+ files = utils.extract_targz(result_path, d)
+ bundles = self._get_bundles(files)
+ except GatherResultsError:
+ raise
except:
- logging.exception('retrieve_results failed')
- return device_bundles, status, err_msg
+            msg = 'Unable to retrieve results from target'
+ logging.exception(msg)
+ raise GatherResultsError(msg)
+ return bundles
def _get_results_from_host(self):
- status = 'pass'
- err_msg = ''
- host_bundles = []
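+        """Parse the result bundles from the host result directory."""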
+ bundles = []
+ errors = []
try:
bundle_list = os.listdir(self.context.host_result_dir)
for bundle_name in bundle_list:
bundle = "%s/%s" % (self.context.host_result_dir, bundle_name)
- f = open(bundle)
- content = f.read()
- f.close()
- host_bundles.append(json.loads(content))
+ content = None
+                try:
+                    with open(bundle) as f:
+                        content = f.read()
+                    bundles.append(json.loads(content))
+ except ValueError:
+ msg = 'Error adding host result bundle %s' % bundle
+ errors.append(msg)
+ logging.exception(msg)
+ if content:
+ logging.info('Adding bundle as attachment')
+ attachment = create_attachment(bundle, content)
+ self.context.test_data.add_attachments([attachment])
except:
- print traceback.format_exc()
- status = 'fail'
- err_msg = err_msg + " Some test case result appending failed."
- return host_bundles, status, err_msg
-
+ msg = 'Error getting all results from host'
+ logging.exception(msg)
+ raise GatherResultsError(msg, bundles)
+
+ if len(errors) > 0:
+ msg = ' '.join(errors)
+ raise GatherResultsError(msg, bundles)
+
+ return bundles
def run(self, server, stream, result_disk="testrootfs", token=None):
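+        """Gather result bundles from the device and the host, recording the
+        outcome as the 'gather_results' test result before submission.
+        """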
all_bundles = []
status = 'pass'
err_msg = ''
if self.context.any_device_bundles:
- device_bundles, status, err_msg = self._get_bundles_from_device(result_disk)
- all_bundles.extend(device_bundles)
+ try:
+ bundles = self._get_bundles_from_device(result_disk)
+ all_bundles.extend(bundles)
+ except GatherResultsError as gre:
+ err_msg = gre.message
+ status = 'fail'
+ all_bundles.extend(gre.bundles)
if self.context.any_host_bundles:
- host_bundles, host_status, host_err_msg = self._get_results_from_host()
- all_bundles.extend(host_bundles)
- if status == 'pass':
- status = host_status
- err_msg += host_err_msg
+ try:
+ bundles = self._get_results_from_host()
+ all_bundles.extend(bundles)
+ except GatherResultsError as gre:
+ err_msg += ' ' + gre.message
+ status = 'fail'
+ all_bundles.extend(gre.bundles)
self.context.test_data.add_result('gather_results', status, err_msg)
@@ -187,12 +234,13 @@
try:
result = dashboard.put_ex(json_bundle, job_name, stream)
print >> self.context.oob_file, 'dashboard-put-result:', result
- logging.info("Dashboard : %s" %result)
+ logging.info("Dashboard : %s" % result)
except xmlrpclib.Fault, err:
logging.warning("xmlrpclib.Fault occurred")
logging.warning("Fault code: %d" % err.faultCode)
logging.warning("Fault string: %s" % err.faultString)
raise OperationFailed("could not push to dashboard")
+
class cmd_submit_results_on_host(cmd_submit_results):
pass
=== modified file 'lava_dispatcher/client/targetdevice.py'
@@ -21,7 +21,6 @@
import contextlib
import logging
import os
-import shutil
import time
from lava_dispatcher.client.base import (
@@ -32,6 +31,7 @@
get_target,
)
from lava_dispatcher.utils import (
+ mk_targz,
logging_system,
)
@@ -84,11 +84,11 @@
td = self.target_device
td.power_off(self.proc)
- tarbase = os.path.join(td.scratch_dir, 'lava_results')
+ tar = os.path.join(td.scratch_dir, 'lava_results.tgz')
result_dir = self.context.config.lava_result_dir
with td.file_system(td.config.root_part, result_dir) as mnt:
- tarbase = shutil.make_archive(tarbase, 'gztar', mnt)
- return 'pass', '', tarbase
+ mk_targz(tar, mnt)
+ return tar
def get_test_data_attachments(self):
'''returns attachments to go in the "lava_results" test run'''
=== modified file 'lava_dispatcher/device/master.py'
@@ -41,6 +41,7 @@
from lava_dispatcher.utils import (
logging_spawn,
logging_system,
+ mk_targz,
string_to_list,
)
from lava_dispatcher.client.base import (
@@ -275,8 +276,8 @@
yield os.path.join(tfdir, target_name)
finally:
- tf = os.path.join(self.scratch_dir, 'fs')
- tf = shutil.make_archive(tf, 'gztar', tfdir)
+ tf = os.path.join(self.scratch_dir, 'fs.tgz')
+ mk_targz(tf, tfdir)
shutil.rmtree(tfdir)
self.proc.sendcontrol('c') # kill SimpleHTTPServer
@@ -524,10 +525,7 @@
:param tarfile: path and filename of the tgz to output
"""
with image_partition_mounted(image, partno) as mntdir:
- cmd = "sudo tar -C %s -czf %s ." % (mntdir, tarfile)
- rc = logging_system(cmd)
- if rc:
- raise RuntimeError("Failed to create tarball: %s" % tarfile)
+ mk_targz(tarfile, mntdir, asroot=True)
def _deploy_linaro_rootfs(session, rootfs):
=== modified file 'lava_dispatcher/utils.py'
@@ -65,6 +65,40 @@
return d
+def mk_targz(tfname, rootdir, basedir='.', asroot=False):
+    """ Similar to shutil.make_archive, but doesn't blow up on unicode errors
+    """
+ from lava_dispatcher.client.base import CriticalError
+ cmd = 'tar -C %s -czf %s %s' % (rootdir, tfname, basedir)
+ if asroot:
+ cmd = 'sudo %s' % cmd
+ if logging_system(cmd):
+ raise CriticalError('Unable to make tarball of: %s' % rootdir)
+
+
+def _list_files(dirname):
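+    """ Recursively lists the regular files below dirname. """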
+ files = []
+ for f in os.listdir(dirname):
+ f = os.path.join(dirname, f)
+ if os.path.isdir(f):
+ files.extend(_list_files(f))
+ elif os.path.isfile(f):
+ files.append(f)
+ return files
+
+
+def extract_targz(tfname, tmpdir):
+    """ Extracts the contents of a .tgz file into tmpdir and returns a list
+    of the extracted files (full paths). This is used to work around issues
+    that Python's tarfile module seems to have with unicode.
+    """
+ from lava_dispatcher.client.base import CriticalError
+ if logging_system('tar -C %s -xzf %s' % (tmpdir, tfname)):
+ raise CriticalError('Unable to extract tarball: %s' % tfname)
+
+ return _list_files(tmpdir)
+
+
def ensure_directory(path):
""" ensures the path exists, if it doesn't it will be created
"""
@@ -114,7 +148,7 @@
def expect(self, *args, **kw):
# some expect should not be logged because it is so much noise.
- if 'lava_no_logging' in kw:
+ if 'lava_no_logging' in kw:
del kw['lava_no_logging']
return self.expect(*args, **kw)
@@ -139,7 +173,8 @@
def drain(self):
"""this is a one-off of the pexect __interact that ignores STDIN and
- handles an error that happens when we call read just after the process exits
+ handles an error that happens when we call read just after the process
+ exits
"""
try:
self._spawn__interact_copy(escape_character=chr(29))
@@ -155,7 +190,7 @@
# XXX Duplication: we should reuse lava-test TestArtifacts
def generate_bundle_file_name(test_name):
- return ("{test_id}.{time.tm_year:04}-{time.tm_mon:02}-{time.tm_mday:02}T"
+ return ("{test_id}.{time.tm_year:04}-{time.tm_mon:02}-{time.tm_mday:02}T"
"{time.tm_hour:02}:{time.tm_min:02}:{time.tm_sec:02}Z").format(
test_id=test_name,
time=datetime.datetime.utcnow().timetuple())