diff mbox

[Branch,~linaro-validation/lava-scheduler/trunk] Rev 263: update to latest version of trunk

Message ID 20130917212324.28197.14844.launchpad@ackee.canonical.com
State Accepted
Headers show

Commit Message

Fu Wei Sept. 17, 2013, 9:23 p.m. UTC
Merge authors:
  Neil Williams (codehelp)
  Senthil Kumaran S (stylesen)
Related merge proposals:
  https://code.launchpad.net/~codehelp/lava-scheduler/1223264/+merge/185473
  proposed by: Neil Williams (codehelp)
  review: Approve - Matthew Hart (matthew-hart)
  https://code.launchpad.net/~codehelp/lava-scheduler/group-size-limit/+merge/185217
  proposed by: Neil Williams (codehelp)
  review: Approve - Senthil Kumaran S (stylesen)
  https://code.launchpad.net/~stylesen/lava-scheduler/fix-bug-1224260/+merge/185211
  proposed by: Senthil Kumaran S (stylesen)
  review: Approve - Neil Williams (codehelp)
  https://code.launchpad.net/~stylesen/lava-scheduler/fix-bug-1224261/+merge/185207
  proposed by: Senthil Kumaran S (stylesen)
  review: Approve - Neil Williams (codehelp)
------------------------------------------------------------
revno: 263 [merge]
committer: Fu Wei <fu.wei@linaro.org>
branch nick: lmp
timestamp: Tue 2013-09-17 22:59:18 +0800
message:
  update to latest version of trunk
modified:
  lava_scheduler_app/api.py
  lava_scheduler_app/models.py
  lava_scheduler_app/utils.py
  lava_scheduler_app/views.py


--
lp:lava-scheduler
https://code.launchpad.net/~linaro-validation/lava-scheduler/trunk

You are subscribed to branch lp:lava-scheduler.
To unsubscribe from this branch go to https://code.launchpad.net/~linaro-validation/lava-scheduler/trunk/+edit-subscription
diff mbox

Patch

=== modified file 'lava_scheduler_app/api.py'
--- lava_scheduler_app/api.py	2013-09-02 15:14:15 +0000
+++ lava_scheduler_app/api.py	2013-09-13 11:39:18 +0000
@@ -200,10 +200,15 @@ 
 
         pending_jobs_by_device = {}
 
-        jobs = TestJob.objects.filter(status=TestJob.SUBMITTED)\
+        jobs_res = TestJob.objects.filter(status=TestJob.SUBMITTED)\
             .values_list('requested_device_type_id')\
             .annotate(pending_jobs=(Count('id')))
-        pending_jobs_by_device.update(dict(jobs))
+        jobs = {}
+        jobs_hash = dict(jobs_res)
+        for job in jobs_hash:
+            if job:
+                jobs[job] = jobs_hash[job]
+        pending_jobs_by_device.update(jobs)
 
         # Get rest of the devices and put number of pending jobs as 0.
         device_types = DeviceType.objects.values_list('name', flat=True)

=== modified file 'lava_scheduler_app/models.py'
--- lava_scheduler_app/models.py	2013-09-04 14:46:59 +0000
+++ lava_scheduler_app/models.py	2013-09-12 06:28:07 +0000
@@ -81,7 +81,7 @@ 
                 continue
             else:
                 raise DevicesUnavailableException(
-                    "Requested %d %s device(s) - only %d available." % (count, board, all_devices[board]))
+                    "Requested %d %s device(s) - only %d available." % (count, board, all_devices.get(board,0)))
     return True
 
 

=== modified file 'lava_scheduler_app/utils.py'
--- lava_scheduler_app/utils.py	2013-09-10 13:20:02 +0000
+++ lava_scheduler_app/utils.py	2013-09-17 14:59:18 +0000
@@ -22,6 +22,7 @@ 
 import socket
 import urlparse
 import simplejson
+import models
 
 
 def rewrite_hostname(result_url):
@@ -89,6 +90,8 @@ 
     group_count = 0
     for clients in json_jobdata["device_group"]:
         group_count += int(clients["count"])
+    if group_count <= 1:
+        raise models.JSONDataError("Only one device requested in a MultiNode job submission.")
     for clients in json_jobdata["device_group"]:
         role = str(clients["role"])
         count = int(clients["count"])
@@ -96,8 +99,10 @@ 
         for c in range(0, count):
             node_json[role].append({})
             node_json[role][c]["timeout"] = json_jobdata["timeout"]
-            node_json[role][c]["job_name"] = json_jobdata["job_name"]
-            node_json[role][c]["tags"] = clients["tags"]
+            if json_jobdata.get("job_name", False):
+                node_json[role][c]["job_name"] = json_jobdata["job_name"]
+            if clients.get("tags", False):
+                node_json[role][c]["tags"] = clients["tags"]
             node_json[role][c]["group_size"] = group_count
             node_json[role][c]["target_group"] = target_group
             node_json[role][c]["actions"] = node_actions[role]
@@ -105,7 +110,11 @@ 
 
             node_json[role][c]["role"] = role
             # multinode node stage 2
-            node_json[role][c]["logging_level"] = json_jobdata["logging_level"]
+            if json_jobdata.get("logging_level", False):
+                node_json[role][c]["logging_level"] = \
+                    json_jobdata["logging_level"]
+            if json_jobdata.get("priority", False):
+                node_json[role][c]["priority"] = json_jobdata["priority"]
             node_json[role][c]["device_type"] = clients["device_type"]
 
     return node_json

=== modified file 'lava_scheduler_app/views.py'
--- lava_scheduler_app/views.py	2013-09-02 15:14:15 +0000
+++ lava_scheduler_app/views.py	2013-09-12 06:28:07 +0000
@@ -53,6 +53,7 @@ 
     TestJob,
     JSONDataError,
     validate_job_json,
+    DevicesUnavailableException,
 )
 
 
@@ -606,7 +607,8 @@ 
                     "lava_scheduler_app/job_submit.html",
                     response_data, RequestContext(request))
 
-            except (JSONDataError, ValueError) as e:
+            except (JSONDataError, ValueError, DevicesUnavailableException) \
+                    as e:
                 response_data["error"] = str(e)
                 response_data["json_input"] = request.POST.get("json-input")
                 return render_to_response(
@@ -842,7 +844,7 @@ 
                     response_data, RequestContext(request))
             else:
                 return redirect(job)
-        except Exception as e:
+        except (JSONDataError, ValueError, DevicesUnavailableException) as e:
             response_data["error"] = str(e)
             response_data["json_input"] = definition
             return render_to_response(