/external/autotest/scheduler/shard/

shard_client.py
    163  job_ids = [j['id'] for j in jobs_serialized]
    164  logging.info('Heartbeat response contains jobs %s', job_ids)
    176  id__in=job_ids, hostqueueentry__complete=True)
    210  job_ids = list(models.Job.objects.filter(
    214  for job_to_upload in models.Job.objects.filter(pk__in=job_ids).all():
    219  def _mark_jobs_as_uploaded(self, job_ids):  [argument]
    223  models.Job.objects.filter(pk__in=job_ids).update(shard=self.shard)
    249  job_ids = list(models.Job.objects.filter(
    257  return job_ids, host_ids, host_statuses

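The shard_client.py hits above repeat two moves: pull the job ids out of the serialized heartbeat response, then stamp those jobs with the local shard in a single bulk UPDATE. A minimal sketch of that pattern, assuming the AFE Django models module; the function name and its jobs_serialized/shard parameters are ours, not the client's.

    from autotest_lib.frontend.afe import models

    def mark_heartbeat_jobs(jobs_serialized, shard):
        # Primary keys of every job carried in the heartbeat payload.
        job_ids = [j['id'] for j in jobs_serialized]

        # One UPDATE over the whole id list instead of a per-row save().
        models.Job.objects.filter(pk__in=job_ids).update(shard=shard)
        return job_ids
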
shard_client_integration_tests.py
    51   job_ids, host_ids = client._get_known_ids()
    52   assert(job_ids == [])
    92   job_ids, host_ids = client._get_known_ids()
    93   assert(set(job_ids) == set([job.id]))

/external/autotest/server/cros/dynamic_suite/

job_status.py
    155  job_ids = [j.id for j in jobs]
    156  while job_ids:
    161  for job_id in list(job_ids):
    164  job_ids.remove(job_id)
    166  if job_ids:
    189  job_ids = [j.id for j in jobs]
    190  while job_ids:
    195  for job_id in list(job_ids):
    198  job_ids.remove(job_id)
    200  if job_ids:

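Both blocks in job_status.py share one wait-loop shape: seed a list of job ids, walk a copy of the list on each pass, drop an id once its job reaches the state being waited for, and sleep while anything is left. A self-contained sketch of that shape; is_finished and poll_interval are placeholders rather than the module's real helpers.

    import time

    def wait_for_jobs(jobs, is_finished, poll_interval=60):
        """Block until every job in `jobs` satisfies `is_finished(job_id)`."""
        job_ids = [j.id for j in jobs]
        while job_ids:
            # Iterate over a copy so ids can be removed mid-loop.
            for job_id in list(job_ids):
                if is_finished(job_id):
                    job_ids.remove(job_id)
            if job_ids:
                time.sleep(poll_interval)
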
suite.py
    1056  job_ids = [job.id for job in self._jobs]
    1057  self._afe.run('abort_host_queue_entries', job__id__in=job_ids)

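suite.py aborts all of its child jobs with one RPC by passing the collected ids as a job__id__in filter. A sketch of that call; afe stands in for the already-constructed AFE RPC client the suite keeps in self._afe.

    def abort_suite_jobs(afe, jobs):
        # Abort the host queue entries of every child job in a single RPC
        # instead of issuing one abort call per job.
        job_ids = [job.id for job in jobs]
        afe.run('abort_host_queue_entries', job__id__in=job_ids)
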
/external/autotest/scheduler/

query_managers.py
    263  def _get_job_acl_groups(self, job_ids):  [argument]
    272  return self._get_many2many_dict(query, job_ids)
    276  def _get_job_ineligible_hosts(self, job_ids):  [argument]
    282  return self._get_many2many_dict(query, job_ids)
    286  def _get_job_dependencies(self, job_ids):  [argument]
    292  return self._get_many2many_dict(query, job_ids)

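All three _get_job_* helpers hand their query plus the id list to _get_many2many_dict(), i.e. they reduce rows of (job_id, related_id) to a per-job mapping. That helper is not shown in the hits above; the sketch below is a hypothetical pure-Python rendering of the grouping step it implies.

    from collections import defaultdict

    def group_related_ids_by_job(rows):
        """Hypothetical stand-in for the grouping done by _get_many2many_dict.

        `rows` is an iterable of (job_id, related_id) pairs, e.g. from a
        'SELECT job_id, aclgroup_id ... WHERE job_id IN (...)' query.
        Returns {job_id: [related_id, ...]}.
        """
        grouped = defaultdict(list)
        for job_id, related_id in rows:
            grouped[job_id].append(related_id)
        return dict(grouped)
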
agent_task.py
    432  job_ids = hqes.values_list('job', flat=True).distinct()
    433  assert job_ids.count() == 1, ("BaseAgentTask's queue entries "
    436  job = models.Job.objects.get(id=job_ids[0])

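agent_task.py uses values_list('job', flat=True).distinct() to check that every queue entry handled by a task belongs to the same job before loading it. Filled out as a small function, assuming hqes is a Django queryset of host queue entries and models is the AFE models module:

    def get_single_job(hqes):
        # Distinct job ids across the task's host queue entries; there must
        # be exactly one, otherwise the task was built from mixed jobs.
        job_ids = hqes.values_list('job', flat=True).distinct()
        assert job_ids.count() == 1, (
            'Expected one job across the queue entries, found %d'
            % job_ids.count())
        return models.Job.objects.get(id=job_ids[0])
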
monitor_db_unittest.py
    220  def _convert_jobs_to_metahosts(self, *job_ids):  [argument]
    221  sql_tuple = '(' + ','.join(str(i) for i in job_ids) + ')'

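The unit test builds a literal SQL IN-tuple from the ids by string formatting. The same one-liner, pulled out as a helper; outside of test fixtures you would normally let the database driver parameterize the query instead.

    def sql_in_tuple(job_ids):
        # [1, 2, 3] -> '(1,2,3)'; safe here only because the ids are ints
        # generated by the test itself.
        return '(' + ','.join(str(i) for i in job_ids) + ')'
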
/external/chromium-trace/catapult/third_party/mapreduce/mapreduce/

shuffler.py
    200  job_ids = yield pipeline_common.Append(*[mapper.job_id for mapper in
    202  result = yield _CollectOutputFiles(job_ids)
    204  yield _CleanupOutputFiles(job_ids)
    218  def run(self, job_ids):  [argument]
    220  for job_id in job_ids:
    233  def run(self, job_ids):  [argument]
    234  for job_id in job_ids:

/external/autotest/cli/

job.py
    67  job_ids = []
    73  job_ids.append(job_id)
    76  return (job_ids, job_names)
    88  (job_ids, job_names) = self.__split_jobs_between_ids_names()
    90  for items, tag in [(job_ids, tag_id),

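The CLI first partitions its job arguments into numeric ids and string names (__split_jobs_between_ids_names) so each group can be looked up with the right key. A guess at that split's core logic, under the assumption that purely numeric arguments are treated as ids; the function name and details are ours.

    def split_jobs_between_ids_and_names(job_args):
        """Partition e.g. ['123', 'nightly-bvt', '456'] into ids and names."""
        job_ids, job_names = [], []
        for arg in job_args:
            if arg.isdigit():
                job_ids.append(int(arg))
            else:
                job_names.append(arg)
        return job_ids, job_names
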
/external/autotest/frontend/afe/

models.py
    1182  def get_status_counts(self, job_ids):  [argument]
    1187  if not job_ids:
    1189  id_list = '(%s)' % ','.join(str(job_id) for job_id in job_ids)
    1197  all_job_counts = dict((job_id, {}) for job_id in job_ids)
    1554  job_ids = set([])
    1569  job_ids |= set([j.id for j in query])
    1571  if job_ids:
    1575  'candidates': ','.join([str(i) for i in job_ids])})
    1576  job_ids -= set([j.id for j in query])
    1578  if job_ids:
    [all …]

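get_status_counts() pre-seeds an empty dict for every requested job and then fills in per-status counts, so jobs with no queue entries still appear in the result. A pure-Python sketch of that aggregation, assuming the grouped query yields (job_id, status, count) rows:

    def build_status_counts(job_ids, grouped_rows):
        """Return {job_id: {status: count}} for every job in job_ids.

        `grouped_rows` stands in for the result of a GROUP BY query over the
        host queue entries of the listed jobs.
        """
        if not job_ids:
            return {}
        all_job_counts = dict((job_id, {}) for job_id in job_ids)
        for job_id, status, count in grouped_rows:
            all_job_counts[job_id][status] = count
        return all_job_counts
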
rpc_interface.py
    597  job_ids = list(models.Job.objects.filter(
    602  if not job_ids:
    606  afe_job_id__in=job_ids).exclude(

/external/autotest/frontend/tko/

rpc_interface.py
    135  job_ids = set()
    141  job_ids.add(job_id)
    145  return list(job_ids)

/external/autotest/contrib/

compare_suite.py
    326  job_ids = [int(id) for id in args.jobs.split(',')]  [variable]
    330  models.Job.objects.filter(id__in=job_ids)]
    338  for job_id in job_ids:

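compare_suite.py receives its jobs as one comma-separated command-line value and converts it to a list of ints before filtering. A sketch with argparse; the --jobs flag name is inferred from args.jobs and the rest is illustrative.

    import argparse

    parser = argparse.ArgumentParser(description='Compare suite jobs.')
    parser.add_argument('--jobs', required=True,
                        help='Comma-separated AFE job ids, e.g. "101,102,103".')
    args = parser.parse_args()

    # '101,102,103' -> [101, 102, 103]
    job_ids = [int(job_id) for job_id in args.jobs.split(',')]
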
/external/autotest/site_utils/

test_push.py
    305  job_ids = [job.id for job in
    308  for job_id in job_ids]