#!/usr/bin/python
# pylint: disable=missing-docstring
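# Functional tests for the Autotest scheduler (monitor_db): they drive a real
# Dispatcher against a test database, with the drone manager and email manager
# replaced by the mocks defined below.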

import logging, os, signal, unittest
import common
import mock
from autotest_lib.client.common_lib import enum, global_config, host_protections
from autotest_lib.database import database_connection
from autotest_lib.frontend import setup_django_environment
from autotest_lib.frontend.afe import frontend_test_utils, models
from autotest_lib.frontend.afe import model_attributes
from autotest_lib.scheduler import drone_manager, email_manager
from autotest_lib.scheduler import monitor_db, scheduler_models
from autotest_lib.scheduler import scheduler_config
from autotest_lib.scheduler import scheduler_lib

HqeStatus = models.HostQueueEntry.Status
HostStatus = models.Host.Status

class NullMethodObject(object):
    """Base class that turns the methods named in _NULL_METHODS into no-ops.

    Each listed method is replaced with a function that accepts any
    arguments and does nothing.
    """
    _NULL_METHODS = ()

    def __init__(self):
        def null_method(*args, **kwargs):
            pass

        for method_name in self._NULL_METHODS:
            setattr(self, method_name, null_method)


# the SpecialTask names here must match the suffixes used on the SpecialTask
# results directories
_PidfileType = enum.Enum('verify', 'cleanup', 'repair', 'job', 'gather',
                         'parse', 'archive', 'reset', 'provision')


_PIDFILE_TO_PIDFILE_TYPE = {
        drone_manager.AUTOSERV_PID_FILE: _PidfileType.JOB,
        drone_manager.CRASHINFO_PID_FILE: _PidfileType.GATHER,
        drone_manager.PARSER_PID_FILE: _PidfileType.PARSE,
        drone_manager.ARCHIVER_PID_FILE: _PidfileType.ARCHIVE,
        }


_PIDFILE_TYPE_TO_PIDFILE = dict((value, key) for key, value
                                in _PIDFILE_TO_PIDFILE_TYPE.iteritems())


class MockConnectionManager(object):
    """Connection manager that hands out the shared test database."""

    db = None

    def __init__(self):
        super(MockConnectionManager, self).__init__()

    def get_connection(self):
        assert MockConnectionManager.db
        return MockConnectionManager.db


class MockDroneManager(NullMethodObject):
    """
    Public attributes:
    process_capacity: maximum total number of running processes;
            max_runnable_processes() reports this minus the number currently
            running.  Tests can lower it to activate throttling.
    """
    _NULL_METHODS = ('reinitialize_drones', 'copy_to_results_repository',
                     'copy_results_on_drone', 'trigger_refresh', 'sync_refresh')

    class _DummyPidfileId(object):
        """
        Object representing a pidfile ID; opaque to the scheduler code but
        still debugging-friendly for us.
        """
        def __init__(self, working_directory, pidfile_name, num_processes=None):
            self._working_directory = working_directory
            self._pidfile_name = pidfile_name
            self._num_processes = num_processes
            self._paired_with_pidfile = None


        def key(self):
            """Key for MockDroneManager._pidfile_index"""
            return (self._working_directory, self._pidfile_name)


        def __str__(self):
            return os.path.join(self._working_directory, self._pidfile_name)


        def __repr__(self):
            return '<_DummyPidfileId: %s>' % str(self)


    def __init__(self):
        super(MockDroneManager, self).__init__()
        self.process_capacity = 100

        # maps result_dir to set of tuples (file_path, file_contents)
        self._attached_files = {}
        # maps pidfile IDs to PidfileContents
        self._pidfiles = {}
        # pidfile IDs that haven't been created yet
        self._future_pidfiles = []
        # maps _PidfileType to the most recently created pidfile ID of that
        # type
        self._last_pidfile_id = {}
        # maps (working_directory, pidfile_name) to pidfile IDs
        self._pidfile_index = {}
        # maps process to pidfile IDs
        self._process_index = {}
        # maps pidfile IDs to the set of signals their process has received
        self._pids_to_signals_received = {}
        # pidfile IDs that have just been unregistered (so will disappear on
        # the next cycle)
        self._unregistered_pidfiles = set()
        # pidfile IDs to write exit status for at the end of the tick
        self._set_pidfile_exit_status_queue = []

    # utility APIs for use by the test
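    #
    # A typical test drives the scheduler through these hooks (a sketch
    # mirroring the test cases below):
    #   self._run_dispatcher()  # scheduler launches e.g. a verify task
    #   self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
    #   self._run_dispatcher()  # scheduler sees the result and moves on
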
    def finish_process(self, pidfile_type, exit_status=0):
        pidfile_id = self._last_pidfile_id[pidfile_type]
        self._set_pidfile_exit_status(pidfile_id, exit_status)


    def finish_specific_process(self, working_directory, pidfile_name):
        pidfile_id = self.pidfile_from_path(working_directory, pidfile_name)
        self._set_pidfile_exit_status(pidfile_id, 0)


    def finish_active_process_on_host(self, host_id):
        match = 'hosts/host%d/' % host_id
        for pidfile_id in self.nonfinished_pidfile_ids():
            if pidfile_id._working_directory.startswith(match):
                self._set_pidfile_exit_status(pidfile_id, 0)
                break
        else:
            # for/else: only reached if no nonfinished pidfile matched
            raise KeyError('No active process matched %s' % match)


    def _set_pidfile_exit_status(self, pidfile_id, exit_status):
        assert pidfile_id is not None
        contents = self._pidfiles[pidfile_id]
        contents.exit_status = exit_status
        contents.num_tests_failed = 0


    def was_last_process_killed(self, pidfile_type, sigs):
        pidfile_id = self._last_pidfile_id[pidfile_type]
        return sigs == self._pids_to_signals_received[pidfile_id]


    def nonfinished_pidfile_ids(self):
        return [pidfile_id for pidfile_id, pidfile_contents
                in self._pidfiles.iteritems()
                if pidfile_contents.exit_status is None]


    def running_pidfile_ids(self):
        return [pidfile_id for pidfile_id in self.nonfinished_pidfile_ids()
                if self._pidfiles[pidfile_id].process is not None]


    def pidfile_from_path(self, working_directory, pidfile_name):
        return self._pidfile_index[(working_directory, pidfile_name)]


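    # NOTE: added helper. _assert_process_executed() in the test class below
    # calls was_process_executed(), which this mock never defined; this is a
    # minimal sketch under the assumption that a process counts as executed
    # once execute_actions() has attached a process object to its pidfile.
    def was_process_executed(self, working_directory, pidfile_name):
        pidfile_id = self._pidfile_index.get((working_directory, pidfile_name))
        if pidfile_id is None:
            return False
        contents = self._pidfiles.get(pidfile_id)
        return contents is not None and contents.process is not None

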
    def attached_files(self, working_directory):
        """
        Return dict mapping path to contents for attached files with specified
        paths.
        """
        return dict((path, contents) for path, contents
                    in self._attached_files.get(working_directory, [])
                    if path is not None)


    # DroneManager emulation APIs for use by monitor_db

    def get_orphaned_autoserv_processes(self):
        return set()


    def total_running_processes(self):
        return sum(pidfile_id._num_processes
                   for pidfile_id in self.nonfinished_pidfile_ids())


    def max_runnable_processes(self, username, drone_hostnames_allowed):
        return self.process_capacity - self.total_running_processes()


    def refresh(self):
        for pidfile_id in self._unregistered_pidfiles:
            # intentionally handle non-registered pidfiles silently
            self._pidfiles.pop(pidfile_id, None)
        self._unregistered_pidfiles = set()


    def execute_actions(self):
        # executing an "execute_command" causes a pidfile to be created
        for pidfile_id in self._future_pidfiles:
            # Process objects are opaque to monitor_db
            process = object()
            self._pidfiles[pidfile_id].process = process
            self._process_index[process] = pidfile_id
        self._future_pidfiles = []

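        # 271 is this mock's "killed by a signal" exit status: kill_process()
        # queues a pidfile here after SIGKILL, and tests likewise pass
        # exit_status=271 to finish_process() to simulate a job dying from a
        # signal.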
        for pidfile_id in self._set_pidfile_exit_status_queue:
            self._set_pidfile_exit_status(pidfile_id, 271)
        self._set_pidfile_exit_status_queue = []


    def attach_file_to_execution(self, result_dir, file_contents,
                                 file_path=None):
        self._attached_files.setdefault(result_dir, set()).add((file_path,
                                                                file_contents))
        return 'attach_path'


    def _initialize_pidfile(self, pidfile_id):
        if pidfile_id not in self._pidfiles:
            assert pidfile_id.key() not in self._pidfile_index
            self._pidfiles[pidfile_id] = drone_manager.PidfileContents()
            self._pidfile_index[pidfile_id.key()] = pidfile_id


    def _set_last_pidfile(self, pidfile_id, working_directory, pidfile_name):
        if working_directory.startswith('hosts/'):
            # such paths look like hosts/host1/1-verify, we'll grab the end
            type_string = working_directory.rsplit('-', 1)[1]
            pidfile_type = _PidfileType.get_value(type_string)
        else:
            pidfile_type = _PIDFILE_TO_PIDFILE_TYPE[pidfile_name]
        self._last_pidfile_id[pidfile_type] = pidfile_id


    def execute_command(self, command, working_directory, pidfile_name,
                        num_processes, log_file=None, paired_with_pidfile=None,
                        username=None, drone_hostnames_allowed=None):
        logging.debug('Executing %s in %s', command, working_directory)
        pidfile_id = self._DummyPidfileId(working_directory, pidfile_name)
        if pidfile_id.key() in self._pidfile_index:
            pidfile_id = self._pidfile_index[pidfile_id.key()]
        pidfile_id._num_processes = num_processes
        pidfile_id._paired_with_pidfile = paired_with_pidfile

        self._future_pidfiles.append(pidfile_id)
        self._initialize_pidfile(pidfile_id)
        self._pidfile_index[(working_directory, pidfile_name)] = pidfile_id
        self._set_last_pidfile(pidfile_id, working_directory, pidfile_name)
        return pidfile_id


    def get_pidfile_contents(self, pidfile_id, use_second_read=False):
        if pidfile_id not in self._pidfiles:
            logging.debug('Request for nonexistent pidfile %s', pidfile_id)
        return self._pidfiles.get(pidfile_id, drone_manager.PidfileContents())


    def is_process_running(self, process):
        return True


    def register_pidfile(self, pidfile_id):
        self._initialize_pidfile(pidfile_id)


    def unregister_pidfile(self, pidfile_id):
        self._unregistered_pidfiles.add(pidfile_id)


    def declare_process_count(self, pidfile_id, num_processes):
        # set the attribute execute_command() sets and
        # total_running_processes() reads (was the unused
        # pidfile_id.num_processes)
        pidfile_id._num_processes = num_processes


    def absolute_path(self, path):
        return 'absolute/' + path


    def write_lines_to_file(self, file_path, lines, paired_with_process=None):
        # TODO: record this
        pass


    def get_pidfile_id_from(self, execution_tag, pidfile_name):
        default_pidfile = self._DummyPidfileId(execution_tag, pidfile_name,
                                               num_processes=0)
        return self._pidfile_index.get((execution_tag, pidfile_name),
                                       default_pidfile)


    def kill_process(self, process, sig=signal.SIGKILL):
        pidfile_id = self._process_index[process]

        if pidfile_id not in self._pids_to_signals_received:
            self._pids_to_signals_received[pidfile_id] = set()
        self._pids_to_signals_received[pidfile_id].add(sig)

        if signal.SIGKILL == sig:
            self._set_pidfile_exit_status_queue.append(pidfile_id)


class MockEmailManager(NullMethodObject):
    _NULL_METHODS = ('send_queued_emails', 'send_email')

    def enqueue_notify_email(self, subject, message):
        logging.warning('enqueue_notify_email: %s', subject)
        logging.warning(message)


class SchedulerFunctionalTest(unittest.TestCase,
                              frontend_test_utils.FrontendTestMixin):
    # some number of ticks after which the scheduler is presumed to have
    # stabilized, given no external changes
    _A_LOT_OF_TICKS = 10

    def setUp(self):
        self._frontend_common_setup()
        self._set_stubs()
        self._set_global_config_values()
        self._create_dispatcher()

        logging.basicConfig(level=logging.DEBUG)


    def _create_dispatcher(self):
        self.dispatcher = monitor_db.Dispatcher()


    def tearDown(self):
        self._database.disconnect()
        self._frontend_common_teardown()


    def _set_stubs(self):
        self.mock_config = global_config.FakeGlobalConfig()
        self.god.stub_with(global_config, 'global_config', self.mock_config)

        self.mock_drone_manager = MockDroneManager()
        drone_manager._set_instance(self.mock_drone_manager)

        self.mock_email_manager = MockEmailManager()
        self.god.stub_with(email_manager, 'manager', self.mock_email_manager)

        self._database = (
            database_connection.TranslatingDatabase.get_test_database(
                translators=scheduler_lib._DB_TRANSLATORS))
        self._database.connect(db_type='django')
        self.god.stub_with(monitor_db, '_db', self._database)
        self.god.stub_with(scheduler_models, '_db', self._database)

        MockConnectionManager.db = self._database
        scheduler_lib.ConnectionManager = MockConnectionManager

        monitor_db.initialize_globals()
        scheduler_models.initialize_globals()

        patcher = mock.patch(
                'autotest_lib.scheduler.luciferlib.is_lucifer_enabled',
                lambda: False)
        patcher.start()
        self.addCleanup(patcher.stop)


    def _set_global_config_values(self):
        self.mock_config.set_config_value('SCHEDULER', 'pidfile_timeout_mins',
                                          1)
        self.mock_config.set_config_value('SCHEDULER', 'enable_archiving', True)
        # clean_interval_minutes was set twice (60, then 50); keep the value
        # that actually took effect
        self.mock_config.set_config_value('SCHEDULER',
                                          'clean_interval_minutes', 50)
        self.mock_config.set_config_value('SCHEDULER',
                                          'max_parse_processes', 50)
        self.mock_config.set_config_value('SCHEDULER',
                                          'max_transfer_processes', 50)
        self.mock_config.set_config_value('SCHEDULER',
                                          'max_provision_retries', 1)
        self.mock_config.set_config_value('SCHEDULER', 'max_repair_limit', 1)
        self.mock_config.set_config_value(
                'SCHEDULER', 'secs_to_wait_for_atomic_group_hosts', 600)
        self.mock_config.set_config_value(
                'SCHEDULER', 'inline_host_acquisition', True)
        scheduler_config.config.read_config()


    def _initialize_test(self):
        self.dispatcher.initialize()


    def _run_dispatcher(self):
        for _ in xrange(self._A_LOT_OF_TICKS):
            self.dispatcher.tick()


    def test_idle(self):
        self._initialize_test()
        self._run_dispatcher()


    def _assert_process_executed(self, working_directory, pidfile_name):
        # previously hardcoded 'hosts/host1/1-verify' and AUTOSERV_PID_FILE,
        # ignoring the arguments
        process_was_executed = self.mock_drone_manager.was_process_executed(
                working_directory, pidfile_name)
        self.assert_(process_was_executed,
                     '%s/%s not executed' % (working_directory, pidfile_name))


    def _update_instance(self, model_instance):
        return type(model_instance).objects.get(pk=model_instance.pk)


    def _check_statuses(self, queue_entry, queue_entry_status,
                        host_status=None):
        self._check_entry_status(queue_entry, queue_entry_status)
        if host_status:
            self._check_host_status(queue_entry.host, host_status)


    def _check_entry_status(self, queue_entry, status):
        # update from DB
        queue_entry = self._update_instance(queue_entry)
        self.assertEquals(queue_entry.status, status)


    def _check_host_status(self, host, status):
        # update from DB
        host = self._update_instance(host)
        self.assertEquals(host.status, status)


    def _run_pre_job_verify(self, queue_entry):
        self._run_dispatcher() # launches verify
        self._check_statuses(queue_entry, HqeStatus.VERIFYING,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)


    def test_simple_job(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # launches job
        self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)
        self._finish_job(queue_entry)
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
        self._assert_nothing_is_running()


    def _setup_for_pre_job_reset(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.reboot_before = model_attributes.RebootBefore.ALWAYS
        job.save()
        return queue_entry


    def _run_pre_job_reset_job(self, queue_entry):
        self._run_dispatcher() # reset
        self._check_statuses(queue_entry, HqeStatus.RESETTING,
                             HostStatus.RESETTING)
        self.mock_drone_manager.finish_process(_PidfileType.RESET)
        self._run_dispatcher() # job
        self._finish_job(queue_entry)


    def test_pre_job_reset(self):
        queue_entry = self._setup_for_pre_job_reset()
        self._run_pre_job_reset_job(queue_entry)


    def _run_pre_job_reset_one_failure(self):
        queue_entry = self._setup_for_pre_job_reset()
        self._run_dispatcher() # reset
        self.mock_drone_manager.finish_process(_PidfileType.RESET,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self._check_statuses(queue_entry, HqeStatus.QUEUED,
                             HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        return queue_entry


    def test_pre_job_reset_failure(self):
        queue_entry = self._run_pre_job_reset_one_failure()
        # from here the job should run as normal
        self._run_pre_job_reset_job(queue_entry)


    def test_pre_job_reset_double_failure(self):
        # TODO (showard): this test isn't perfect.  in reality, when the second
        # reset fails, it copies its results over to the job directory using
        # copy_results_on_drone() and then parses them.  since we don't handle
        # that, there appear to be no results at the job directory.  the
        # scheduler handles this gracefully, parsing gets effectively skipped,
        # and this test passes as is.  but we ought to properly test that
        # behavior.
        queue_entry = self._run_pre_job_reset_one_failure()
        self._run_dispatcher() # second reset
        self.mock_drone_manager.finish_process(_PidfileType.RESET,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.FAILED,
                             HostStatus.REPAIR_FAILED)
        # nothing else should run
        self._assert_nothing_is_running()


    def _assert_nothing_is_running(self):
        self.assertEquals(self.mock_drone_manager.running_pidfile_ids(), [])


    def _setup_for_post_job_cleanup(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.reboot_after = model_attributes.RebootAfter.ALWAYS
        job.save()
        return queue_entry


    def _run_post_job_cleanup_failure_up_to_repair(self, queue_entry,
                                                   include_verify=True):
        if include_verify:
            self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # job
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # parsing + cleanup
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # repair, HQE unaffected
        return queue_entry


    def test_post_job_cleanup_failure(self):
        queue_entry = self._setup_for_post_job_cleanup()
        self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)


    def test_post_job_cleanup_failure_repair_failure(self):
        queue_entry = self._setup_for_post_job_cleanup()
        self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.REPAIR_FAILED)


    def _ensure_post_job_process_is_paired(self, queue_entry, pidfile_type):
        pidfile_name = _PIDFILE_TYPE_TO_PIDFILE[pidfile_type]
        queue_entry = self._update_instance(queue_entry)
        pidfile_id = self.mock_drone_manager.pidfile_from_path(
                queue_entry.execution_path(), pidfile_name)
        self.assert_(pidfile_id._paired_with_pidfile)


    def _finish_job(self, queue_entry):
        self._check_statuses(queue_entry, HqeStatus.RUNNING)
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # launches parsing
        self._check_statuses(queue_entry, HqeStatus.PARSING)
        self._ensure_post_job_process_is_paired(queue_entry, _PidfileType.PARSE)
        self._finish_parsing()


    def _finish_parsing(self):
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()


    def _create_reverify_request(self):
        host = self.hosts[0]
        models.SpecialTask.schedule_special_task(
                host=host, task=models.SpecialTask.Task.VERIFY)
        return host


    def test_requested_reverify(self):
        host = self._create_reverify_request()
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY)


    def test_requested_reverify_failure(self):
        host = self._create_reverify_request()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self._check_host_status(host, HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY)


    def _setup_for_do_not_verify(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        queue_entry.host.protection = host_protections.Protection.DO_NOT_VERIFY
        queue_entry.host.save()
        return queue_entry


    def test_do_not_verify_job(self):
        queue_entry = self._setup_for_do_not_verify()
        self._run_dispatcher() # runs job directly
        self._finish_job(queue_entry)


    def test_do_not_verify_job_with_cleanup(self):
        queue_entry = self._setup_for_do_not_verify()
        queue_entry.job.reboot_before = model_attributes.RebootBefore.ALWAYS
        queue_entry.job.save()

        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher() # job
        self._finish_job(queue_entry)


    def test_do_not_verify_pre_job_cleanup_failure(self):
        queue_entry = self._setup_for_do_not_verify()
        queue_entry.job.reboot_before = model_attributes.RebootBefore.ALWAYS
        queue_entry.job.save()

        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # failure ignored; job runs
        self._finish_job(queue_entry)


    def test_do_not_verify_post_job_cleanup_failure(self):
        queue_entry = self._setup_for_do_not_verify()
        queue_entry.job.reboot_after = model_attributes.RebootAfter.ALWAYS
        queue_entry.job.save()

        self._run_post_job_cleanup_failure_up_to_repair(queue_entry,
                                                        include_verify=False)
        # failure ignored, host still set to Ready
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
        self._run_dispatcher() # nothing else runs
        self._assert_nothing_is_running()


    def test_do_not_verify_requested_reverify_failure(self):
        host = self._create_reverify_request()
        host.protection = host_protections.Protection.DO_NOT_VERIFY
        host.save()

        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY) # ignore failure
        self._assert_nothing_is_running()


    def test_job_abort_in_verify(self):
        self._initialize_test()
        job = self._create_job(hosts=[1])
        queue_entries = list(job.hostqueueentry_set.all())
        self._run_dispatcher() # launches verify
        self._check_statuses(queue_entries[0], HqeStatus.VERIFYING)
        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher() # kills verify, launches cleanup
        self.assert_(self.mock_drone_manager.was_last_process_killed(
                _PidfileType.VERIFY, set([signal.SIGKILL])))
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher()


    def test_job_abort(self):
        self._initialize_test()
        job = self._create_job(hosts=[1])
        job.run_reset = False
        job.save()
        queue_entries = list(job.hostqueueentry_set.all())

        self._run_dispatcher() # launches job

        self._check_statuses(queue_entries[0], HqeStatus.RUNNING)

        job.hostqueueentry_set.update(aborted=True)

        self._run_dispatcher() # kills job, launches gathering

        self._check_statuses(queue_entries[0], HqeStatus.GATHERING)
        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
        self._run_dispatcher() # launches parsing + cleanup
        queue_entry = job.hostqueueentry_set.all()[0]
        self._finish_parsing()
        # The abort will cause gathering to launch a cleanup.
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.RESET)
        self._run_dispatcher()


    def test_job_abort_queued_synchronous(self):
        self._initialize_test()
        job = self._create_job(hosts=[1,2])
        job.synch_count = 2
        job.save()

        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher()
        for host_queue_entry in job.hostqueueentry_set.all():
            self.assertEqual(host_queue_entry.status,
                             HqeStatus.ABORTED)


    def test_no_pidfile_leaking(self):
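        # Drive several complete job flows; after each, refresh() drops any
        # unregistered pidfiles, so an empty _pidfiles map means the scheduler
        # unregistered every pidfile it created.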
        self._initialize_test()

        self.test_simple_job()
        self.mock_drone_manager.refresh()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})

        self.test_job_abort_in_verify()
        self.mock_drone_manager.refresh()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})

        self.test_job_abort()
        self.mock_drone_manager.refresh()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})


    def _make_job_and_queue_entry(self):
        job = self._create_job(hosts=[1])
        queue_entry = job.hostqueueentry_set.all()[0]
        return job, queue_entry


    def test_recover_running_no_process(self):
        # recovery should re-execute a Running HQE if no process is found
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.RUNNING
        queue_entry.execution_subdir = '1-myuser/host1'
        queue_entry.save()
        queue_entry.host.status = HostStatus.RUNNING
        queue_entry.host.save()

        self._initialize_test()
        self._run_dispatcher()
        self._finish_job(queue_entry)


    def test_recover_verifying_hqe_no_special_task(self):
        # recovery should move a Resetting HQE with no corresponding
        # Verify or Reset SpecialTask back to Queued.
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.RESETTING
        queue_entry.save()

        # make some dummy SpecialTasks that shouldn't count
        models.SpecialTask.objects.create(
                host=queue_entry.host,
                task=models.SpecialTask.Task.RESET,
                requested_by=models.User.current_user())
        models.SpecialTask.objects.create(
                host=queue_entry.host,
                task=models.SpecialTask.Task.CLEANUP,
                queue_entry=queue_entry,
                is_complete=True,
                requested_by=models.User.current_user())

        self._initialize_test()
        self._check_statuses(queue_entry, HqeStatus.QUEUED)


    def _test_recover_verifying_hqe_helper(self, task, pidfile_type):
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.VERIFYING
        queue_entry.save()

        special_task = models.SpecialTask.objects.create(
                host=queue_entry.host, task=task, queue_entry=queue_entry)

        self._initialize_test()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(pidfile_type)
        self._run_dispatcher()
        # don't bother checking the rest of the job execution, as long as the
        # SpecialTask ran


    def test_recover_verifying_hqe_with_cleanup(self):
        # recover an HQE that was in pre-job cleanup
        self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.CLEANUP,
                                                _PidfileType.CLEANUP)


    def test_recover_verifying_hqe_with_verify(self):
        # recover an HQE that was in pre-job verify
        self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.VERIFY,
                                                _PidfileType.VERIFY)


    def test_recover_parsing(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.run_verify = False
        job.run_reset = False
        job.reboot_after = model_attributes.RebootAfter.NEVER
        job.save()

        self._run_dispatcher() # launches job
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # launches parsing

        # now "restart" the scheduler
        self._create_dispatcher()
        self._initialize_test()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()


    def test_recover_parsing__no_process_already_aborted(self):
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.execution_subdir = 'host1'
        queue_entry.status = HqeStatus.PARSING
        queue_entry.aborted = True
        queue_entry.save()

        self._initialize_test()
        self._run_dispatcher()


    def test_job_scheduled_just_after_abort(self):
        # test a pretty obscure corner case where a job is aborted while queued,
        # another job is ready to run, and throttling is active. the post-abort
        # cleanup must not be pre-empted by the second job.
        # This test kind of doesn't make sense anymore after verify+cleanup
        # were merged into reset.  It should maybe just be removed.
        job1, queue_entry1 = self._make_job_and_queue_entry()
        queue_entry1.save()
        job2, queue_entry2 = self._make_job_and_queue_entry()
        job2.reboot_before = model_attributes.RebootBefore.IF_DIRTY
        job2.save()

        self.mock_drone_manager.process_capacity = 0
        self._run_dispatcher() # schedule job1, but won't start verify
        job1.hostqueueentry_set.update(aborted=True)
        self.mock_drone_manager.process_capacity = 100
        self._run_dispatcher() # reset must run here, not verify for job2
        self._check_statuses(queue_entry1, HqeStatus.ABORTED,
                             HostStatus.RESETTING)
        self.mock_drone_manager.finish_process(_PidfileType.RESET)
        self._run_dispatcher() # now verify starts for job2
        self._check_statuses(queue_entry2, HqeStatus.RUNNING,
                             HostStatus.RUNNING)


    def test_reverify_interrupting_pre_job(self):
        # ensure things behave sanely if a reverify is scheduled in the middle
        # of pre-job actions
        _, queue_entry = self._make_job_and_queue_entry()

        self._run_dispatcher() # pre-job verify
        self._create_reverify_request()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher() # reverify runs now
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # pre-job verify
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # and job runs...
        self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)
        self._finish_job(queue_entry) # reverify has been deleted
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.READY)
        self._assert_nothing_is_running()


    def test_reverify_while_job_running(self):
        # once a job is running, a reverify must not be allowed to preempt
        # Gathering
        _, queue_entry = self._make_job_and_queue_entry()
        self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # job runs
        self._create_reverify_request()
        # make job end with a signal, so gathering will run
        self.mock_drone_manager.finish_process(_PidfileType.JOB,
                                               exit_status=271)
        self._run_dispatcher() # gathering must start
        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
        self._run_dispatcher() # parsing and cleanup
        self._finish_parsing()
        self._run_dispatcher() # now reverify runs
        self._check_statuses(queue_entry, HqeStatus.FAILED,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(queue_entry.host, HostStatus.READY)


    def test_reverify_while_host_pending(self):
        # ensure that if a reverify is scheduled while a host is in Pending, it
        # won't run until the host is actually free
        job = self._create_job(hosts=[1,2])
        queue_entry = job.hostqueueentry_set.get(host__hostname='host1')
        job.synch_count = 2
        job.save()

        host2 = self.hosts[1]
        host2.locked = True
        host2.save()

        self._run_dispatcher() # verify host1
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # host1 Pending
        self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)
        self._create_reverify_request()
        self._run_dispatcher() # nothing should happen here
        self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)

        # now let the job run
        host2.locked = False
        host2.save()
        self._run_dispatcher() # verify host2
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # run job
        self._finish_job(queue_entry)
        # the reverify should now be running
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(queue_entry.host, HostStatus.READY)


    def test_throttling(self):
        job = self._create_job(hosts=[1,2,3])
        job.synch_count = 3
        job.save()

        queue_entries = list(job.hostqueueentry_set.all())
        def _check_hqe_statuses(*statuses):
            for queue_entry, status in zip(queue_entries, statuses):
                self._check_statuses(queue_entry, status)

        self.mock_drone_manager.process_capacity = 2
        self._run_dispatcher() # verify runs on 1 and 2
        queue_entries = list(job.hostqueueentry_set.all())
        _check_hqe_statuses(HqeStatus.QUEUED,
                            HqeStatus.VERIFYING, HqeStatus.VERIFYING)
        self.assertEquals(len(self.mock_drone_manager.running_pidfile_ids()), 2)

        self.mock_drone_manager.finish_specific_process(
                'hosts/host3/1-verify', drone_manager.AUTOSERV_PID_FILE)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # verify runs on 3
        _check_hqe_statuses(HqeStatus.VERIFYING, HqeStatus.PENDING,
                            HqeStatus.PENDING)

        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # job won't run due to throttling
        _check_hqe_statuses(HqeStatus.STARTING, HqeStatus.STARTING,
                            HqeStatus.STARTING)
        self._assert_nothing_is_running()

        self.mock_drone_manager.process_capacity = 3
        self._run_dispatcher() # now job runs
        _check_hqe_statuses(HqeStatus.RUNNING, HqeStatus.RUNNING,
                            HqeStatus.RUNNING)

        self.mock_drone_manager.process_capacity = 2
        self.mock_drone_manager.finish_process(_PidfileType.JOB,
                                               exit_status=271)
        self._run_dispatcher() # gathering won't run due to throttling
        _check_hqe_statuses(HqeStatus.GATHERING, HqeStatus.GATHERING,
                            HqeStatus.GATHERING)
        self._assert_nothing_is_running()

        self.mock_drone_manager.process_capacity = 3
        self._run_dispatcher() # now gathering runs

        self.mock_drone_manager.process_capacity = 0
        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
        self._run_dispatcher() # parsing runs despite throttling
        _check_hqe_statuses(HqeStatus.PARSING, HqeStatus.PARSING,
                            HqeStatus.PARSING)


    def test_abort_starting_while_throttling(self):
        self._initialize_test()
        job = self._create_job(hosts=[1,2], synchronous=True)
        queue_entry = job.hostqueueentry_set.all()[0]
        job.run_verify = False
        job.run_reset = False
        job.reboot_after = model_attributes.RebootAfter.NEVER
        job.save()

        self.mock_drone_manager.process_capacity = 0
        self._run_dispatcher() # go to starting, but don't start job
        self._check_statuses(queue_entry, HqeStatus.STARTING,
                             HostStatus.PENDING)

        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.GATHERING,
                             HostStatus.RUNNING)

        self.mock_drone_manager.process_capacity = 5
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.ABORTED,
                             HostStatus.CLEANING)


    def test_simple_metahost_assignment(self):
        job = self._create_job(metahosts=[1])
        self._run_dispatcher()
        entry = job.hostqueueentry_set.all()[0]
        self.assertEquals(entry.host.hostname, 'host1')
        self._check_statuses(entry, HqeStatus.VERIFYING, HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_statuses(entry, HqeStatus.RUNNING, HostStatus.RUNNING)
        # rest of job proceeds normally


    def test_metahost_fail_verify(self):
        self.hosts[1].labels.add(self.labels[0]) # put label1 also on host2
        job = self._create_job(metahosts=[1])
        self._run_dispatcher() # assigned to host1
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher() # host1 failed, gets reassigned to host2
        entry = job.hostqueueentry_set.all()[0]
        self.assertEquals(entry.host.hostname, 'host2')
        self._check_statuses(entry, HqeStatus.VERIFYING, HostStatus.VERIFYING)
        self._check_host_status(self.hosts[0], HostStatus.REPAIRING)

        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_statuses(entry, HqeStatus.RUNNING, HostStatus.RUNNING)


    def test_hostless_job(self):
        job = self._create_job(hostless=True)
        entry = job.hostqueueentry_set.all()[0]

        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.RUNNING)

        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.PARSING)
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.COMPLETED)


    def test_pre_job_keyvals(self):
        job = self._create_job(hosts=[1])
        job.run_verify = False
        job.run_reset = False
        job.reboot_before = model_attributes.RebootBefore.NEVER
        job.save()
        models.JobKeyval.objects.create(job=job, key='mykey', value='myvalue')

        self._run_dispatcher()
        self._finish_job(job.hostqueueentry_set.all()[0])

        attached_files = self.mock_drone_manager.attached_files(
                '1-autotest_system/host1')
        job_keyval_path = '1-autotest_system/host1/keyval'
        self.assert_(job_keyval_path in attached_files, attached_files)
        keyval_contents = attached_files[job_keyval_path]
        keyval_dict = dict(line.strip().split('=', 1)
                           for line in keyval_contents.splitlines())
        self.assert_('job_queued' in keyval_dict, keyval_dict)
        self.assertEquals(keyval_dict['mykey'], 'myvalue')


# This tests the scheduler functions with the archiving step disabled
class SchedulerFunctionalTestNoArchiving(SchedulerFunctionalTest):
    def _set_global_config_values(self):
        super(SchedulerFunctionalTestNoArchiving,
              self)._set_global_config_values()
        self.mock_config.set_config_value('SCHEDULER', 'enable_archiving',
                                          False)


    def _finish_parsing(self):
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()


    def _run_post_job_cleanup_failure_up_to_repair(self, queue_entry,
                                                   include_verify=True):
        if include_verify:
            self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # job
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # parsing + cleanup
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # repair, HQE unaffected
        return queue_entry


    def test_hostless_job(self):
        job = self._create_job(hostless=True)
        entry = job.hostqueueentry_set.all()[0]

        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.RUNNING)

        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.PARSING)
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.COMPLETED)


    def test_synchronous_with_reset(self):
        # For crbug/621257.
        job = self._create_job(hosts=[1, 2])
        job.synch_count = 2
        job.reboot_before = model_attributes.RebootBefore.ALWAYS
        job.save()

        hqe1 = job.hostqueueentry_set.get(host__hostname='host1')
        hqe2 = job.hostqueueentry_set.get(host__hostname='host2')

        self._run_dispatcher()

        self._check_statuses(hqe1, HqeStatus.RESETTING, HostStatus.RESETTING)
        self._check_statuses(hqe2, HqeStatus.RESETTING, HostStatus.RESETTING)

        self.mock_drone_manager.finish_active_process_on_host(1)
        self._run_dispatcher()

        self._check_statuses(hqe1, HqeStatus.PENDING, HostStatus.PENDING)
        self._check_statuses(hqe2, HqeStatus.RESETTING, HostStatus.RESETTING)

        self.mock_drone_manager.finish_active_process_on_host(2)
        self._run_dispatcher()

        self._check_statuses(hqe1, HqeStatus.RUNNING, HostStatus.RUNNING)
        self._check_statuses(hqe2, HqeStatus.RUNNING, HostStatus.RUNNING)


if __name__ == '__main__':
    unittest.main()