• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1#!/usr/bin/python
2# pylint: disable=missing-docstring
3
4import logging, os, signal, unittest
5import common
6import mock
7from autotest_lib.client.common_lib import enum, global_config, host_protections
8from autotest_lib.database import database_connection
9from autotest_lib.frontend import setup_django_environment
10from autotest_lib.frontend.afe import frontend_test_utils, models
11from autotest_lib.frontend.afe import model_attributes
12from autotest_lib.scheduler import drone_manager, email_manager
13from autotest_lib.scheduler import monitor_db, scheduler_models
14from autotest_lib.scheduler import scheduler_config
15from autotest_lib.scheduler import scheduler_lib
16
17HqeStatus = models.HostQueueEntry.Status
18HostStatus = models.Host.Status
19
20class NullMethodObject(object):
21    _NULL_METHODS = ()
22
23    def __init__(self):
24        def null_method(*args, **kwargs):
25            pass
26
27        for method_name in self._NULL_METHODS:
28            setattr(self, method_name, null_method)
29
30# the SpecialTask names here must match the suffixes used on the SpecialTask
31# results directories
32_PidfileType = enum.Enum('verify', 'cleanup', 'repair', 'job', 'gather',
33                         'parse', 'archive', 'reset', 'provision')
34
35
36_PIDFILE_TO_PIDFILE_TYPE = {
37        drone_manager.AUTOSERV_PID_FILE: _PidfileType.JOB,
38        drone_manager.CRASHINFO_PID_FILE: _PidfileType.GATHER,
39        drone_manager.PARSER_PID_FILE: _PidfileType.PARSE,
40        drone_manager.ARCHIVER_PID_FILE: _PidfileType.ARCHIVE,
41        }
42
43
44_PIDFILE_TYPE_TO_PIDFILE = dict((value, key) for key, value
45                                in _PIDFILE_TO_PIDFILE_TYPE.iteritems())
46
47
48class MockConnectionManager(object):
49    """docstring for MockConnectionManager"""
50
51    db = None
52
53    def __init__(self):
54        super(MockConnectionManager, self).__init__()
55
56    def get_connection(self):
57        assert MockConnectionManager.db
58        return MockConnectionManager.db
59
60
61class MockDroneManager(NullMethodObject):
62    """
63    Public attributes:
64    max_runnable_processes_value: value returned by max_runnable_processes().
65            tests can change this to activate throttling.
66    """
67    _NULL_METHODS = ('reinitialize_drones', 'copy_to_results_repository',
68                     'copy_results_on_drone', 'trigger_refresh', 'sync_refresh')
69
70    class _DummyPidfileId(object):
71        """
72        Object to represent pidfile IDs that is opaque to the scheduler code but
73        still debugging-friendly for us.
74        """
75        def __init__(self, working_directory, pidfile_name, num_processes=None):
76            self._working_directory = working_directory
77            self._pidfile_name = pidfile_name
78            self._num_processes = num_processes
79            self._paired_with_pidfile = None
80
81
82        def key(self):
83            """Key for MockDroneManager._pidfile_index"""
84            return (self._working_directory, self._pidfile_name)
85
86
87        def __str__(self):
88            return os.path.join(self._working_directory, self._pidfile_name)
89
90
91        def __repr__(self):
92            return '<_DummyPidfileId: %s>' % str(self)
93
94
95    def __init__(self):
96        super(MockDroneManager, self).__init__()
97        self.process_capacity = 100
98
99        # maps result_dir to set of tuples (file_path, file_contents)
100        self._attached_files = {}
101        # maps pidfile IDs to PidfileContents
102        self._pidfiles = {}
103        # pidfile IDs that haven't been created yet
104        self._future_pidfiles = []
105        # maps _PidfileType to the most recently created pidfile ID of that type
106        self._last_pidfile_id = {}
107        # maps (working_directory, pidfile_name) to pidfile IDs
108        self._pidfile_index = {}
109        # maps process to pidfile IDs
110        self._process_index = {}
111        # tracks pidfiles of processes that have been killed
112        self._pids_to_signals_received = {}
113        # pidfile IDs that have just been unregistered (so will disappear on the
114        # next cycle)
115        self._unregistered_pidfiles = set()
116        # Pids to write exit status for at end of tick
117        self._set_pidfile_exit_status_queue = []
118
119    # utility APIs for use by the test
120
121    def finish_process(self, pidfile_type, exit_status=0):
122        pidfile_id = self._last_pidfile_id[pidfile_type]
123        self._set_pidfile_exit_status(pidfile_id, exit_status)
124
125
126    def finish_specific_process(self, working_directory, pidfile_name):
127        pidfile_id = self.pidfile_from_path(working_directory, pidfile_name)
128        self._set_pidfile_exit_status(pidfile_id, 0)
129
130    def finish_active_process_on_host(self, host_id):
131        match = 'hosts/host%d/' % host_id
132        for pidfile_id in self.nonfinished_pidfile_ids():
133            if pidfile_id._working_directory.startswith(match):
134                self._set_pidfile_exit_status(pidfile_id, 0)
135                break
136        else:
137          raise KeyError('No active process matched %s' % match)
138
139    def _set_pidfile_exit_status(self, pidfile_id, exit_status):
140        assert pidfile_id is not None
141        contents = self._pidfiles[pidfile_id]
142        contents.exit_status = exit_status
143        contents.num_tests_failed = 0
144
145
146    def was_last_process_killed(self, pidfile_type, sigs):
147        pidfile_id = self._last_pidfile_id[pidfile_type]
148        return sigs == self._pids_to_signals_received[pidfile_id]
149
150
151    def nonfinished_pidfile_ids(self):
152        return [pidfile_id for pidfile_id, pidfile_contents
153                in self._pidfiles.iteritems()
154                if pidfile_contents.exit_status is None]
155
156
157    def running_pidfile_ids(self):
158        return [pidfile_id for pidfile_id in self.nonfinished_pidfile_ids()
159                if self._pidfiles[pidfile_id].process is not None]
160
161
162    def pidfile_from_path(self, working_directory, pidfile_name):
163        return self._pidfile_index[(working_directory, pidfile_name)]
164
165
166    def attached_files(self, working_directory):
167        """
168        Return dict mapping path to contents for attached files with specified
169        paths.
170        """
171        return dict((path, contents) for path, contents
172                    in self._attached_files.get(working_directory, [])
173                    if path is not None)
174
175
176    # DroneManager emulation APIs for use by monitor_db
177
178    def get_orphaned_autoserv_processes(self):
179        return set()
180
181
182    def total_running_processes(self):
183        return sum(pidfile_id._num_processes
184                   for pidfile_id in self.nonfinished_pidfile_ids())
185
186
187    def max_runnable_processes(self, username, drone_hostnames_allowed):
188        return self.process_capacity - self.total_running_processes()
189
190
191    def refresh(self):
192        for pidfile_id in self._unregistered_pidfiles:
193            # intentionally handle non-registered pidfiles silently
194            self._pidfiles.pop(pidfile_id, None)
195        self._unregistered_pidfiles = set()
196
197
198    def execute_actions(self):
199        # executing an "execute_command" causes a pidfile to be created
200        for pidfile_id in self._future_pidfiles:
201            # Process objects are opaque to monitor_db
202            process = object()
203            self._pidfiles[pidfile_id].process = process
204            self._process_index[process] = pidfile_id
205        self._future_pidfiles = []
206
207        for pidfile_id in self._set_pidfile_exit_status_queue:
208            self._set_pidfile_exit_status(pidfile_id, 271)
209        self._set_pidfile_exit_status_queue = []
210
211
212    def attach_file_to_execution(self, result_dir, file_contents,
213                                 file_path=None):
214        self._attached_files.setdefault(result_dir, set()).add((file_path,
215                                                                file_contents))
216        return 'attach_path'
217
218
219    def _initialize_pidfile(self, pidfile_id):
220        if pidfile_id not in self._pidfiles:
221            assert pidfile_id.key() not in self._pidfile_index
222            self._pidfiles[pidfile_id] = drone_manager.PidfileContents()
223            self._pidfile_index[pidfile_id.key()] = pidfile_id
224
225
226    def _set_last_pidfile(self, pidfile_id, working_directory, pidfile_name):
227        if working_directory.startswith('hosts/'):
228            # such paths look like hosts/host1/1-verify, we'll grab the end
229            type_string = working_directory.rsplit('-', 1)[1]
230            pidfile_type = _PidfileType.get_value(type_string)
231        else:
232            pidfile_type = _PIDFILE_TO_PIDFILE_TYPE[pidfile_name]
233        self._last_pidfile_id[pidfile_type] = pidfile_id
234
235
236    def execute_command(self, command, working_directory, pidfile_name,
237                        num_processes, log_file=None, paired_with_pidfile=None,
238                        username=None, drone_hostnames_allowed=None):
239        logging.debug('Executing %s in %s', command, working_directory)
240        pidfile_id = self._DummyPidfileId(working_directory, pidfile_name)
241        if pidfile_id.key() in self._pidfile_index:
242            pidfile_id = self._pidfile_index[pidfile_id.key()]
243        pidfile_id._num_processes = num_processes
244        pidfile_id._paired_with_pidfile = paired_with_pidfile
245
246        self._future_pidfiles.append(pidfile_id)
247        self._initialize_pidfile(pidfile_id)
248        self._pidfile_index[(working_directory, pidfile_name)] = pidfile_id
249        self._set_last_pidfile(pidfile_id, working_directory, pidfile_name)
250        return pidfile_id
251
252
253    def get_pidfile_contents(self, pidfile_id, use_second_read=False):
254        if pidfile_id not in self._pidfiles:
255            logging.debug('Request for nonexistent pidfile %s', pidfile_id)
256        return self._pidfiles.get(pidfile_id, drone_manager.PidfileContents())
257
258
259    def is_process_running(self, process):
260        return True
261
262
263    def register_pidfile(self, pidfile_id):
264        self._initialize_pidfile(pidfile_id)
265
266
267    def unregister_pidfile(self, pidfile_id):
268        self._unregistered_pidfiles.add(pidfile_id)
269
270
271    def declare_process_count(self, pidfile_id, num_processes):
272        pidfile_id.num_processes = num_processes
273
274
275    def absolute_path(self, path):
276        return 'absolute/' + path
277
278
279    def write_lines_to_file(self, file_path, lines, paired_with_process=None):
280        # TODO: record this
281        pass
282
283
284    def get_pidfile_id_from(self, execution_tag, pidfile_name):
285        default_pidfile = self._DummyPidfileId(execution_tag, pidfile_name,
286                                               num_processes=0)
287        return self._pidfile_index.get((execution_tag, pidfile_name),
288                                       default_pidfile)
289
290
291    def kill_process(self, process, sig=signal.SIGKILL):
292        pidfile_id = self._process_index[process]
293
294        if pidfile_id not in self._pids_to_signals_received:
295            self._pids_to_signals_received[pidfile_id] = set()
296        self._pids_to_signals_received[pidfile_id].add(sig)
297
298        if signal.SIGKILL == sig:
299            self._set_pidfile_exit_status_queue.append(pidfile_id)
300
301
302class MockEmailManager(NullMethodObject):
303    _NULL_METHODS = ('send_queued_emails', 'send_email')
304
305    def enqueue_notify_email(self, subject, message):
306        logging.warning('enqueue_notify_email: %s', subject)
307        logging.warning(message)
308
309
310class SchedulerFunctionalTest(unittest.TestCase,
311                              frontend_test_utils.FrontendTestMixin):
312    # some number of ticks after which the scheduler is presumed to have
313    # stabilized, given no external changes
314    _A_LOT_OF_TICKS = 10
315
316    def setUp(self):
317        self._frontend_common_setup()
318        self._set_stubs()
319        self._set_global_config_values()
320        self._create_dispatcher()
321
322        logging.basicConfig(level=logging.DEBUG)
323
324
325    def _create_dispatcher(self):
326        self.dispatcher = monitor_db.Dispatcher()
327
328
329    def tearDown(self):
330        self._database.disconnect()
331        self._frontend_common_teardown()
332
333
334    def _set_stubs(self):
335        self.mock_config = global_config.FakeGlobalConfig()
336        self.god.stub_with(global_config, 'global_config', self.mock_config)
337
338        self.mock_drone_manager = MockDroneManager()
339        drone_manager._set_instance(self.mock_drone_manager)
340
341        self.mock_email_manager = MockEmailManager()
342        self.god.stub_with(email_manager, 'manager', self.mock_email_manager)
343
344        self._database = (
345            database_connection.TranslatingDatabase.get_test_database(
346                translators=scheduler_lib._DB_TRANSLATORS))
347        self._database.connect(db_type='django')
348        self.god.stub_with(monitor_db, '_db', self._database)
349        self.god.stub_with(scheduler_models, '_db', self._database)
350
351        MockConnectionManager.db = self._database
352        scheduler_lib.ConnectionManager = MockConnectionManager
353
354        monitor_db.initialize_globals()
355        scheduler_models.initialize_globals()
356
357        patcher = mock.patch(
358                'autotest_lib.scheduler.luciferlib.is_lucifer_enabled',
359                lambda: False)
360        patcher.start()
361        self.addCleanup(patcher.stop)
362
363
364    def _set_global_config_values(self):
365        self.mock_config.set_config_value('SCHEDULER', 'pidfile_timeout_mins',
366                                          1)
367        self.mock_config.set_config_value('SCHEDULER', 'gc_stats_interval_mins',
368                                          999999)
369        self.mock_config.set_config_value('SCHEDULER', 'enable_archiving', True)
370        self.mock_config.set_config_value('SCHEDULER',
371                                          'clean_interval_minutes', 60)
372        self.mock_config.set_config_value('SCHEDULER',
373                                          'max_parse_processes', 50)
374        self.mock_config.set_config_value('SCHEDULER',
375                                          'max_transfer_processes', 50)
376        self.mock_config.set_config_value('SCHEDULER',
377                                          'clean_interval_minutes', 50)
378        self.mock_config.set_config_value('SCHEDULER',
379                                          'max_provision_retries', 1)
380        self.mock_config.set_config_value('SCHEDULER', 'max_repair_limit', 1)
381        self.mock_config.set_config_value(
382                'SCHEDULER', 'secs_to_wait_for_atomic_group_hosts', 600)
383        self.mock_config.set_config_value(
384                'SCHEDULER', 'inline_host_acquisition', True)
385        scheduler_config.config.read_config()
386
387
388    def _initialize_test(self):
389        self.dispatcher.initialize()
390
391
392    def _run_dispatcher(self):
393        for _ in xrange(self._A_LOT_OF_TICKS):
394            self.dispatcher.tick()
395
396
397    def test_idle(self):
398        self._initialize_test()
399        self._run_dispatcher()
400
401
402    def _assert_process_executed(self, working_directory, pidfile_name):
403        process_was_executed = self.mock_drone_manager.was_process_executed(
404                'hosts/host1/1-verify', drone_manager.AUTOSERV_PID_FILE)
405        self.assert_(process_was_executed,
406                     '%s/%s not executed' % (working_directory, pidfile_name))
407
408
409    def _update_instance(self, model_instance):
410        return type(model_instance).objects.get(pk=model_instance.pk)
411
412
413    def _check_statuses(self, queue_entry, queue_entry_status,
414                        host_status=None):
415        self._check_entry_status(queue_entry, queue_entry_status)
416        if host_status:
417            self._check_host_status(queue_entry.host, host_status)
418
419
420    def _check_entry_status(self, queue_entry, status):
421        # update from DB
422        queue_entry = self._update_instance(queue_entry)
423        self.assertEquals(queue_entry.status, status)
424
425
426    def _check_host_status(self, host, status):
427        # update from DB
428        host = self._update_instance(host)
429        self.assertEquals(host.status, status)
430
431
432    def _run_pre_job_verify(self, queue_entry):
433        self._run_dispatcher() # launches verify
434        self._check_statuses(queue_entry, HqeStatus.VERIFYING,
435                             HostStatus.VERIFYING)
436        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
437
438
439    def test_simple_job(self):
440        self._initialize_test()
441        job, queue_entry = self._make_job_and_queue_entry()
442        self._run_pre_job_verify(queue_entry)
443        self._run_dispatcher() # launches job
444        self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)
445        self._finish_job(queue_entry)
446        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
447        self._assert_nothing_is_running()
448
449
450    def _setup_for_pre_job_reset(self):
451        self._initialize_test()
452        job, queue_entry = self._make_job_and_queue_entry()
453        job.reboot_before = model_attributes.RebootBefore.ALWAYS
454        job.save()
455        return queue_entry
456
457
458    def _run_pre_job_reset_job(self, queue_entry):
459        self._run_dispatcher() # reset
460        self._check_statuses(queue_entry, HqeStatus.RESETTING,
461                             HostStatus.RESETTING)
462        self.mock_drone_manager.finish_process(_PidfileType.RESET)
463        self._run_dispatcher() # job
464        self._finish_job(queue_entry)
465
466
467    def test_pre_job_reset(self):
468        queue_entry = self._setup_for_pre_job_reset()
469        self._run_pre_job_reset_job(queue_entry)
470
471
472    def _run_pre_job_reset_one_failure(self):
473        queue_entry = self._setup_for_pre_job_reset()
474        self._run_dispatcher() # reset
475        self.mock_drone_manager.finish_process(_PidfileType.RESET,
476                                               exit_status=256)
477        self._run_dispatcher() # repair
478        self._check_statuses(queue_entry, HqeStatus.QUEUED,
479                             HostStatus.REPAIRING)
480        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
481        return queue_entry
482
483
484    def test_pre_job_reset_failure(self):
485        queue_entry = self._run_pre_job_reset_one_failure()
486        # from here the job should run as normal
487        self._run_pre_job_reset_job(queue_entry)
488
489
490    def test_pre_job_reset_double_failure(self):
491        # TODO (showard): this test isn't perfect.  in reality, when the second
492        # reset fails, it copies its results over to the job directory using
493        # copy_results_on_drone() and then parses them.  since we don't handle
494        # that, there appear to be no results at the job directory.  the
495        # scheduler handles this gracefully, parsing gets effectively skipped,
496        # and this test passes as is.  but we ought to properly test that
497        # behavior.
498        queue_entry = self._run_pre_job_reset_one_failure()
499        self._run_dispatcher() # second reset
500        self.mock_drone_manager.finish_process(_PidfileType.RESET,
501                                               exit_status=256)
502        self._run_dispatcher()
503        self._check_statuses(queue_entry, HqeStatus.FAILED,
504                             HostStatus.REPAIR_FAILED)
505        # nothing else should run
506        self._assert_nothing_is_running()
507
508
509    def _assert_nothing_is_running(self):
510        self.assertEquals(self.mock_drone_manager.running_pidfile_ids(), [])
511
512
513    def _setup_for_post_job_cleanup(self):
514        self._initialize_test()
515        job, queue_entry = self._make_job_and_queue_entry()
516        job.reboot_after = model_attributes.RebootAfter.ALWAYS
517        job.save()
518        return queue_entry
519
520
521    def _run_post_job_cleanup_failure_up_to_repair(self, queue_entry,
522                                                   include_verify=True):
523        if include_verify:
524            self._run_pre_job_verify(queue_entry)
525        self._run_dispatcher() # job
526        self.mock_drone_manager.finish_process(_PidfileType.JOB)
527        self._run_dispatcher() # parsing + cleanup
528        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
529        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
530                                               exit_status=256)
531        self._run_dispatcher() # repair, HQE unaffected
532        return queue_entry
533
534
535    def test_post_job_cleanup_failure(self):
536        queue_entry = self._setup_for_post_job_cleanup()
537        self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
538        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
539                             HostStatus.REPAIRING)
540        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
541        self._run_dispatcher()
542        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
543
544
545    def test_post_job_cleanup_failure_repair_failure(self):
546        queue_entry = self._setup_for_post_job_cleanup()
547        self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
548        self.mock_drone_manager.finish_process(_PidfileType.REPAIR,
549                                               exit_status=256)
550        self._run_dispatcher()
551        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
552                             HostStatus.REPAIR_FAILED)
553
554
555    def _ensure_post_job_process_is_paired(self, queue_entry, pidfile_type):
556        pidfile_name = _PIDFILE_TYPE_TO_PIDFILE[pidfile_type]
557        queue_entry = self._update_instance(queue_entry)
558        pidfile_id = self.mock_drone_manager.pidfile_from_path(
559                queue_entry.execution_path(), pidfile_name)
560        self.assert_(pidfile_id._paired_with_pidfile)
561
562
563    def _finish_job(self, queue_entry):
564        self._check_statuses(queue_entry, HqeStatus.RUNNING)
565        self.mock_drone_manager.finish_process(_PidfileType.JOB)
566        self._run_dispatcher() # launches parsing
567        self._check_statuses(queue_entry, HqeStatus.PARSING)
568        self._ensure_post_job_process_is_paired(queue_entry, _PidfileType.PARSE)
569        self._finish_parsing()
570
571
572    def _finish_parsing(self):
573        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
574        self._run_dispatcher()
575
576
577    def _create_reverify_request(self):
578        host = self.hosts[0]
579        models.SpecialTask.schedule_special_task(
580                host=host, task=models.SpecialTask.Task.VERIFY)
581        return host
582
583
584    def test_requested_reverify(self):
585        host = self._create_reverify_request()
586        self._run_dispatcher()
587        self._check_host_status(host, HostStatus.VERIFYING)
588        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
589        self._run_dispatcher()
590        self._check_host_status(host, HostStatus.READY)
591
592
593    def test_requested_reverify_failure(self):
594        host = self._create_reverify_request()
595        self._run_dispatcher()
596        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
597                                               exit_status=256)
598        self._run_dispatcher() # repair
599        self._check_host_status(host, HostStatus.REPAIRING)
600        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
601        self._run_dispatcher()
602        self._check_host_status(host, HostStatus.READY)
603
604
605    def _setup_for_do_not_verify(self):
606        self._initialize_test()
607        job, queue_entry = self._make_job_and_queue_entry()
608        queue_entry.host.protection = host_protections.Protection.DO_NOT_VERIFY
609        queue_entry.host.save()
610        return queue_entry
611
612
613    def test_do_not_verify_job(self):
614        queue_entry = self._setup_for_do_not_verify()
615        self._run_dispatcher() # runs job directly
616        self._finish_job(queue_entry)
617
618
619    def test_do_not_verify_job_with_cleanup(self):
620        queue_entry = self._setup_for_do_not_verify()
621        queue_entry.job.reboot_before = model_attributes.RebootBefore.ALWAYS
622        queue_entry.job.save()
623
624        self._run_dispatcher() # cleanup
625        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
626        self._run_dispatcher() # job
627        self._finish_job(queue_entry)
628
629
630    def test_do_not_verify_pre_job_cleanup_failure(self):
631        queue_entry = self._setup_for_do_not_verify()
632        queue_entry.job.reboot_before = model_attributes.RebootBefore.ALWAYS
633        queue_entry.job.save()
634
635        self._run_dispatcher() # cleanup
636        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
637                                               exit_status=256)
638        self._run_dispatcher() # failure ignored; job runs
639        self._finish_job(queue_entry)
640
641
642    def test_do_not_verify_post_job_cleanup_failure(self):
643        queue_entry = self._setup_for_do_not_verify()
644        queue_entry.job.reboot_after = model_attributes.RebootAfter.ALWAYS
645        queue_entry.job.save()
646
647        self._run_post_job_cleanup_failure_up_to_repair(queue_entry,
648                                                        include_verify=False)
649        # failure ignored, host still set to Ready
650        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
651        self._run_dispatcher() # nothing else runs
652        self._assert_nothing_is_running()
653
654
655    def test_do_not_verify_requested_reverify_failure(self):
656        host = self._create_reverify_request()
657        host.protection = host_protections.Protection.DO_NOT_VERIFY
658        host.save()
659
660        self._run_dispatcher()
661        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
662                                               exit_status=256)
663        self._run_dispatcher()
664        self._check_host_status(host, HostStatus.READY) # ignore failure
665        self._assert_nothing_is_running()
666
667
668    def test_job_abort_in_verify(self):
669        self._initialize_test()
670        job = self._create_job(hosts=[1])
671        queue_entries = list(job.hostqueueentry_set.all())
672        self._run_dispatcher() # launches verify
673        self._check_statuses(queue_entries[0], HqeStatus.VERIFYING)
674        job.hostqueueentry_set.update(aborted=True)
675        self._run_dispatcher() # kills verify, launches cleanup
676        self.assert_(self.mock_drone_manager.was_last_process_killed(
677                _PidfileType.VERIFY, set([signal.SIGKILL])))
678        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
679        self._run_dispatcher()
680
681
682    def test_job_abort(self):
683        self._initialize_test()
684        job = self._create_job(hosts=[1])
685        job.run_reset = False
686        job.save()
687        queue_entries = list(job.hostqueueentry_set.all())
688
689        self._run_dispatcher() # launches job
690
691        self._check_statuses(queue_entries[0], HqeStatus.RUNNING)
692
693        job.hostqueueentry_set.update(aborted=True)
694
695        self._run_dispatcher() # kills job, launches gathering
696
697        self._check_statuses(queue_entries[0], HqeStatus.GATHERING)
698        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
699        self._run_dispatcher() # launches parsing + cleanup
700        queue_entry = job.hostqueueentry_set.all()[0]
701        self._finish_parsing()
702        # The abort will cause gathering to launch a cleanup.
703        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
704        self._run_dispatcher()
705        self.mock_drone_manager.finish_process(_PidfileType.RESET)
706        self._run_dispatcher()
707
708
709    def test_job_abort_queued_synchronous(self):
710        self._initialize_test()
711        job = self._create_job(hosts=[1,2])
712        job.synch_count = 2
713        job.save()
714
715        job.hostqueueentry_set.update(aborted=True)
716        self._run_dispatcher()
717        for host_queue_entry in job.hostqueueentry_set.all():
718            self.assertEqual(host_queue_entry.status,
719                             HqeStatus.ABORTED)
720
721
722    def test_no_pidfile_leaking(self):
723        self._initialize_test()
724
725        self.test_simple_job()
726        self.mock_drone_manager.refresh()
727        self.assertEquals(self.mock_drone_manager._pidfiles, {})
728
729        self.test_job_abort_in_verify()
730        self.mock_drone_manager.refresh()
731        self.assertEquals(self.mock_drone_manager._pidfiles, {})
732
733        self.test_job_abort()
734        self.mock_drone_manager.refresh()
735        self.assertEquals(self.mock_drone_manager._pidfiles, {})
736
737
738    def _make_job_and_queue_entry(self):
739        job = self._create_job(hosts=[1])
740        queue_entry = job.hostqueueentry_set.all()[0]
741        return job, queue_entry
742
743
744    def test_recover_running_no_process(self):
745        # recovery should re-execute a Running HQE if no process is found
746        _, queue_entry = self._make_job_and_queue_entry()
747        queue_entry.status = HqeStatus.RUNNING
748        queue_entry.execution_subdir = '1-myuser/host1'
749        queue_entry.save()
750        queue_entry.host.status = HostStatus.RUNNING
751        queue_entry.host.save()
752
753        self._initialize_test()
754        self._run_dispatcher()
755        self._finish_job(queue_entry)
756
757
758    def test_recover_verifying_hqe_no_special_task(self):
759        # recovery should move a Resetting HQE with no corresponding
760        # Verify or Reset SpecialTask back to Queued.
761        _, queue_entry = self._make_job_and_queue_entry()
762        queue_entry.status = HqeStatus.RESETTING
763        queue_entry.save()
764
765        # make some dummy SpecialTasks that shouldn't count
766        models.SpecialTask.objects.create(
767                host=queue_entry.host,
768                task=models.SpecialTask.Task.RESET,
769                requested_by=models.User.current_user())
770        models.SpecialTask.objects.create(
771                host=queue_entry.host,
772                task=models.SpecialTask.Task.CLEANUP,
773                queue_entry=queue_entry,
774                is_complete=True,
775                requested_by=models.User.current_user())
776
777        self._initialize_test()
778        self._check_statuses(queue_entry, HqeStatus.QUEUED)
779
780
781    def _test_recover_verifying_hqe_helper(self, task, pidfile_type):
782        _, queue_entry = self._make_job_and_queue_entry()
783        queue_entry.status = HqeStatus.VERIFYING
784        queue_entry.save()
785
786        special_task = models.SpecialTask.objects.create(
787                host=queue_entry.host, task=task, queue_entry=queue_entry)
788
789        self._initialize_test()
790        self._run_dispatcher()
791        self.mock_drone_manager.finish_process(pidfile_type)
792        self._run_dispatcher()
793        # don't bother checking the rest of the job execution, as long as the
794        # SpecialTask ran
795
796
797    def test_recover_verifying_hqe_with_cleanup(self):
798        # recover an HQE that was in pre-job cleanup
799        self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.CLEANUP,
800                                                _PidfileType.CLEANUP)
801
802
803    def test_recover_verifying_hqe_with_verify(self):
804        # recover an HQE that was in pre-job verify
805        self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.VERIFY,
806                                                _PidfileType.VERIFY)
807
808
809    def test_recover_parsing(self):
810        self._initialize_test()
811        job, queue_entry = self._make_job_and_queue_entry()
812        job.run_verify = False
813        job.run_reset = False
814        job.reboot_after = model_attributes.RebootAfter.NEVER
815        job.save()
816
817        self._run_dispatcher() # launches job
818        self.mock_drone_manager.finish_process(_PidfileType.JOB)
819        self._run_dispatcher() # launches parsing
820
821        # now "restart" the scheduler
822        self._create_dispatcher()
823        self._initialize_test()
824        self._run_dispatcher()
825        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
826        self._run_dispatcher()
827
828
829    def test_recover_parsing__no_process_already_aborted(self):
830        _, queue_entry = self._make_job_and_queue_entry()
831        queue_entry.execution_subdir = 'host1'
832        queue_entry.status = HqeStatus.PARSING
833        queue_entry.aborted = True
834        queue_entry.save()
835
836        self._initialize_test()
837        self._run_dispatcher()
838
839
840    def test_job_scheduled_just_after_abort(self):
841        # test a pretty obscure corner case where a job is aborted while queued,
842        # another job is ready to run, and throttling is active. the post-abort
843        # cleanup must not be pre-empted by the second job.
844        # This test kind of doesn't make sense anymore after verify+cleanup
845        # were merged into reset.  It should maybe just be removed.
846        job1, queue_entry1 = self._make_job_and_queue_entry()
847        queue_entry1.save()
848        job2, queue_entry2 = self._make_job_and_queue_entry()
849        job2.reboot_before = model_attributes.RebootBefore.IF_DIRTY
850        job2.save()
851
852        self.mock_drone_manager.process_capacity = 0
853        self._run_dispatcher() # schedule job1, but won't start verify
854        job1.hostqueueentry_set.update(aborted=True)
855        self.mock_drone_manager.process_capacity = 100
856        self._run_dispatcher() # reset must run here, not verify for job2
857        self._check_statuses(queue_entry1, HqeStatus.ABORTED,
858                             HostStatus.RESETTING)
859        self.mock_drone_manager.finish_process(_PidfileType.RESET)
860        self._run_dispatcher() # now verify starts for job2
861        self._check_statuses(queue_entry2, HqeStatus.RUNNING,
862                             HostStatus.RUNNING)
863
864
865    def test_reverify_interrupting_pre_job(self):
866        # ensure things behave sanely if a reverify is scheduled in the middle
867        # of pre-job actions
868        _, queue_entry = self._make_job_and_queue_entry()
869
870        self._run_dispatcher() # pre-job verify
871        self._create_reverify_request()
872        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
873                                               exit_status=256)
874        self._run_dispatcher() # repair
875        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
876        self._run_dispatcher() # reverify runs now
877        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
878        self._run_dispatcher() # pre-job verify
879        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
880        self._run_dispatcher() # and job runs...
881        self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)
882        self._finish_job(queue_entry) # reverify has been deleted
883        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
884                             HostStatus.READY)
885        self._assert_nothing_is_running()
886
887
888    def test_reverify_while_job_running(self):
889        # once a job is running, a reverify must not be allowed to preempt
890        # Gathering
891        _, queue_entry = self._make_job_and_queue_entry()
892        self._run_pre_job_verify(queue_entry)
893        self._run_dispatcher() # job runs
894        self._create_reverify_request()
895        # make job end with a signal, so gathering will run
896        self.mock_drone_manager.finish_process(_PidfileType.JOB,
897                                               exit_status=271)
898        self._run_dispatcher() # gathering must start
899        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
900        self._run_dispatcher() # parsing and cleanup
901        self._finish_parsing()
902        self._run_dispatcher() # now reverify runs
903        self._check_statuses(queue_entry, HqeStatus.FAILED,
904                             HostStatus.VERIFYING)
905        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
906        self._run_dispatcher()
907        self._check_host_status(queue_entry.host, HostStatus.READY)
908
909
910    def test_reverify_while_host_pending(self):
911        # ensure that if a reverify is scheduled while a host is in Pending, it
912        # won't run until the host is actually free
913        job = self._create_job(hosts=[1,2])
914        queue_entry = job.hostqueueentry_set.get(host__hostname='host1')
915        job.synch_count = 2
916        job.save()
917
918        host2 = self.hosts[1]
919        host2.locked = True
920        host2.save()
921
922        self._run_dispatcher() # verify host1
923        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
924        self._run_dispatcher() # host1 Pending
925        self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)
926        self._create_reverify_request()
927        self._run_dispatcher() # nothing should happen here
928        self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)
929
930        # now let the job run
931        host2.locked = False
932        host2.save()
933        self._run_dispatcher() # verify host2
934        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
935        self._run_dispatcher() # run job
936        self._finish_job(queue_entry)
937        # the reverify should now be running
938        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
939                             HostStatus.VERIFYING)
940        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
941        self._run_dispatcher()
942        self._check_host_status(queue_entry.host, HostStatus.READY)
943
944
945    def test_throttling(self):
946        job = self._create_job(hosts=[1,2,3])
947        job.synch_count = 3
948        job.save()
949
950        queue_entries = list(job.hostqueueentry_set.all())
951        def _check_hqe_statuses(*statuses):
952            for queue_entry, status in zip(queue_entries, statuses):
953                self._check_statuses(queue_entry, status)
954
955        self.mock_drone_manager.process_capacity = 2
956        self._run_dispatcher() # verify runs on 1 and 2
957        queue_entries = list(job.hostqueueentry_set.all())
958        _check_hqe_statuses(HqeStatus.QUEUED,
959                            HqeStatus.VERIFYING, HqeStatus.VERIFYING)
960        self.assertEquals(len(self.mock_drone_manager.running_pidfile_ids()), 2)
961
962        self.mock_drone_manager.finish_specific_process(
963                'hosts/host3/1-verify', drone_manager.AUTOSERV_PID_FILE)
964        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
965        self._run_dispatcher() # verify runs on 3
966        _check_hqe_statuses(HqeStatus.VERIFYING, HqeStatus.PENDING,
967                            HqeStatus.PENDING)
968
969        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
970        self._run_dispatcher() # job won't run due to throttling
971        _check_hqe_statuses(HqeStatus.STARTING, HqeStatus.STARTING,
972                            HqeStatus.STARTING)
973        self._assert_nothing_is_running()
974
975        self.mock_drone_manager.process_capacity = 3
976        self._run_dispatcher() # now job runs
977        _check_hqe_statuses(HqeStatus.RUNNING, HqeStatus.RUNNING,
978                            HqeStatus.RUNNING)
979
980        self.mock_drone_manager.process_capacity = 2
981        self.mock_drone_manager.finish_process(_PidfileType.JOB,
982                                               exit_status=271)
983        self._run_dispatcher() # gathering won't run due to throttling
984        _check_hqe_statuses(HqeStatus.GATHERING, HqeStatus.GATHERING,
985                            HqeStatus.GATHERING)
986        self._assert_nothing_is_running()
987
988        self.mock_drone_manager.process_capacity = 3
989        self._run_dispatcher() # now gathering runs
990
991        self.mock_drone_manager.process_capacity = 0
992        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
993        self._run_dispatcher() # parsing runs despite throttling
994        _check_hqe_statuses(HqeStatus.PARSING, HqeStatus.PARSING,
995                            HqeStatus.PARSING)
996
997
998    def test_abort_starting_while_throttling(self):
999        self._initialize_test()
1000        job = self._create_job(hosts=[1,2], synchronous=True)
1001        queue_entry = job.hostqueueentry_set.all()[0]
1002        job.run_verify = False
1003        job.run_reset = False
1004        job.reboot_after = model_attributes.RebootAfter.NEVER
1005        job.save()
1006
1007        self.mock_drone_manager.process_capacity = 0
1008        self._run_dispatcher() # go to starting, but don't start job
1009        self._check_statuses(queue_entry, HqeStatus.STARTING,
1010                             HostStatus.PENDING)
1011
1012        job.hostqueueentry_set.update(aborted=True)
1013        self._run_dispatcher()
1014        self._check_statuses(queue_entry, HqeStatus.GATHERING,
1015                             HostStatus.RUNNING)
1016
1017        self.mock_drone_manager.process_capacity = 5
1018        self._run_dispatcher()
1019        self._check_statuses(queue_entry, HqeStatus.ABORTED,
1020                             HostStatus.CLEANING)
1021
1022
1023    def test_simple_metahost_assignment(self):
1024        job = self._create_job(metahosts=[1])
1025        self._run_dispatcher()
1026        entry = job.hostqueueentry_set.all()[0]
1027        self.assertEquals(entry.host.hostname, 'host1')
1028        self._check_statuses(entry, HqeStatus.VERIFYING, HostStatus.VERIFYING)
1029        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
1030        self._run_dispatcher()
1031        self._check_statuses(entry, HqeStatus.RUNNING, HostStatus.RUNNING)
1032        # rest of job proceeds normally
1033
1034
1035    def test_metahost_fail_verify(self):
1036        self.hosts[1].labels.add(self.labels[0]) # put label1 also on host2
1037        job = self._create_job(metahosts=[1])
1038        self._run_dispatcher() # assigned to host1
1039        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
1040                                               exit_status=256)
1041        self._run_dispatcher() # host1 failed, gets reassigned to host2
1042        entry = job.hostqueueentry_set.all()[0]
1043        self.assertEquals(entry.host.hostname, 'host2')
1044        self._check_statuses(entry, HqeStatus.VERIFYING, HostStatus.VERIFYING)
1045        self._check_host_status(self.hosts[0], HostStatus.REPAIRING)
1046
1047        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
1048        self._run_dispatcher()
1049        self._check_statuses(entry, HqeStatus.RUNNING, HostStatus.RUNNING)
1050
1051
1052    def test_hostless_job(self):
1053        job = self._create_job(hostless=True)
1054        entry = job.hostqueueentry_set.all()[0]
1055
1056        self._run_dispatcher()
1057        self._check_entry_status(entry, HqeStatus.RUNNING)
1058
1059        self.mock_drone_manager.finish_process(_PidfileType.JOB)
1060        self._run_dispatcher()
1061        self._check_entry_status(entry, HqeStatus.PARSING)
1062        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
1063        self._run_dispatcher()
1064        self._check_entry_status(entry, HqeStatus.COMPLETED)
1065
1066
1067    def test_pre_job_keyvals(self):
1068        job = self._create_job(hosts=[1])
1069        job.run_verify = False
1070        job.run_reset = False
1071        job.reboot_before = model_attributes.RebootBefore.NEVER
1072        job.save()
1073        models.JobKeyval.objects.create(job=job, key='mykey', value='myvalue')
1074
1075        self._run_dispatcher()
1076        self._finish_job(job.hostqueueentry_set.all()[0])
1077
1078        attached_files = self.mock_drone_manager.attached_files(
1079                '1-autotest_system/host1')
1080        job_keyval_path = '1-autotest_system/host1/keyval'
1081        self.assert_(job_keyval_path in attached_files, attached_files)
1082        keyval_contents = attached_files[job_keyval_path]
1083        keyval_dict = dict(line.strip().split('=', 1)
1084                           for line in keyval_contents.splitlines())
1085        self.assert_('job_queued' in keyval_dict, keyval_dict)
1086        self.assertEquals(keyval_dict['mykey'], 'myvalue')
1087
1088
1089# This tests the scheduler functions with archiving step disabled
1090class SchedulerFunctionalTestNoArchiving(SchedulerFunctionalTest):
1091    def _set_global_config_values(self):
1092        super(SchedulerFunctionalTestNoArchiving, self
1093                )._set_global_config_values()
1094        self.mock_config.set_config_value('SCHEDULER', 'enable_archiving',
1095                                          False)
1096
1097
1098    def _finish_parsing(self):
1099        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
1100        self._run_dispatcher()
1101
1102
1103    def _run_post_job_cleanup_failure_up_to_repair(self, queue_entry,
1104                                                   include_verify=True):
1105        if include_verify:
1106            self._run_pre_job_verify(queue_entry)
1107        self._run_dispatcher() # job
1108        self.mock_drone_manager.finish_process(_PidfileType.JOB)
1109        self._run_dispatcher() # parsing + cleanup
1110        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
1111        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
1112                                               exit_status=256)
1113        self._run_dispatcher() # repair, HQE unaffected
1114        return queue_entry
1115
1116
1117    def test_hostless_job(self):
1118        job = self._create_job(hostless=True)
1119        entry = job.hostqueueentry_set.all()[0]
1120
1121        self._run_dispatcher()
1122        self._check_entry_status(entry, HqeStatus.RUNNING)
1123
1124        self.mock_drone_manager.finish_process(_PidfileType.JOB)
1125        self._run_dispatcher()
1126        self._check_entry_status(entry, HqeStatus.PARSING)
1127        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
1128        self._run_dispatcher()
1129        self._check_entry_status(entry, HqeStatus.COMPLETED)
1130
1131    def test_synchronous_with_reset(self):
1132        # For crbug/621257.
1133        job = self._create_job(hosts=[1, 2])
1134        job.synch_count = 2
1135        job.reboot_before = model_attributes.RebootBefore.ALWAYS
1136        job.save()
1137
1138        hqe1 = job.hostqueueentry_set.get(host__hostname='host1')
1139        hqe2 = job.hostqueueentry_set.get(host__hostname='host2')
1140
1141        self._run_dispatcher()
1142
1143        self._check_statuses(hqe1, HqeStatus.RESETTING, HostStatus.RESETTING)
1144        self._check_statuses(hqe2, HqeStatus.RESETTING, HostStatus.RESETTING)
1145
1146        self.mock_drone_manager.finish_active_process_on_host(1)
1147        self._run_dispatcher()
1148
1149        self._check_statuses(hqe1, HqeStatus.PENDING, HostStatus.PENDING)
1150        self._check_statuses(hqe2, HqeStatus.RESETTING, HostStatus.RESETTING)
1151
1152        self.mock_drone_manager.finish_active_process_on_host(2)
1153        self._run_dispatcher()
1154
1155        self._check_statuses(hqe1, HqeStatus.RUNNING, HostStatus.RUNNING)
1156        self._check_statuses(hqe2, HqeStatus.RUNNING, HostStatus.RUNNING)
1157
1158
1159if __name__ == '__main__':
1160    unittest.main()
1161