# pylint: disable=missing-docstring

import cPickle as pickle
import copy
import errno
import fcntl
import logging
import os
import re
import tempfile
import time
import traceback
import weakref
from autotest_lib.client.common_lib import autotemp, error, log


class job_directory(object):
    """Represents a job.*dir directory."""


    class JobDirectoryException(error.AutotestError):
        """Generic job_directory exception superclass."""


    class MissingDirectoryException(JobDirectoryException):
        """Raised when a directory required by the job does not exist."""
        def __init__(self, path):
            Exception.__init__(self, 'Directory %s does not exist' % path)


    class UncreatableDirectoryException(JobDirectoryException):
        """Raised when a directory required by the job is missing and cannot
        be created."""
        def __init__(self, path, error):
            msg = 'Creation of directory %s failed with exception %s'
            msg %= (path, error)
            Exception.__init__(self, msg)


    class UnwritableDirectoryException(JobDirectoryException):
        """Raised when a writable directory required by the job exists
        but is not writable."""
        def __init__(self, path):
            msg = 'Directory %s exists but is not writable' % path
            Exception.__init__(self, msg)


    def __init__(self, path, is_writable=False):
        """
        Instantiate a job directory.

        @param path: The path of the directory. If None a temporary directory
            will be created instead.
        @param is_writable: If True, expect the directory to be writable.

        @raise MissingDirectoryException: raised if is_writable=False and the
            directory does not exist.
        @raise UnwritableDirectoryException: raised if is_writable=True and
            the directory exists but is not writable.
        @raise UncreatableDirectoryException: raised if is_writable=True, the
            directory does not exist and it cannot be created.
        """
        if path is None:
            if is_writable:
                self._tempdir = autotemp.tempdir(unique_id='autotest')
                self.path = self._tempdir.name
            else:
                raise self.MissingDirectoryException(path)
        else:
            self._tempdir = None
            self.path = path
        self._ensure_valid(is_writable)


    def _ensure_valid(self, is_writable):
        """
        Ensure that this is a valid directory.

        Checks that the directory at self.path exists and, optionally, that it
        is writable, creating it if necessary. Creation will still fail if the
        path is rooted in a non-writable directory, or if a file already
        exists at the given location.

        @param is_writable A boolean indicating that the directory should
            not only exist, but also be writable.

        @raises MissingDirectoryException raised if is_writable=False and the
            directory does not exist.
        @raises UnwritableDirectoryException raised if is_writable=True and
            the directory is not writable.
        @raises UncreatableDirectoryException raised if is_writable=True, the
            directory does not exist and it cannot be created.
        """
        # ensure the directory exists
        if is_writable:
            try:
                os.makedirs(self.path)
            except OSError, e:
                if e.errno != errno.EEXIST or not os.path.isdir(self.path):
                    raise self.UncreatableDirectoryException(self.path, e)
        elif not os.path.isdir(self.path):
            raise self.MissingDirectoryException(self.path)

        # if is_writable=True, also check that the directory is writable
        if is_writable and not os.access(self.path, os.W_OK):
            raise self.UnwritableDirectoryException(self.path)


    @staticmethod
    def property_factory(attribute):
        """
        Create a job.*dir -> job._*dir.path property accessor.

        @param attribute A string with the name of the attribute this is
            exposed as. '_' + attribute must then be an attribute that holds
            either None or a job_directory-like object.

        @returns A read-only property object that exposes a job_directory path
        """
        @property
        def dir_property(self):
            underlying_attribute = getattr(self, '_' + attribute)
            if underlying_attribute is None:
                return None
            else:
                return underlying_attribute.path
        return dir_property

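# A minimal usage sketch for job_directory and its property_factory helper
# (illustrative only; the class name and directory path below are made-up
# examples, mirroring how base_job wires up its *dir properties further down):
#
#     class fake_job(object):
#         resultdir = job_directory.property_factory('resultdir')
#
#         def __init__(self, path):
#             # creates the directory if needed because is_writable=True
#             self._resultdir = job_directory(path, is_writable=True)
#
#     job = fake_job('/tmp/example_results')
#     # job.resultdir -> '/tmp/example_results'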

# decorator for use with job_state methods
def with_backing_lock(method):
    """A decorator to perform a lock-*-unlock cycle.

    When applied to a method, this decorator will automatically wrap
    calls to the method in a backing file lock before the call, followed
    by a backing file unlock after it.
    """
    def wrapped_method(self, *args, **dargs):
        already_have_lock = self._backing_file_lock is not None
        if not already_have_lock:
            self._lock_backing_file()
        try:
            return method(self, *args, **dargs)
        finally:
            if not already_have_lock:
                self._unlock_backing_file()
    wrapped_method.__name__ = method.__name__
    wrapped_method.__doc__ = method.__doc__
    return wrapped_method


# decorator for use with job_state methods
def with_backing_file(method):
    """A decorator to perform a lock-read-*-write-unlock cycle.

    When applied to a method, this decorator will automatically wrap
    calls to the method in a lock-and-read before the call, followed by a
    write-and-unlock after it. Any method that reads or writes state
    should be decorated with it to ensure that backing file state is
    consistently maintained.
    """
    @with_backing_lock
    def wrapped_method(self, *args, **dargs):
        self._read_from_backing_file()
        try:
            return method(self, *args, **dargs)
        finally:
            self._write_to_backing_file()
    wrapped_method.__name__ = method.__name__
    wrapped_method.__doc__ = method.__doc__
    return wrapped_method

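# A rough sketch of how the two decorators above compose (written out here
# only for illustration; the lock steps are skipped when the lock is already
# held by an outer decorated call):
#
#     @with_backing_file
#     def get(self, namespace, name, default=NO_DEFAULT):
#         ...
#
# behaves approximately like:
#
#     def get(self, namespace, name, default=NO_DEFAULT):
#         self._lock_backing_file()
#         try:
#             self._read_from_backing_file()
#             try:
#                 ...                          # original method body
#             finally:
#                 self._write_to_backing_file()
#         finally:
#             self._unlock_backing_file()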


class job_state(object):
    """A class for managing explicit job and user state, optionally persistent.

    The class allows you to save state by name (like a dictionary). Any state
    stored in this class should be picklable and deep copyable. While this is
    not enforced, it is recommended that only valid python identifiers be used
    as names. Additionally, the namespace 'stateful_property' is used for
    storing the values associated with properties constructed using the
    property_factory method.
    """

    NO_DEFAULT = object()
    PICKLE_PROTOCOL = 2  # highest protocol available in python 2.4


    def __init__(self):
        """Initialize the job state."""
        self._state = {}
        self._backing_file = None
        self._backing_file_initialized = False
        self._backing_file_lock = None


    def _lock_backing_file(self):
        """Acquire a lock on the backing file."""
        if self._backing_file:
            self._backing_file_lock = open(self._backing_file, 'a')
            fcntl.flock(self._backing_file_lock, fcntl.LOCK_EX)


    def _unlock_backing_file(self):
        """Release a lock on the backing file."""
        if self._backing_file_lock:
            fcntl.flock(self._backing_file_lock, fcntl.LOCK_UN)
            self._backing_file_lock.close()
            self._backing_file_lock = None


    def read_from_file(self, file_path, merge=True):
        """Read in any state from the file at file_path.

        When merge=True, any state specified only in-memory will be preserved.
        Any state specified on-disk will be set in-memory, even if an in-memory
        setting already exists.

        @param file_path: The path where the state should be read from. It must
            exist but it can be empty.
        @param merge: If true, merge the on-disk state with the in-memory
            state. If false, replace the in-memory state with the on-disk
            state.

        @warning: This method is intentionally concurrency-unsafe. It makes no
            attempt to control concurrent access to the file at file_path.
        """

        # we can assume that the file exists
        if os.path.getsize(file_path) == 0:
            on_disk_state = {}
        else:
            on_disk_state = pickle.load(open(file_path))

        if merge:
            # merge the on-disk state with the in-memory state
            for namespace, namespace_dict in on_disk_state.iteritems():
                in_memory_namespace = self._state.setdefault(namespace, {})
                for name, value in namespace_dict.iteritems():
                    if name in in_memory_namespace:
                        if in_memory_namespace[name] != value:
                            logging.info('Persistent value of %s.%s from %s '
                                         'overriding existing in-memory '
                                         'value', namespace, name, file_path)
                            in_memory_namespace[name] = value
                        else:
                            logging.debug('Value of %s.%s is unchanged, '
                                          'skipping import', namespace, name)
                    else:
                        logging.debug('Importing %s.%s from state file %s',
                                      namespace, name, file_path)
                        in_memory_namespace[name] = value
        else:
            # just replace the in-memory state with the on-disk state
            self._state = on_disk_state

        # lock the backing file before we refresh it
        with_backing_lock(self.__class__._write_to_backing_file)(self)


    def write_to_file(self, file_path):
        """Write out the current state to the given path.

        @param file_path: The path where the state should be written out to.
            Must be writable.

        @warning: This method is intentionally concurrency-unsafe. It makes no
            attempt to control concurrent access to the file at file_path.
        """
        outfile = open(file_path, 'w')
        try:
            pickle.dump(self._state, outfile, self.PICKLE_PROTOCOL)
        finally:
            outfile.close()


    def _read_from_backing_file(self):
        """Refresh the current state from the backing file.

        If the backing file has never been read before (indicated by checking
        self._backing_file_initialized) it will merge the file with the
        in-memory state, rather than overwriting it.
        """
        if self._backing_file:
            merge_backing_file = not self._backing_file_initialized
            self.read_from_file(self._backing_file, merge=merge_backing_file)
            self._backing_file_initialized = True


    def _write_to_backing_file(self):
        """Flush the current state to the backing file."""
        if self._backing_file:
            self.write_to_file(self._backing_file)


    @with_backing_file
    def _synchronize_backing_file(self):
        """Synchronizes the contents of the in-memory and on-disk state."""
        # state is implicitly synchronized in _with_backing_file methods
        pass


    def set_backing_file(self, file_path):
        """Change the path used as the backing file for the persistent state.

        When a new backing file is specified, if the file already exists then
        its contents will be added into the current state, with conflicts
        between the file and memory being resolved in favor of the file
        contents. The file will then be kept in sync with the (combined)
        in-memory state. The syncing can be disabled by setting file_path
        to None.

        @param file_path: A path on the filesystem that can be read from and
            written to, or None to turn off the backing store.
        """
        self._synchronize_backing_file()
        self._backing_file = file_path
        self._backing_file_initialized = False
        self._synchronize_backing_file()


    @with_backing_file
    def get(self, namespace, name, default=NO_DEFAULT):
        """Returns the value associated with a particular name.

        @param namespace: The namespace that the property should be stored in.
        @param name: The name the value was saved with.
        @param default: A default value to return if no state is currently
            associated with name.

        @return: A deep copy of the value associated with name. Note that this
            explicitly returns a deep copy to avoid problems with mutable
            values; mutations are not persisted or shared.
        @raise KeyError: raised when no state is associated with name and a
            default value is not provided.
        """
        if self.has(namespace, name):
            return copy.deepcopy(self._state[namespace][name])
        elif default is self.NO_DEFAULT:
            raise KeyError('No key %s in namespace %s' % (name, namespace))
        else:
            return default


    @with_backing_file
    def set(self, namespace, name, value):
        """Saves the value given with the provided name.

        @param namespace: The namespace that the property should be stored in.
        @param name: The name the value should be saved with.
        @param value: The value to save.
        """
        namespace_dict = self._state.setdefault(namespace, {})
        namespace_dict[name] = copy.deepcopy(value)
        logging.debug('Persistent state %s.%s now set to %r', namespace,
                      name, value)


    @with_backing_file
    def has(self, namespace, name):
        """Return a boolean indicating if namespace.name is defined.

        @param namespace: The namespace to check for a definition.
        @param name: The name to check for a definition.

        @return: True if the given name is defined in the given namespace and
            False otherwise.
        """
        return namespace in self._state and name in self._state[namespace]


    @with_backing_file
    def discard(self, namespace, name):
        """If namespace.name is a defined value, deletes it.

        @param namespace: The namespace that the property is stored in.
        @param name: The name the value is saved with.
        """
        if self.has(namespace, name):
            del self._state[namespace][name]
            if len(self._state[namespace]) == 0:
                del self._state[namespace]
            logging.debug('Persistent state %s.%s deleted', namespace, name)
        else:
            logging.debug(
                'Persistent state %s.%s not defined so nothing is discarded',
                namespace, name)


    @with_backing_file
    def discard_namespace(self, namespace):
        """Delete all defined namespace.* names.

        @param namespace: The namespace to be cleared.
        """
        if namespace in self._state:
            del self._state[namespace]
        logging.debug('Persistent state %s.* deleted', namespace)


    @staticmethod
    def property_factory(state_attribute, property_attribute, default,
                         namespace='global_properties'):
        """
        Create a property object for an attribute using self.get and self.set.

        @param state_attribute: A string with the name of the attribute on
            job that contains the job_state instance.
        @param property_attribute: A string with the name of the attribute
            this property is exposed as.
        @param default: A default value that should be used for this property
            if it is not set.
        @param namespace: The namespace to store the attribute value in.

        @return: A read-write property object that performs self.get calls
            to read the value and self.set calls to set it.
        """
        def getter(job):
            state = getattr(job, state_attribute)
            return state.get(namespace, property_attribute, default)
        def setter(job, value):
            state = getattr(job, state_attribute)
            state.set(namespace, property_attribute, value)
        return property(getter, setter)

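# A minimal usage sketch for job_state (illustrative only; the namespace and
# backing file path below are made-up examples):
#
#     state = job_state()
#     state.set_backing_file('/tmp/example_job_state')  # persisted via pickle
#     state.set('demo', 'iterations', 3)
#     state.get('demo', 'iterations')          # -> 3
#     state.get('demo', 'missing', default=0)  # -> 0
#     state.has('demo', 'iterations')          # -> True
#     state.discard('demo', 'iterations')
#
# Properties built with job_state.property_factory (see the base_job class
# below) read and write through the same namespaced get/set calls.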

class status_log_entry(object):
    """Represents a single status log entry."""

    RENDERED_NONE_VALUE = '----'
    TIMESTAMP_FIELD = 'timestamp'
    LOCALTIME_FIELD = 'localtime'

    # non-space whitespace is forbidden in any fields
    BAD_CHAR_REGEX = re.compile(r'[\t\n\r\v\f]')

    def _init_message(self, message):
        """Handle the message which describes the event to be recorded.

        Break the message into a single-line message that goes into the
        database, and a block of additional lines that goes into the status
        log but will never be parsed.
        When a bad character is detected in the message, replace it with a
        space instead of raising an exception that cannot be parsed by the
        tko parser.

        @param message: the input message.

        @return: None. The filtered single-line message is stored in
            self.message and the remaining lines in self.extra_message_lines.
        """
        message_lines = message.splitlines()
        if message_lines:
            self.message = message_lines[0]
            self.extra_message_lines = message_lines[1:]
        else:
            self.message = ''
            self.extra_message_lines = []

        self.message = self.message.replace('\t', ' ' * 8)
        self.message = self.BAD_CHAR_REGEX.sub(' ', self.message)


    def __init__(self, status_code, subdir, operation, message, fields,
                 timestamp=None):
        """Construct a status.log entry.

        @param status_code: A message status code. Must match the codes
            accepted by autotest_lib.common_lib.log.is_valid_status.
        @param subdir: A valid job subdirectory, or None.
        @param operation: Description of the operation, or None.
        @param message: A printable string describing the event to be recorded.
        @param fields: A dictionary of arbitrary alphanumeric key=value pairs
            to be included in the log, or None.
        @param timestamp: An optional integer timestamp, in the same format
            as a time.time() timestamp. If unspecified, the current time is
            used.

        @raise ValueError: if any of the parameters are invalid
        """
        if not log.is_valid_status(status_code):
            raise ValueError('status code %r is not valid' % status_code)
        self.status_code = status_code

        if subdir and self.BAD_CHAR_REGEX.search(subdir):
            raise ValueError('Invalid character in subdir string')
        self.subdir = subdir

        if operation and self.BAD_CHAR_REGEX.search(operation):
            raise ValueError('Invalid character in operation string')
        self.operation = operation

        self._init_message(message)

        if not fields:
            self.fields = {}
        else:
            self.fields = fields.copy()
        for key, value in self.fields.iteritems():
            if type(value) is int:
                value = str(value)
            if self.BAD_CHAR_REGEX.search(key + value):
                raise ValueError('Invalid character in %r=%r field'
                                 % (key, value))

        # build up the timestamp
        if timestamp is None:
            timestamp = int(time.time())
        self.fields[self.TIMESTAMP_FIELD] = str(timestamp)
        self.fields[self.LOCALTIME_FIELD] = time.strftime(
            '%b %d %H:%M:%S', time.localtime(timestamp))


    def is_start(self):
        """Indicates if this status log is the start of a new nested block.

        @return: A boolean indicating if this entry starts a new nested block.
        """
        return self.status_code == 'START'


    def is_end(self):
        """Indicates if this status log is the end of a nested block.

        @return: A boolean indicating if this entry ends a nested block.
        """
        return self.status_code.startswith('END ')


    def render(self):
        """Render the status log entry into a text string.

        @return: A text string suitable for writing into a status log file.
        """
        # combine all the log line data into a tab-delimited string
        subdir = self.subdir or self.RENDERED_NONE_VALUE
        operation = self.operation or self.RENDERED_NONE_VALUE
        extra_fields = ['%s=%s' % field for field in self.fields.iteritems()]
        line_items = [self.status_code, subdir, operation]
        line_items += extra_fields + [self.message]
        first_line = '\t'.join(line_items)

        # append the extra unparsable lines, two-space indented
        all_lines = [first_line]
        all_lines += ['  ' + line for line in self.extra_message_lines]
        return '\n'.join(all_lines)


    @classmethod
    def parse(cls, line):
        """Parse a status log entry from a text string.

        This method is the inverse of render; it should always be true that
        parse(entry.render()) produces a new status_log_entry equivalent to
        entry.

        @return: A new status_log_entry instance with fields extracted from the
            given status line. If the line is an extra message line then None
            is returned.
        """
        # extra message lines are always prepended with two spaces
        if line.startswith('  '):
            return None

        line = line.lstrip('\t')  # ignore indentation
        entry_parts = line.split('\t')
        if len(entry_parts) < 4:
            raise ValueError('%r is not a valid status line' % line)
        status_code, subdir, operation = entry_parts[:3]
        if subdir == cls.RENDERED_NONE_VALUE:
            subdir = None
        if operation == cls.RENDERED_NONE_VALUE:
            operation = None
        message = entry_parts[-1]
        fields = dict(part.split('=', 1) for part in entry_parts[3:-1])
        if cls.TIMESTAMP_FIELD in fields:
            timestamp = int(fields[cls.TIMESTAMP_FIELD])
        else:
            timestamp = None
        return cls(status_code, subdir, operation, message, fields, timestamp)

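# A minimal render/parse round-trip sketch (illustrative only; the status
# values shown are made-up examples):
#
#     entry = status_log_entry('GOOD', 'mytest', 'mytest', 'completed', None)
#     line = entry.render()
#     # e.g. 'GOOD\tmytest\tmytest\ttimestamp=...\tlocaltime=...\tcompleted'
#     same = status_log_entry.parse(line)
#     # 'same' is equivalent to 'entry' (see the parse docstring above)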

class status_indenter(object):
    """Abstract interface that a status log indenter should use."""

    @property
    def indent(self):
        raise NotImplementedError


    def increment(self):
        """Increase indentation by one level."""
        raise NotImplementedError

    def decrement(self):
        """Decrease indentation by one level."""
        raise NotImplementedError


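# A minimal concrete indenter satisfying the abstract interface above (a
# sketch only; real jobs may track indentation in shared or persistent state
# instead):
#
#     class simple_indenter(status_indenter):
#         def __init__(self):
#             self._indent = 0
#
#         @property
#         def indent(self):
#             return self._indent
#
#         def increment(self):
#             self._indent += 1
#
#         def decrement(self):
#             self._indent -= 1
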
class status_logger(object):
    """Represents a status log file. Responsible for translating messages
    into on-disk status log lines.

    @property global_filename: The filename to write top-level logs to.
    @property subdir_filename: The filename to write subdir-level logs to.
    """
    def __init__(self, job, indenter, global_filename='status',
                 subdir_filename='status', record_hook=None):
        """Construct a logger instance.

        @param job: A reference to the job object this is logging for. Only a
            weak reference to the job is held, to avoid a
            status_logger <-> job circular reference.
        @param indenter: A status_indenter instance, for tracking the
            indentation level.
        @param global_filename: An optional filename to initialize the
            self.global_filename attribute.
        @param subdir_filename: An optional filename to initialize the
            self.subdir_filename attribute.
        @param record_hook: An optional function to be called before an entry
            is logged. The function should expect a single parameter, a
            copy of the status_log_entry object.
        """
        self._jobref = weakref.ref(job)
        self._indenter = indenter
        self.global_filename = global_filename
        self.subdir_filename = subdir_filename
        self._record_hook = record_hook


    def render_entry(self, log_entry):
        """Render a status_log_entry as it would be written to a log file.

        @param log_entry: A status_log_entry instance to be rendered.

        @return: The status log entry, rendered as it would be written to the
            logs (including indentation).
        """
        if log_entry.is_end():
            indent = self._indenter.indent - 1
        else:
            indent = self._indenter.indent
        return '\t' * indent + log_entry.render().rstrip('\n')


    def record_entry(self, log_entry, log_in_subdir=True):
        """Record a status_log_entry into the appropriate status log files.

        @param log_entry: A status_log_entry instance to be recorded into the
                status logs.
        @param log_in_subdir: A boolean that indicates (when true) that subdir
                logs should be written into the subdirectory status log file.
        """
        # acquire a strong reference for the duration of the method
        job = self._jobref()
        if job is None:
            logging.warning('Something attempted to write a status log entry '
                            'after its job terminated, ignoring the attempt.')
            logging.warning(traceback.format_stack())
            return

        # call the record hook if one was given
        if self._record_hook:
            self._record_hook(log_entry)

        # figure out where we need to log to
        log_files = [os.path.join(job.resultdir, self.global_filename)]
        if log_in_subdir and log_entry.subdir:
            log_files.append(os.path.join(job.resultdir, log_entry.subdir,
                                          self.subdir_filename))

        # write the entry out to the log files
        log_text = self.render_entry(log_entry)
        for log_file in log_files:
            fileobj = open(log_file, 'a')
            try:
                print >> fileobj, log_text
            finally:
                fileobj.close()

        # adjust the indentation if this was a START or END entry
        if log_entry.is_start():
            self._indenter.increment()
        elif log_entry.is_end():
            self._indenter.decrement()

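# A minimal usage sketch for status_logger (illustrative only; 'job' stands
# for any object with a resultdir attribute and 'simple_indenter' is the
# hypothetical indenter sketched above):
#
#     logger = status_logger(job, simple_indenter())
#     logger.record_entry(status_log_entry('START', None, 'mytest', '', None))
#     logger.record_entry(status_log_entry('GOOD', None, 'mytest',
#                                          'completed successfully', None))
#     logger.record_entry(status_log_entry('END GOOD', None, 'mytest', '',
#                                          None))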

class base_job(object):
    """An abstract base class for the various autotest job classes.

    @property autodir: The top level autotest directory.
    @property clientdir: The autotest client directory.
    @property serverdir: The autotest server directory. [OPTIONAL]
    @property resultdir: The directory where results should be written out.
        [WRITABLE]

    @property pkgdir: The job packages directory. [WRITABLE]
    @property tmpdir: The job temporary directory. [WRITABLE]
    @property testdir: The job test directory. [WRITABLE]
    @property site_testdir: The job site test directory. [WRITABLE]

    @property bindir: The client bin/ directory.
    @property profdir: The client profilers/ directory.
    @property toolsdir: The client tools/ directory.

    @property control: A path to the control file to be executed. [OPTIONAL]
    @property hosts: A set of all live Host objects currently in use by the
        job. Code running in the context of a local client can safely assume
        that this set contains only a single entry.
    @property machines: A list of the machine names associated with the job.
    @property user: The user executing the job.
    @property tag: A tag identifying the job. Often used by the scheduler to
        give a name of the form NUMBER-USERNAME/HOSTNAME.
    @property test_retry: The number of times to retry a test if the test did
        not complete successfully.
    @property args: A list of additional miscellaneous command-line arguments
        provided when starting the job.

    @property automatic_test_tag: A string which, if set, will be automatically
        added to the test name when running tests.

    @property default_profile_only: A boolean indicating the default value of
        profile_only used by test.execute. [PERSISTENT]
    @property drop_caches: A boolean indicating if caches should be dropped
        before each test is executed.
    @property drop_caches_between_iterations: A boolean indicating if caches
        should be dropped before each test iteration is executed.
    @property run_test_cleanup: A boolean indicating if test.cleanup should be
        run by default after a test completes, if the run_cleanup argument is
        not specified. [PERSISTENT]

    @property num_tests_run: The number of tests run during the job. [OPTIONAL]
    @property num_tests_failed: The number of tests that failed during the job.
        [OPTIONAL]

    @property harness: An instance of the client test harness. Only available
        in contexts where client test execution happens. [OPTIONAL]
    @property logging: An instance of the logging manager associated with the
        job.
    @property profilers: An instance of the profiler manager associated with
        the job.
    @property sysinfo: An instance of the sysinfo object. Only available in
        contexts where it's possible to collect sysinfo.
    @property warning_manager: A class for managing which types of WARN
        messages should be logged and which should be suppressed. [OPTIONAL]
    @property warning_loggers: A set of readable streams that will be monitored
        for WARN messages to be logged. [OPTIONAL]
    @property max_result_size_KB: Maximum size of test results that should be
        collected, in KB. [OPTIONAL]

    Abstract methods:
        _find_base_directories [CLASSMETHOD]
            Returns the location of autodir, clientdir and serverdir

        _find_resultdir
            Returns the location of resultdir. Gets a copy of any parameters
            passed into base_job.__init__. Can return None to indicate that
            no resultdir is to be used.

        _get_status_logger
            Returns a status_logger instance for recording job status logs.
    """

    # capture the dependency on several helper classes with factories
    _job_directory = job_directory
    _job_state = job_state


    # all the job directory attributes
    autodir = _job_directory.property_factory('autodir')
    clientdir = _job_directory.property_factory('clientdir')
    serverdir = _job_directory.property_factory('serverdir')
    resultdir = _job_directory.property_factory('resultdir')
    pkgdir = _job_directory.property_factory('pkgdir')
    tmpdir = _job_directory.property_factory('tmpdir')
    testdir = _job_directory.property_factory('testdir')
    site_testdir = _job_directory.property_factory('site_testdir')
    bindir = _job_directory.property_factory('bindir')
    profdir = _job_directory.property_factory('profdir')
    toolsdir = _job_directory.property_factory('toolsdir')


    # all the generic persistent properties
    tag = _job_state.property_factory('_state', 'tag', '')
    test_retry = _job_state.property_factory('_state', 'test_retry', 0)
    default_profile_only = _job_state.property_factory(
        '_state', 'default_profile_only', False)
    run_test_cleanup = _job_state.property_factory(
        '_state', 'run_test_cleanup', True)
    automatic_test_tag = _job_state.property_factory(
        '_state', 'automatic_test_tag', None)
    max_result_size_KB = _job_state.property_factory(
        '_state', 'max_result_size_KB', 0)

    # the use_sequence_number property
    _sequence_number = _job_state.property_factory(
        '_state', '_sequence_number', None)
    def _get_use_sequence_number(self):
        return bool(self._sequence_number)
    def _set_use_sequence_number(self, value):
        if value:
            self._sequence_number = 1
        else:
            self._sequence_number = None
    use_sequence_number = property(_get_use_sequence_number,
                                   _set_use_sequence_number)

    # parent job id is passed in from the autoserv command line. It's only used
    # in server jobs. The property is added here so that the unittest
    # (base_job_unittest.py) can be consistent in validating public properties
    # of a base_job object.
    parent_job_id = None

    def __init__(self, *args, **dargs):
        # initialize the base directories, all others are relative to these
        autodir, clientdir, serverdir = self._find_base_directories()
        self._autodir = self._job_directory(autodir)
        self._clientdir = self._job_directory(clientdir)
        # TODO(scottz): crosbug.com/38259, needed to pass unittests for now.
        self.label = None
        if serverdir:
            self._serverdir = self._job_directory(serverdir)
        else:
            self._serverdir = None

        # initialize all the other directories relative to the base ones
        self._initialize_dir_properties()
        self._resultdir = self._job_directory(
            self._find_resultdir(*args, **dargs), True)
        self._execution_contexts = []

        # initialize all the job state
        self._state = self._job_state()


    @classmethod
    def _find_base_directories(cls):
        raise NotImplementedError()


    def _initialize_dir_properties(self):
        """
        Initializes all the secondary self.*dir properties. Requires autodir,
        clientdir and serverdir to already be initialized.
        """
        # create some stubs for use as shortcuts
        def readonly_dir(*args):
            return self._job_directory(os.path.join(*args))
        def readwrite_dir(*args):
            return self._job_directory(os.path.join(*args), True)

        # various client-specific directories
        self._bindir = readonly_dir(self.clientdir, 'bin')
        self._profdir = readonly_dir(self.clientdir, 'profilers')
        self._pkgdir = readwrite_dir(self.clientdir, 'packages')
        self._toolsdir = readonly_dir(self.clientdir, 'tools')

        # directories which are in serverdir on a server, clientdir on a client.
        # tmp, tests, and site_tests need to be read_write for client, but only
        # read for server.
        if self.serverdir:
            root = self.serverdir
            r_or_rw_dir = readonly_dir
        else:
            root = self.clientdir
            r_or_rw_dir = readwrite_dir
        self._testdir = r_or_rw_dir(root, 'tests')
        self._site_testdir = r_or_rw_dir(root, 'site_tests')

        # various server-specific directories
        if self.serverdir:
            self._tmpdir = readwrite_dir(tempfile.gettempdir())
        else:
            self._tmpdir = readwrite_dir(root, 'tmp')


    def _find_resultdir(self, *args, **dargs):
        raise NotImplementedError()


    def push_execution_context(self, resultdir):
        """
        Save off the current context of the job and change to the given one.

        In practice this method just changes the resultdir, but it may become
        more extensive in the future. The expected use case is for when a child
        job needs to be executed in some sort of nested context (for example
        the way parallel_simple does). The original context can be restored
        with a pop_execution_context call.

        @param resultdir: The new resultdir, relative to the current one.
        """
        new_dir = self._job_directory(
            os.path.join(self.resultdir, resultdir), True)
        self._execution_contexts.append(self._resultdir)
        self._resultdir = new_dir


    def pop_execution_context(self):
        """
        Reverse the effects of the previous push_execution_context call.

        @raise IndexError: raised when the stack of contexts is empty.
        """
        if not self._execution_contexts:
            raise IndexError('No old execution context to restore')
        self._resultdir = self._execution_contexts.pop()

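    # A usage sketch for the execution-context stack (illustrative only; the
    # 'child' subdirectory name is a made-up example):
    #
    #     job.push_execution_context('child')   # results now go to .../child
    #     try:
    #         ...                                # run the nested work
    #     finally:
    #         job.pop_execution_context()        # restore the original resultdir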

    def get_state(self, name, default=_job_state.NO_DEFAULT):
        """Returns the value associated with a particular name.

        @param name: The name the value was saved with.
        @param default: A default value to return if no state is currently
            associated with name.

        @return: A deep copy of the value associated with name. Note that this
            explicitly returns a deep copy to avoid problems with mutable
            values; mutations are not persisted or shared.
        @raise KeyError: raised when no state is associated with name and a
            default value is not provided.
        """
        try:
            return self._state.get('public', name, default=default)
        except KeyError:
            raise KeyError(name)


    def set_state(self, name, value):
        """Saves the value given with the provided name.

        @param name: The name the value should be saved with.
        @param value: The value to save.
        """
        self._state.set('public', name, value)


    def _build_tagged_test_name(self, testname, dargs):
        """Builds the fully tagged testname and subdirectory for job.run_test.

        @param testname: The base name of the test
        @param dargs: The ** arguments passed to run_test. Any arguments
            consumed by this method will be removed from the dictionary.

        @return: A 3-tuple of the full name of the test, the subdirectory it
            should be stored in, and the full tag of the subdir.
        """
        tag_parts = []

        # build up the parts of the tag used for the test name
        master_testpath = dargs.get('master_testpath', "")
        base_tag = dargs.pop('tag', None)
        if base_tag:
            tag_parts.append(str(base_tag))
        if self.use_sequence_number:
            tag_parts.append('_%02d_' % self._sequence_number)
            self._sequence_number += 1
        if self.automatic_test_tag:
            tag_parts.append(self.automatic_test_tag)
        full_testname = '.'.join([testname] + tag_parts)

        # build up the subdir and tag as well
        subdir_tag = dargs.pop('subdir_tag', None)
        if subdir_tag:
            tag_parts.append(subdir_tag)
        subdir = '.'.join([testname] + tag_parts)
        subdir = os.path.join(master_testpath, subdir)
        tag = '.'.join(tag_parts)

        return full_testname, subdir, tag

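    # A quick sketch of the naming this produces (illustrative values only):
    # with tag='smoke', subdir_tag='run1', and no sequence number or automatic
    # tag, a testname of 'sleeptest' yields roughly:
    #
    #     full_testname -> 'sleeptest.smoke'
    #     subdir        -> 'sleeptest.smoke.run1'
    #     tag           -> 'smoke.run1'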

    def _make_test_outputdir(self, subdir):
        """Creates an output directory for a test to run in.

        @param subdir: The subdirectory of the test. Generally computed by
            _build_tagged_test_name.

        @return: A job_directory instance corresponding to the outputdir of
            the test.
        @raise TestError: If the output directory is invalid.
        """
        # explicitly check that this subdirectory is new
        path = os.path.join(self.resultdir, subdir)
        if os.path.exists(path):
            msg = ('%s already exists; multiple tests cannot run with the '
                   'same subdirectory' % subdir)
            raise error.TestError(msg)

        # create the outputdir and raise a TestError if it isn't valid
        try:
            outputdir = self._job_directory(path, True)
            return outputdir
        except self._job_directory.JobDirectoryException, e:
            logging.exception('%s directory creation failed with %s',
                              subdir, e)
            raise error.TestError('%s directory creation failed' % subdir)


    def record(self, status_code, subdir, operation, status='',
               optional_fields=None):
        """Record a job-level status event.

        Logs an event noteworthy to the Autotest job as a whole. Messages will
        be written into a global status log file, as well as a subdir-local
        status log file (if subdir is specified).

        @param status_code: A string status code describing the type of status
            entry being recorded. It must pass log.is_valid_status to be
            considered valid.
        @param subdir: A specific results subdirectory this also applies to, or
            None. If not None the subdirectory must exist.
        @param operation: A string describing the operation that was run.
        @param status: An optional human-readable message describing the status
            entry, for example an error message or "completed successfully".
        @param optional_fields: An optional dictionary of additional named
            fields to be included with the status message. Timestamp and
            localtime entries are always generated with the current time and
            added to this dictionary.
        """
        entry = status_log_entry(status_code, subdir, operation, status,
                                 optional_fields)
        self.record_entry(entry)


    def record_entry(self, entry, log_in_subdir=True):
        """Record a job-level status event, using a status_log_entry.

        This is the same as self.record but using an existing status log
        entry object rather than constructing one for you.

        @param entry: A status_log_entry object
        @param log_in_subdir: A boolean that indicates (when true) that subdir
                logs should be written into the subdirectory status log file.
        """
        self._get_status_logger().record_entry(entry, log_in_subdir)

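# A usage sketch for job-level status recording on a concrete job subclass
# (illustrative only; 'sleeptest' is a made-up operation name):
#
#     job.record('START', None, 'sleeptest')
#     job.record('GOOD', None, 'sleeptest', 'completed successfully')
#     job.record('END GOOD', None, 'sleeptest')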