#!/usr/bin/python
"""
Scan an autotest server results directory for job result directories that
have completed, match the given filtering options and have not yet been
published on a remote dashboard server. Each matching job directory is
rsynced to the tko server and then marked as published (a
<jobdir>/.tko_published flag file records whether a job results directory
has already been published).
"""

import sys, os, re, optparse

import common
from autotest_lib.client.common_lib import utils
from autotest_lib.server import frontend

options = optparse.Values()

USAGE = """tko-publish [options] <resultsdir> <rsync-destination-path>

Where:
<resultsdir>              Path to the directory containing the job result
                          directories to publish.

<rsync-destination-path>  Valid rsync destination path to which the job
                          result directories will be uploaded.
                          Example: user@machine.org:/home/autotest/results"""
PUBLISH_FLAGFILE = '.tko_published'
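# rsync flags: -a (archive) copies recursively and preserves permissions and
# timestamps, -q runs quietly, -z compresses file data during the transfer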
RSYNC_COMMAND = 'rsync -aqz "%s" "%s"'


def get_job_dirs(path):
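    """Return the unpublished job result directories under path.

    Job result directories are expected to be named starting with the
    numeric job id followed by a dash.
    """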
    regex = re.compile('[1-9][0-9]*-')
    jobdirs = []

    for dirname in os.listdir(path):
        # skip directories not matching the job result dir pattern
        if not regex.match(dirname):
            continue

        dirname = os.path.join(path, dirname)
        flagfile = os.path.join(dirname, PUBLISH_FLAGFILE)
        if os.path.isdir(dirname) and not os.path.exists(flagfile):
            jobdirs.append(dirname)

    return jobdirs


def publish_job(jobdir):
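    """Rsync jobdir to the destination and mark it as published."""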
    cmd = RSYNC_COMMAND % (jobdir, options.dest)
    utils.system(cmd)

    # mark the jobdir as published
    fd = open(os.path.join(jobdir, PUBLISH_FLAGFILE), 'w')
    fd.close()
    print 'Published', jobdir


def main():
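    """Publish finished, unpublished job results matching the filters."""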
    jobdirs = get_job_dirs(options.resultsdir)

    afe = frontend.AFE()
    # the AFE API currently only returns whole lists of jobs rather than
    # individual ones, so query the finished jobs once and cache the result
    finished_jobs = afe.get_jobs(finished=True)

    if options.jobname_pattern:
        jobname_pattern = re.compile(options.jobname_pattern)
    else:
        jobname_pattern = None

    # for each unpublished candidate jobdir, look it up in the database and
    # check that it has completed
    for jobdir in jobdirs:
        job_id = int(os.path.basename(jobdir).split('-')[0])
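        # exactly one entry in the cached finished job list must carry this
        # id, otherwise the job is still running or unknown and is skipped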
        job = [job for job in finished_jobs if job.id == job_id]

        if len(job) != 1:
            continue

        if jobname_pattern:
            # does it match the job name pattern?
            if not jobname_pattern.match(job[0].name):
                continue

        # does it match the requested job owner?
        if options.job_owner and options.job_owner != job[0].owner:
            continue

        publish_job(jobdir)


if __name__ == '__main__':
    parser = optparse.OptionParser(usage=USAGE)
    parser.add_option('--jobname-pattern', dest='jobname_pattern',
                      help='Regexp pattern to match against job names; by '
                      'default no matching is done.',
                      default=None)
    parser.add_option('--job-owner', dest='job_owner', default=None,
                      help='Job owner username to match against for the '
                      'published jobs; by default no matching is done.')
    options, args = parser.parse_args()

    if len(args) < 2:
        print USAGE
        sys.exit(-1)

    options.resultsdir = args[0]
    options.dest = args[1]
    main()