"""
The main job wrapper for the server side.

This is the core infrastructure. Derived from the client side job.py

Copyright Martin J. Bligh, Andy Whitcroft 2007
"""

__author__ = """
Martin J. Bligh <mbligh@google.com>
Andy Whitcroft <apw@shadowen.org>
"""

import re, sys
from autotest_lib.server import base_server_job

# a file-like object for catching stderr from an autotest client and
# extracting status logs from it
class client_logger(object):
    """Partial file object to write to both stdout and
    the status log file.  We only implement those methods
    utils.run() actually calls: write(), flush() and close().

    Lines of the form "AUTOTEST_STATUS:<tag>:<payload>" are status
    messages from the client; every other line is echoed to stdout
    unchanged (e.g. ssh error noise).
    """
    # matches "AUTOTEST_STATUS:<tag>:<payload>"; the tag may be empty
    parser = re.compile(r"^AUTOTEST_STATUS:([^:]*):(.*)$")
    # captures the leading tab indentation of a status-log line
    extract_indent = re.compile(r"^(\t*).*$")

    def __init__(self, job):
        self.job = job
        self.leftover = ""   # trailing partial line from the last write()
        self.last_line = ""  # most recent line pushed to the status log
        self.logs = {}       # nested per-tag buffers of pending log lines

    def _process_log_dict(self, log_dict):
        """Flatten (and empty) a nested log dict into an ordered list of
        lines: a node's own "logs" entries come before any of its
        children, and children are visited in sorted tag order."""
        log_list = log_dict.pop("logs", [])
        # sorted() snapshots the keys, so popping inside the loop is safe;
        # after removing "logs" above, the remaining keys are all ints
        for key in sorted(log_dict.keys()):
            log_list += self._process_log_dict(log_dict.pop(key))
        return log_list

    def _process_logs(self):
        """Go through the accumulated logs in self.logs and print them
        out to stdout and the status log. Note that this processes
        logs in an ordering where:

        1) logs to different tags are never interleaved
        2) logs to x.y come before logs to x.y.z for all z
        3) logs to x.y come before x.z whenever y < z

        Note that this will in general not be the same as the
        chronological ordering of the logs. However, if a chronological
        ordering is desired that one can be reconstructed from the
        status log by looking at timestamp lines."""
        log_list = self._process_log_dict(self.logs)
        for line in log_list:
            self.job._record_prerendered(line + '\n')
        if log_list:
            self.last_line = log_list[-1]

    def _process_quoted_line(self, tag, line):
        """Process a line quoted with an AUTOTEST_STATUS flag. If the
        tag is blank then we want to push out all the data we've been
        building up in self.logs, and then the newest line. If the
        tag is not blank, then push the line into the logs for handling
        later."""
        print(line)
        if tag == "":
            self._process_logs()
            self.job._record_prerendered(line + '\n')
            self.last_line = line
        else:
            # tags look like "1.2.3"; descend/create one nesting level
            # per dot-separated component
            tag_parts = [int(x) for x in tag.split(".")]
            log_dict = self.logs
            for part in tag_parts:
                log_dict = log_dict.setdefault(part, {})
            log_list = log_dict.setdefault("logs", [])
            log_list.append(line)

    def _process_line(self, line):
        """Write out a line of data to the appropriate stream. Status
        lines sent by autotest will be prepended with
        "AUTOTEST_STATUS", and all other lines are ssh error
        messages."""
        match = self.parser.search(line)
        if match:
            tag, line = match.groups()
            self._process_quoted_line(tag, line)
        else:
            print(line)

    def _format_warnings(self, last_line, warnings):
        """Render (timestamp, msg) warning pairs into WARN status lines,
        indented to match the log line they follow."""
        # use the indentation of whatever the last log line was
        indent = self.extract_indent.match(last_line).group(1)
        # if the last line starts a new group, add an extra indent
        if last_line.lstrip('\t').startswith("START\t"):
            indent += '\t'
        return [self.job._render_record("WARN", None, None, msg,
                                        timestamp, indent).rstrip('\n')
                for timestamp, msg in warnings]

    def _process_warnings(self, last_line, log_dict, warnings):
        """Recursively push console warnings into the deepest pending
        sub-job buffers so they appear with the right indentation."""
        # NOTE: list() is needed on python 3, where keys() is a view
        # and would never compare equal to a list
        if list(log_dict.keys()) in ([], ["logs"]):
            # there are no sub-jobs, just append the warnings here
            warnings = self._format_warnings(last_line, warnings)
            log_list = log_dict.setdefault("logs", [])
            log_list += warnings
            for warning in warnings:
                sys.stdout.write(warning + '\n')
        else:
            # there are sub-jobs, so put the warnings in there
            log_list = log_dict.get("logs", [])
            if log_list:
                last_line = log_list[-1]
            # exclude "logs" before sorting; the remaining keys are all
            # ints (mixed int/str sorting would raise on python 3)
            for key in sorted(k for k in log_dict if k != "logs"):
                self._process_warnings(last_line,
                                       log_dict[key],
                                       warnings)

    def write(self, data):
        # first check for any new console warnings
        warnings = self.job._read_warnings()
        self._process_warnings(self.last_line, self.logs, warnings)
        # now process the newest data written out
        data = self.leftover + data
        lines = data.split("\n")
        # process every line but the last one
        for line in lines[:-1]:
            self._process_line(line)
        # save the last line for later processing
        # since we may not have the whole line yet
        self.leftover = lines[-1]

    def flush(self):
        sys.stdout.flush()

    def close(self):
        # drain any partial line, then flush every buffered tagged log
        if self.leftover:
            self._process_line(self.leftover)
        self._process_logs()
        self.flush()
| 149 | |
# site_server_job.py may be non-existent or empty, make sure that an
# appropriate site_server_job class is created nevertheless
try:
    from autotest_lib.server.site_server_job import site_server_job
except ImportError:
    # no site-specific customization is installed; fall back to a plain
    # subclass of the stock base_server_job so the name always exists
    class site_server_job(base_server_job.base_server_job):
        pass
| 157 | |
class server_job(site_server_job):
    """Concrete server-side job class.  Inherits everything from
    site_server_job (and therefore base_server_job), picking up any
    site-specific overrides when they are installed."""
    pass