blob: bc399161ffacc8771f1cca075eff8b031d9d7a94 [file] [log] [blame]
mblighf1c52842007-10-16 15:21:38 +00001"""
2The main job wrapper for the server side.
3
4This is the core infrastructure. Derived from the client side job.py
5
6Copyright Martin J. Bligh, Andy Whitcroft 2007
7"""
8
9__author__ = """
10Martin J. Bligh <mbligh@google.com>
11Andy Whitcroft <apw@shadowen.org>
12"""
13
mblighdbdac6c2008-03-05 15:49:58 +000014import os, sys, re, time, select, subprocess, traceback
mbligh03f4fc72007-11-29 20:56:14 +000015import test
mblighf1c52842007-10-16 15:21:38 +000016from utils import *
mblighf31b0c02007-11-29 18:19:22 +000017from common.error import *
mblighf1c52842007-10-16 15:21:38 +000018
mbligh3f4bced2007-11-05 17:55:53 +000019# this magic incantation should give us access to a client library
20server_dir = os.path.dirname(__file__)
21client_dir = os.path.join(server_dir, "..", "client", "bin")
22sys.path.append(client_dir)
23import fd_stack
24sys.path.pop()
25
mblighed5a4102007-11-20 00:46:41 +000026# load up a control segment
27# these are all stored in <server_dir>/control_segments
28def load_control_segment(name):
29 server_dir = os.path.dirname(os.path.abspath(__file__))
mbligh7f86e0b2007-11-24 19:45:07 +000030 script_file = os.path.join(server_dir, "control_segments", name)
mblighed5a4102007-11-20 00:46:41 +000031 if os.path.exists(script_file):
32 return file(script_file).read()
33 else:
34 return ""
35
36
# Source prepended to every control segment before it is exec'd: imports
# the modules the segments rely on, binds the current job onto the
# Autotest and SSHHost classes, and records the machine list for
# multi-machine jobs.
preamble = """\
import os, sys

import hosts, autotest, kvm, git, standalone_profiler
import source_kernel, rpm_kernel, deb_kernel, git_kernel
from common.error import *
from common import barrier
from subcommand import *
from utils import run, get_tmp_dir, sh_escape

autotest.Autotest.job = job
hosts.SSHHost.job = job
barrier = barrier.barrier

if len(machines) > 1:
	open('.machines', 'w').write('\\n'.join(machines) + '\\n')
"""
54
# Control segment used when the job carries a client-side control file:
# pushes the control file out to each machine and runs it via Autotest.
client_wrapper = """
at = autotest.Autotest()

def run_client(machine):
	host = hosts.SSHHost(machine)
	at.run(control, host=host)

parallel_simple(run_client, machines)
"""
64
# Control segment that pulls any crashdumps generated since test_start_time
# off of every machine (run with logging disabled).
crashdumps = """
def crashdumps(machine):
	host = hosts.SSHHost(machine, initialize=False)
	host.get_crashdumps(test_start_time)

parallel_simple(crashdumps, machines, log=False)
"""
72
# Control segment that reboots every machine in parallel.
reboot_segment="""\
def reboot(machine):
	host = hosts.SSHHost(machine, initialize=False)
	host.reboot()

parallel_simple(reboot, machines, log=False)
"""
80
# Control segment that reinstalls (reimages) every machine in parallel.
install="""\
def install(machine):
	host = hosts.SSHHost(machine, initialize=False)
	host.machine_install()

parallel_simple(install, machines, log=False)
"""
88
# load up the verifier control segment, with an optional site-specific hook
verify = load_control_segment("site_verify")
verify += load_control_segment("verify")

# load up the repair control segment, with an optional site-specific hook
repair = load_control_segment("site_repair")
repair += load_control_segment("repair")
96
mbligh1d42d4e2007-11-05 22:42:00 +000097
# load up site-specific code for generating site-specific job data
try:
    import site_job
    get_site_job_data = site_job.get_site_job_data
    del site_job
except ImportError:
    # by default provide a stub that generates no site data
    def get_site_job_data(job):
        return {}
107
108
class base_server_job:
    """The actual job against which we do everything.

    Control segments and control files are exec'd against instances of
    this class; site code may subclass it via site_server_job.

    Properties:
        autodir
            The top level autotest directory (/usr/local/autotest).
        serverdir
            <autodir>/server/
        clientdir
            <autodir>/client/
        conmuxdir
            <autodir>/conmux/
        testdir
            <autodir>/server/tests/
        control
            the control file for this job
    """
126
mblighe8b37a92007-12-19 15:54:11 +0000127 def __init__(self, control, args, resultdir, label, user, machines,
128 client = False):
mblighf1c52842007-10-16 15:21:38 +0000129 """
130 control
131 The control file (pathname of)
132 args
133 args to pass to the control file
134 resultdir
135 where to throw the results
mbligh18420c22007-10-16 22:27:14 +0000136 label
137 label for the job
mblighf1c52842007-10-16 15:21:38 +0000138 user
139 Username for the job (email address)
140 client
141 True if a client-side control file
142 """
mbligh05269362007-10-16 16:58:11 +0000143 path = os.path.dirname(sys.modules['server_job'].__file__)
mblighf1c52842007-10-16 15:21:38 +0000144 self.autodir = os.path.abspath(os.path.join(path, '..'))
145 self.serverdir = os.path.join(self.autodir, 'server')
mbligh05269362007-10-16 16:58:11 +0000146 self.testdir = os.path.join(self.serverdir, 'tests')
147 self.tmpdir = os.path.join(self.serverdir, 'tmp')
mblighf1c52842007-10-16 15:21:38 +0000148 self.conmuxdir = os.path.join(self.autodir, 'conmux')
149 self.clientdir = os.path.join(self.autodir, 'client')
mblighe25fd5b2008-01-22 17:23:37 +0000150 if control:
151 self.control = open(control, 'r').read()
152 self.control = re.sub('\r', '', self.control)
153 else:
154 self.control = None
mblighf1c52842007-10-16 15:21:38 +0000155 self.resultdir = resultdir
156 if not os.path.exists(resultdir):
157 os.mkdir(resultdir)
mbligh3ccb8592007-11-05 18:13:40 +0000158 self.debugdir = os.path.join(resultdir, 'debug')
159 if not os.path.exists(self.debugdir):
160 os.mkdir(self.debugdir)
mbligh3dcf2c92007-10-16 22:24:00 +0000161 self.status = os.path.join(resultdir, 'status')
mbligh18420c22007-10-16 22:27:14 +0000162 self.label = label
mblighf1c52842007-10-16 15:21:38 +0000163 self.user = user
164 self.args = args
mblighe8b37a92007-12-19 15:54:11 +0000165 self.machines = machines
mblighf1c52842007-10-16 15:21:38 +0000166 self.client = client
167 self.record_prefix = ''
mblighf4e04152008-02-21 16:05:53 +0000168 self.warning_loggers = set()
mblighf1c52842007-10-16 15:21:38 +0000169
mbligh3f4bced2007-11-05 17:55:53 +0000170 self.stdout = fd_stack.fd_stack(1, sys.stdout)
171 self.stderr = fd_stack.fd_stack(2, sys.stderr)
172
mbligh3dcf2c92007-10-16 22:24:00 +0000173 if os.path.exists(self.status):
174 os.unlink(self.status)
mblighe8b37a92007-12-19 15:54:11 +0000175 job_data = { 'label' : label, 'user' : user,
176 'hostname' : ','.join(machines) }
mbligh970b94e2008-01-24 16:29:34 +0000177 job_data.update(get_site_job_data(self))
mblighf1c52842007-10-16 15:21:38 +0000178 write_keyval(self.resultdir, job_data)
179
180
    def verify(self):
        """Run the verify control segment against self.machines.

        Any failure is recorded as an ABORT in the status log and then
        re-raised to the caller.
        """
        if not self.machines:
            raise AutoservError('No machines specified to verify')
        try:
            namespace = {'machines' : self.machines, 'job' : self}
            exec(preamble + verify, namespace, namespace)
        except Exception, e:
            msg = 'Verify failed\n' + str(e) + '\n' + format_error()
            self.record('ABORT', None, None, msg)
            raise
191
192
    def repair(self):
        """Run the repair control segment, then re-verify.

        Repair failures are logged but never fatal -- verify() is always
        run afterwards and is what decides the final outcome.
        """
        if not self.machines:
            raise AutoservError('No machines specified to repair')
        namespace = {'machines' : self.machines, 'job' : self}
        # no matter what happens during repair, go on to try to reverify
        try:
            exec(preamble + repair, namespace, namespace)
        except Exception, exc:
            print 'Exception occured during repair'
            traceback.print_exc()
        self.verify()
mblighe25fd5b2008-01-22 17:23:37 +0000204
205
mblighcaa62c22008-04-07 21:51:17 +0000206 def enable_external_logging(self):
207 """Start or restart external logging mechanism.
208 """
209 pass
210
211
212 def disable_external_logging(self):
213 """ Pause or stop external logging mechanism.
214 """
215 pass
216
217
218 def use_external_logging(self):
219 """Return True if external logging should be used.
220 """
221 return False
222
223
    def run(self, reboot = False, install_before = False,
            install_after = False, collect_crashdumps = True,
            namespace = {}):
        """Execute the job's control file.

        reboot: reboot all machines after the run
        install_before / install_after: reimage machines before/after
        collect_crashdumps: pull crashdumps off the machines afterwards
        namespace: extra names made visible to the control file (the
            default {} is never mutated -- a copy is taken immediately)
        """
        # use a copy so changes don't affect the original dictionary
        namespace = namespace.copy()
        machines = self.machines

        self.aborted = False
        namespace['machines'] = machines
        namespace['args'] = self.args
        namespace['job'] = self
        # remember when we started so crashdump collection can filter
        test_start_time = int(time.time())

        os.chdir(self.resultdir)

        self.enable_external_logging()
        # NOTE(review): status_log is computed but never used in this method
        status_log = os.path.join(self.resultdir, 'status.log')
        try:
            if install_before and machines:
                exec(preamble + install, namespace, namespace)
            if self.client:
                # client-side control file: wrap it so it gets pushed
                # out to and run on each machine via Autotest
                namespace['control'] = self.control
                open('control', 'w').write(self.control)
                open('control.srv', 'w').write(client_wrapper)
                server_control = client_wrapper
            else:
                open('control.srv', 'w').write(self.control)
                server_control = self.control
            exec(preamble + server_control, namespace, namespace)

        finally:
            # always collect crashdumps, stop external logging, and run
            # the post-job reboot/reinstall steps, even on failure
            if machines and collect_crashdumps:
                namespace['test_start_time'] = test_start_time
                exec(preamble + crashdumps,
                     namespace, namespace)
            self.disable_external_logging()
            if reboot and machines:
                exec(preamble + reboot_segment,
                     namespace, namespace)
            if install_after and machines:
                exec(preamble + install, namespace, namespace)
mblighf1c52842007-10-16 15:21:38 +0000265
266
267 def run_test(self, url, *args, **dargs):
268 """Summon a test object and run it.
269
270 tag
271 tag to add to testname
272 url
273 url of the test to run
274 """
275
mblighf1c52842007-10-16 15:21:38 +0000276 (group, testname) = test.testname(url)
277 tag = None
278 subdir = testname
mbligh43ac5222007-10-16 15:55:01 +0000279
mblighf1c52842007-10-16 15:21:38 +0000280 if dargs.has_key('tag'):
281 tag = dargs['tag']
282 del dargs['tag']
283 if tag:
284 subdir += '.' + tag
mblighf1c52842007-10-16 15:21:38 +0000285
mbligh43ac5222007-10-16 15:55:01 +0000286 try:
287 test.runtest(self, url, tag, args, dargs)
288 self.record('GOOD', subdir, testname, 'completed successfully')
289 except Exception, detail:
mbligh4e61c4e2008-04-07 18:36:59 +0000290 self.record('FAIL', subdir, testname, str(detail) + "\n" + format_error())
mblighf1c52842007-10-16 15:21:38 +0000291
292
293 def run_group(self, function, *args, **dargs):
294 """\
295 function:
296 subroutine to run
297 *args:
298 arguments for the function
299 """
300
301 result = None
302 name = function.__name__
303
304 # Allow the tag for the group to be specified.
305 if dargs.has_key('tag'):
306 tag = dargs['tag']
307 del dargs['tag']
308 if tag:
309 name = tag
310
311 # if tag:
312 # name += '.' + tag
313 old_record_prefix = self.record_prefix
314 try:
315 try:
316 self.record('START', None, name)
317 self.record_prefix += '\t'
318 result = function(*args, **dargs)
319 self.record_prefix = old_record_prefix
320 self.record('END GOOD', None, name)
321 except:
322 self.record_prefix = old_record_prefix
323 self.record('END FAIL', None, name, format_error())
324 # We don't want to raise up an error higher if it's just
325 # a TestError - we want to carry on to other tests. Hence
326 # this outer try/except block.
327 except TestError:
328 pass
329 except:
330 raise TestError(name + ' failed\n' + format_error())
331
332 return result
333
334
    def record(self, status_code, subdir, operation, status=''):
        """
        Record job-level status

        The intent is to make this file both machine parseable and
        human readable. That involves a little more complexity, but
        really isn't all that bad ;-)

        Format is <status code>\t<subdir>\t<operation>\t<status>

        status code: (GOOD|WARN|FAIL|ABORT)
            or START
            or END (GOOD|WARN|FAIL|ABORT)

        subdir: MUST be a relevant subdirectory in the results,
        or None, which will be represented as '----'

        operation: description of what you ran (e.g. "dbench", or
                   "mkfs -t foobar /dev/sda9")

        status: error message or "completed successfully"

        ------------------------------------------------------------

        Initial tabs indicate indent levels for grouping, and is
        governed by self.record_prefix

        multiline messages have secondary lines prefaced by a double
        space ('  ')

        Executing this method will trigger the logging of all new
        warnings to date from the various console loggers.
        """
        # poll all our warning loggers for new warnings
        warnings = self._read_warnings()
        for timestamp, msg in warnings:
            self.__record("WARN", None, None, msg, timestamp)

        # write out the actual status log line
        self.__record(status_code, subdir, operation, status)
375
376
377 def _read_warnings(self):
mblighf4e04152008-02-21 16:05:53 +0000378 warnings = []
379 while True:
380 # pull in a line of output from every logger that has
381 # output ready to be read
382 loggers, _, _ = select.select(self.warning_loggers,
383 [], [], 0)
384 closed_loggers = set()
385 for logger in loggers:
386 line = logger.readline()
387 # record any broken pipes (aka line == empty)
388 if len(line) == 0:
389 closed_loggers.add(logger)
390 continue
391 timestamp, msg = line.split('\t', 1)
392 warnings.append((int(timestamp), msg.strip()))
393
394 # stop listening to loggers that are closed
395 self.warning_loggers -= closed_loggers
396
397 # stop if none of the loggers have any output left
398 if not loggers:
399 break
400
mblighdab39662008-02-27 16:47:55 +0000401 # sort into timestamp order
402 warnings.sort()
403 return warnings
mblighf4e04152008-02-21 16:05:53 +0000404
405
    def _render_record(self, status_code, subdir, operation, status='',
                       epoch_time=None, record_prefix=None):
        """
        Internal Function to generate a record to be written into a
        status log. For use by server_job.* classes only.

        Returns the fully rendered line (including trailing newline).
        Raises ValueError on malformed status_code, subdir or operation.
        """
        if subdir:
            # tabs/newlines would break the tab-separated log format
            if re.match(r'[\n\t]', subdir):
                raise ValueError('Invalid character in subdir string')
            substr = subdir
        else:
            substr = '----'

        if not re.match(r'(START|(END )?(GOOD|WARN|FAIL|ABORT))$', \
                        status_code):
            raise ValueError('Invalid status code supplied: %s' % status_code)
        if not operation:
            operation = '----'
        if re.match(r'[\n\t]', operation):
            raise ValueError('Invalid character in operation string')
        operation = operation.rstrip()
        status = status.rstrip()
        status = re.sub(r"\t", " ", status)
        # Ensure any continuation lines are marked so we can
        # detect them in the status file to ensure it is parsable.
        status = re.sub(r"\n", "\n" + self.record_prefix + "  ", status)

        # Generate timestamps for inclusion in the logs
        if epoch_time is None:
            epoch_time = int(time.time())
        local_time = time.localtime(epoch_time)
        epoch_time_str = "timestamp=%d" % (epoch_time,)
        local_time_str = time.strftime("localtime=%b %d %H:%M:%S",
                                       local_time)

        if record_prefix is None:
            record_prefix = self.record_prefix

        msg = '\t'.join(str(x) for x in (status_code, substr, operation,
                                         epoch_time_str, local_time_str,
                                         status))
        return record_prefix + msg + '\n'
448
449
    def _record_prerendered(self, msg):
        """
        Record a pre-rendered msg into the status logs. The only
        change this makes to the message is to add on the local
        indentation. Should not be called outside of server_job.*
        classes. Unlike __record, this does not write the message
        to standard output.
        """
        status_file = os.path.join(self.resultdir, 'status.log')
        status_log = open(status_file, 'a')
        need_reparse = False
        for line in msg.splitlines():
            line = self.record_prefix + line + '\n'
            status_log.write(line)
            # reparse once if any written line is one the parser consumes
            if self.__need_reparse(line):
                need_reparse = True
        status_log.close()
        if need_reparse:
            self.__parse_status()
mblighdab39662008-02-27 16:47:55 +0000469
470
    def __record(self, status_code, subdir, operation, status='',
                 epoch_time=None):
        """
        Actual function for recording a single line into the status
        logs. Should never be called directly, only by job.record as
        this would bypass the console monitor logging.

        Writes the rendered line to stdout, the job status log, and
        (when subdir is given) the test's own status file, then kicks
        the parser if the line warrants it.
        """

        msg = self._render_record(status_code, subdir, operation,
                                  status, epoch_time)


        status_file = os.path.join(self.resultdir, 'status.log')
        sys.stdout.write(msg)
        open(status_file, "a").write(msg)
        if subdir:
            # mirror the line into the test's own status file as well
            test_dir = os.path.join(self.resultdir, subdir)
            if not os.path.exists(test_dir):
                os.mkdir(test_dir)
            status_file = os.path.join(test_dir, 'status')
            open(status_file, "a").write(msg)
        if self.__need_reparse(msg):
            self.__parse_status()
494
495
496 def __need_reparse(self, line):
497 # the parser will not record results if lines have more than
498 # one level of indentation
499 indent = len(re.search(r"^(\t*)", line).group(1))
500 if indent > 1:
501 return False
502 # we can also skip START lines, as they add nothing
503 line = line.lstrip("\t")
504 if line.startswith("START\t"):
505 return False
506 # otherwise, we should do a parse
507 return True
mblighdbdac6c2008-03-05 15:49:58 +0000508
509
510 def __parse_status(self):
511 """
512 If a .parse.cmd file is present in the results directory,
513 launch the tko parser.
514 """
515 cmdfile = os.path.join(self.resultdir, '.parse.cmd')
516 if os.path.exists(cmdfile):
517 cmd = open(cmdfile).read().strip()
518 subprocess.Popen(cmd, shell=True)
mblighdab39662008-02-27 16:47:55 +0000519
520
# a file-like object for catching stderr from an autotest client and
# extracting status logs from it
class client_logger(object):
    """Partial file object to write to both stdout and
    the status log file. We only implement those methods
    utils.run() actually calls.
    """
    # matches lines the client emits as "AUTOTEST_STATUS:<tag>:<line>"
    parser = re.compile(r"^AUTOTEST_STATUS:([^:]*):(.*)$")
    # captures the leading tab indentation of a log line
    extract_indent = re.compile(r"^(\t*).*$")

    def __init__(self, job):
        # job: the server_job whose status logs we feed into
        self.job = job
        # partial trailing line not yet terminated by '\n'
        self.leftover = ""
        # most recent line pushed out; used to indent warnings
        self.last_line = ""
        # tree of buffered log lines, keyed by dotted-tag components
        self.logs = {}


    def _process_log_dict(self, log_dict):
        """Flatten a (sub)tree of buffered logs into an ordered list,
        consuming (emptying) the tree as it goes."""
        log_list = log_dict.pop("logs", [])
        for key in sorted(log_dict.iterkeys()):
            log_list += self._process_log_dict(log_dict.pop(key))
        return log_list


    def _process_logs(self):
        """Go through the accumulated logs in self.log and print them
        out to stdout and the status log. Note that this processes
        logs in an ordering where:

        1) logs to different tags are never interleaved
        2) logs to x.y come before logs to x.y.z for all z
        3) logs to x.y come before x.z whenever y < z

        Note that this will in general not be the same as the
        chronological ordering of the logs. However, if a chronological
        ordering is desired that one can be reconstructed from the
        status log by looking at timestamp lines."""
        log_list = self._process_log_dict(self.logs)
        for line in log_list:
            self.job._record_prerendered(line + '\n')
        if log_list:
            self.last_line = log_list[-1]


    def _process_quoted_line(self, tag, line):
        """Process a line quoted with an AUTOTEST_STATUS flag. If the
        tag is blank then we want to push out all the data we've been
        building up in self.logs, and then the newest line. If the
        tag is not blank, then push the line into the logs for handling
        later."""
        print line
        if tag == "":
            self._process_logs()
            self.job._record_prerendered(line + '\n')
            self.last_line = line
        else:
            # descend/extend the log tree along the dotted tag path
            tag_parts = [int(x) for x in tag.split(".")]
            log_dict = self.logs
            for part in tag_parts:
                log_dict = log_dict.setdefault(part, {})
            log_list = log_dict.setdefault("logs", [])
            log_list.append(line)


    def _process_line(self, line):
        """Write out a line of data to the appropriate stream. Status
        lines sent by autotest will be prepended with
        "AUTOTEST_STATUS", and all other lines are ssh error
        messages."""
        match = self.parser.search(line)
        if match:
            tag, line = match.groups()
            self._process_quoted_line(tag, line)
        else:
            print line


    def _format_warnings(self, last_line, warnings):
        """Render (timestamp, msg) warnings as WARN status lines,
        indented to match the context of last_line."""
        # use the indentation of whatever the last log line was
        indent = self.extract_indent.match(last_line).group(1)
        # if the last line starts a new group, add an extra indent
        if last_line.lstrip('\t').startswith("START\t"):
            indent += '\t'
        return [self.job._render_record("WARN", None, None, msg,
                                        timestamp, indent).rstrip('\n')
                for timestamp, msg in warnings]


    def _process_warnings(self, last_line, log_dict, warnings):
        """Push warnings down into the deepest pending sub-job logs so
        they appear in context when the buffers are flushed."""
        if log_dict.keys() in ([], ["logs"]):
            # there are no sub-jobs, just append the warnings here
            warnings = self._format_warnings(last_line, warnings)
            log_list = log_dict.setdefault("logs", [])
            log_list += warnings
            for warning in warnings:
                sys.stdout.write(warning + '\n')
        else:
            # there are sub-jobs, so put the warnings in there
            log_list = log_dict.get("logs", [])
            if log_list:
                last_line = log_list[-1]
            for key in sorted(log_dict.iterkeys()):
                if key != "logs":
                    self._process_warnings(last_line,
                                           log_dict[key],
                                           warnings)


    def write(self, data):
        # first check for any new console warnings
        warnings = self.job._read_warnings()
        self._process_warnings(self.last_line, self.logs, warnings)
        # now process the newest data written out
        data = self.leftover + data
        lines = data.split("\n")
        # process every line but the last one
        for line in lines[:-1]:
            self._process_line(line)
        # save the last line for later processing
        # since we may not have the whole line yet
        self.leftover = lines[-1]


    def flush(self):
        # only stdout needs flushing; status files are opened per write
        sys.stdout.flush()


    def close(self):
        # push out any remaining partial line and buffered logs
        if self.leftover:
            self._process_line(self.leftover)
        self._process_logs()
        self.flush()
mblighcaa62c22008-04-07 21:51:17 +0000653
# site_server_job.py may be non-existent or empty, make sure that an
# appropriate site_server_job class is created nevertheless
try:
    from site_server_job import site_server_job
except ImportError:
    class site_server_job(base_server_job):
        pass

# the class everyone should instantiate; it picks up site customizations
# automatically when they exist
class server_job(site_server_job):
    pass