1 #!/usr/bin/python -u
3 # Copyright 2008 Google Inc. All Rights Reserved.
6 """Tests for job."""
8 import copy, getpass, unittest, sys, os
10 import common
11 from autotest_lib.cli import cli_mock, topic_common, job
12 from autotest_lib.client.common_lib.test_utils import mock
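# The test classes below exercise the 'atest job ...' CLI subcommands through
# cli_mock.cli_unittest: run_cmd() replays the expected RPC calls and canned
# responses listed in rpcs, then checks which words must (or must not) appear
# in the command's stdout/stderr.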
15 class job_unittest(cli_mock.cli_unittest):
16 def setUp(self):
17 super(job_unittest, self).setUp()
18 self.values = copy.deepcopy(self.values_template)
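# Canned 'get_jobs_summary' rows; the job_stat tests below copy these via
# self.results.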
20 results = [{u'status_counts': {u'Aborted': 1},
21 u'control_file':
22 u"job.run_test('sleeptest')\n",
23 u'name': u'test_job0',
24 u'control_type': u'Server',
25 u'priority':
26 u'Medium',
27 u'owner': u'user0',
28 u'created_on':
29 u'2008-07-08 17:45:44',
30 u'synch_count': 2,
31 u'id': 180},
32 {u'status_counts': {u'Queued': 1},
33 u'control_file':
34 u"job.run_test('sleeptest')\n",
35 u'name': u'test_job1',
36 u'control_type': u'Client',
37 u'priority':
38 u'High',
39 u'owner': u'user0',
40 u'created_on':
41 u'2008-07-08 12:17:47',
42 u'synch_count': 1,
43 u'id': 338}]
46 values_template = [{u'id': 180, # Valid job
47 u'priority': u'Low',
48 u'name': u'test_job0',
49 u'owner': u'Cringer',
50 u'invalid': False,
51 u'created_on': u'2008-07-02 13:02:40',
52 u'control_type': u'Server',
53 u'status_counts': {u'Queued': 1},
54 u'synch_count': 2},
55 {u'id': 338, # Valid job
56 u'priority': 'High',
57 u'name': u'test_job1',
58 u'owner': u'Fisto',
59 u'invalid': False,
60 u'created_on': u'2008-07-06 14:05:33',
61 u'control_type': u'Client',
62 u'status_counts': {u'Queued': 1},
63 u'synch_count': 1},
64 {u'id': 339, # Valid job
65 u'priority': 'Medium',
66 u'name': u'test_job2',
67 u'owner': u'Roboto',
68 u'invalid': False,
69 u'created_on': u'2008-07-07 15:33:18',
70 u'control_type': u'Server',
71 u'status_counts': {u'Queued': 1},
72 u'synch_count': 1},
73 {u'id': 340, # Invalid job priority
74 u'priority': u'Uber',
75 u'name': u'test_job3',
76 u'owner': u'Panthor',
77 u'invalid': True,
78 u'created_on': u'2008-07-04 00:00:01',
79 u'control_type': u'Server',
80 u'status_counts': {u'Queued': 1},
81 u'synch_count': 2},
82 {u'id': 350, # Invalid job created_on
83 u'priority': 'Medium',
84 u'name': u'test_job4',
85 u'owner': u'Icer',
86 u'invalid': True,
87 u'created_on': u'Today',
88 u'control_type': u'Client',
89 u'status_counts': {u'Queued': 1},
90 u'synch_count': 1},
91 {u'id': 420, # Invalid job control_type
92 u'priority': 'Urgent',
93 u'name': u'test_job5',
94 u'owner': u'Spikor',
95 u'invalid': True,
96 u'created_on': u'2012-08-08 18:54:37',
97 u'control_type': u'Child',
98 u'status_counts': {u'Queued': 1},
99 u'synch_count': 2}]
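# Tests for 'atest job list': filtering by owner, job id, and name wildcard,
# plus --all and --verbose output, all driven off the self.values fixture
# populated in setUp().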
102 class job_list_unittest(job_unittest):
103 def test_job_list_jobs(self):
104 self.god.stub_function(getpass, 'getuser')
105 getpass.getuser.expect_call().and_return('user0')
106 self.run_cmd(argv=['atest', 'job', 'list', '--ignore_site_file'],
107 rpcs=[('get_jobs_summary', {'owner': 'user0',
108 'running': None},
109 True, self.values)],
110 out_words_ok=['test_job0', 'test_job1', 'test_job2'],
111 out_words_no=['Uber', 'Today', 'Child'])
114 def test_job_list_jobs_only_user(self):
115 values = [item for item in self.values if item['owner'] == 'Cringer']
116 self.run_cmd(argv=['atest', 'job', 'list', '-u', 'Cringer',
117 '--ignore_site_file'],
118 rpcs=[('get_jobs_summary', {'owner': 'Cringer',
119 'running': None},
120 True, values)],
121 out_words_ok=['Cringer'],
122 out_words_no=['Fisto', 'Roboto', 'Panthor', 'Icer',
123 'Spikor'])
126 def test_job_list_jobs_all(self):
127 self.run_cmd(argv=['atest', 'job', 'list', '--all',
128 '--ignore_site_file'],
129 rpcs=[('get_jobs_summary', {'running': None},
130 True, self.values)],
131 out_words_ok=['Fisto', 'Roboto', 'Panthor',
132 'Icer', 'Spikor', 'Cringer'],
133 out_words_no=['Created', 'Priority'])
136 def test_job_list_jobs_id(self):
137 self.run_cmd(argv=['atest', 'job', 'list', '5964',
138 '--ignore_site_file'],
139 rpcs=[('get_jobs_summary', {'id__in': ['5964'],
140 'running': None},
141 True,
142 [{u'status_counts': {u'Completed': 1},
143 u'control_file': u'kernel = \'8210088647656509311.kernel-smp-2.6.18-220.5.x86_64.rpm\'\ndef step_init():\n job.next_step([step_test])\n testkernel = job.kernel(\'8210088647656509311.kernel-smp-2.6.18-220.5.x86_64.rpm\')\n \n testkernel.install()\n testkernel.boot(args=\'console_always_print=1\')\n\ndef step_test():\n job.next_step(\'step0\')\n\ndef step0():\n AUTHOR = "Autotest Team"\n NAME = "Sleeptest"\n TIME = "SHORT"\n TEST_CATEGORY = "Functional"\n TEST_CLASS = "General"\n TEST_TYPE = "client"\n \n DOC = """\n This test simply sleeps for 1 second by default. It\'s a good way to test\n profilers and double check that autotest is working.\n The seconds argument can also be modified to make the machine sleep for as\n long as needed.\n """\n \n job.run_test(\'sleeptest\', seconds = 1)',
144 u'name': u'mytest',
145 u'control_type': u'Client',
146 u'run_verify': 1,
147 u'priority': u'Medium',
148 u'owner': u'user0',
149 u'created_on': u'2008-07-28 12:42:52',
150 u'timeout': 144,
151 u'synch_count': 1,
152 u'id': 5964}])],
153 out_words_ok=['user0', 'Completed', '1', '5964'],
154 out_words_no=['sleeptest', 'Priority', 'Client', '2008'])
157 def test_job_list_jobs_id_verbose(self):
158 self.run_cmd(argv=['atest', 'job', 'list', '5964', '-v',
159 '--ignore_site_file'],
160 rpcs=[('get_jobs_summary', {'id__in': ['5964'],
161 'running': None},
162 True,
163 [{u'status_counts': {u'Completed': 1},
164 u'control_file': u'kernel = \'8210088647656509311.kernel-smp-2.6.18-220.5.x86_64.rpm\'\ndef step_init():\n job.next_step([step_test])\n testkernel = job.kernel(\'8210088647656509311.kernel-smp-2.6.18-220.5.x86_64.rpm\')\n \n testkernel.install()\n testkernel.boot(args=\'console_always_print=1\')\n\ndef step_test():\n job.next_step(\'step0\')\n\ndef step0():\n AUTHOR = "Autotest Team"\n NAME = "Sleeptest"\n TIME = "SHORT"\n TEST_CATEGORY = "Functional"\n TEST_CLASS = "General"\n TEST_TYPE = "client"\n \n DOC = """\n This test simply sleeps for 1 second by default. It\'s a good way to test\n profilers and double check that autotest is working.\n The seconds argument can also be modified to make the machine sleep for as\n long as needed.\n """\n \n job.run_test(\'sleeptest\', seconds = 1)',
165 u'name': u'mytest',
166 u'control_type': u'Client',
167 u'run_verify': 1,
168 u'priority': u'Medium',
169 u'owner': u'user0',
170 u'created_on': u'2008-07-28 12:42:52',
171 u'timeout': 144,
172 u'synch_count': 1,
173 u'id': 5964}])],
174 out_words_ok=['user0', 'Completed', '1', '5964',
175 'Client', '2008', 'Priority'],
176 out_words_no=['sleeptest'])
179 def test_job_list_jobs_name(self):
180 self.run_cmd(argv=['atest', 'job', 'list', 'myt*',
181 '--ignore_site_file'],
182 rpcs=[('get_jobs_summary', {'name__startswith': 'myt',
183 'running': None},
184 True,
185 [{u'status_counts': {u'Completed': 1},
186 u'control_file': u'kernel = \'8210088647656509311.kernel-smp-2.6.18-220.5.x86_64.rpm\'\ndef step_init():\n job.next_step([step_test])\n testkernel = job.kernel(\'8210088647656509311.kernel-smp-2.6.18-220.5.x86_64.rpm\')\n \n testkernel.install()\n testkernel.boot(args=\'console_always_print=1\')\n\ndef step_test():\n job.next_step(\'step0\')\n\ndef step0():\n AUTHOR = "Autotest Team"\n NAME = "Sleeptest"\n TIME = "SHORT"\n TEST_CATEGORY = "Functional"\n TEST_CLASS = "General"\n TEST_TYPE = "client"\n \n DOC = """\n This test simply sleeps for 1 second by default. It\'s a good way to test\n profilers and double check that autotest is working.\n The seconds argument can also be modified to make the machine sleep for as\n long as needed.\n """\n \n job.run_test(\'sleeptest\', seconds = 1)',
187 u'name': u'mytest',
188 u'control_type': u'Client',
189 u'run_verify': 1,
190 u'priority': u'Medium',
191 u'owner': u'user0',
192 u'created_on': u'2008-07-28 12:42:52',
193 u'timeout': 144,
194 u'synch_count': 1,
195 u'id': 5964}])],
196 out_words_ok=['user0', 'Completed', '1', '5964'],
197 out_words_no=['sleeptest', 'Priority', 'Client', '2008'])
200 def test_job_list_jobs_all_verbose(self):
201 self.run_cmd(argv=['atest', 'job', 'list', '--all', '--verbose',
202 '--ignore_site_file'],
203 rpcs=[('get_jobs_summary', {'running': None},
204 True, self.values)],
205 out_words_ok=['Fisto', 'Spikor', 'Cringer', 'Priority',
206 'Created'])
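# Combining --all with -u <user> is rejected at parse time; cli_mock converts
# the expected sys.exit() call into an ExitException.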
209 class job_list_jobs_all_and_user_unittest(cli_mock.cli_unittest):
210 def test_job_list_jobs_all_and_user(self):
211 testjob = job.job_list()
212 sys.argv = ['atest', 'job', 'list', '-a', '-u', 'user0',
213 '--ignore_site_file']
214 self.god.mock_io()
215 (sys.exit.expect_call(mock.anything_comparator())
216 .and_raises(cli_mock.ExitException))
217 self.assertRaises(cli_mock.ExitException, testjob.parse)
218 self.god.unmock_io()
219 self.god.check_playback()
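# Tests for 'atest job stat': single and multiple job ids, --list-hosts,
# --list-hosts-status, and queue entries that have no host assigned yet
# (meta-host scheduling).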
222 class job_stat_unittest(job_unittest):
223 def test_job_stat_job(self):
224 results = copy.deepcopy(self.results)
225 self.run_cmd(argv=['atest', 'job', 'stat', '180',
226 '--ignore_site_file'],
227 rpcs=[('get_jobs_summary', {'id__in': ['180']}, True,
228 [results[0]]),
229 ('get_host_queue_entries', {'job__in': ['180']},
230 True,
231 [{u'status': u'Failed',
232 u'complete': 1,
233 u'host': {u'status': u'Repair Failed',
234 u'locked': False,
235 u'hostname': u'host0',
236 u'invalid': True,
237 u'id': 4432,
238 u'synch_id': None},
239 u'priority': 1,
240 u'meta_host': None,
241 u'job': {u'control_file': u"def run(machine):\n\thost = hosts.create_host(machine)\n\tat = autotest.Autotest(host)\n\tat.run_test('sleeptest')\n\nparallel_simple(run, machines)",
242 u'name': u'test_sleep',
243 u'control_type': u'Server',
244 u'synchronizing': 0,
245 u'priority': u'Medium',
246 u'owner': u'user0',
247 u'created_on': u'2008-03-18 11:27:29',
248 u'synch_count': 1,
249 u'id': 180},
250 u'active': 0,
251 u'id': 101084}])],
252 out_words_ok=['test_job0', 'host0', 'Failed',
253 'Aborted'])
257 def test_job_stat_list_unassigned_host(self):
258 self.run_cmd(argv=['atest', 'job', 'stat', '6761',
259 '--list-hosts', '--ignore_site_file'],
260 rpcs=[('get_jobs_summary', {'id__in': ['6761']}, True,
261 [{u'status_counts': {u'Queued': 1},
262 u'control_file': u'def step_init():\n job.next_step(\'step0\')\n\ndef step0():\n AUTHOR = "mbligh@google.com (Martin Bligh)"\n NAME = "Kernbench"\n TIME = "SHORT"\n TEST_CLASS = "Kernel"\n TEST_CATEGORY = "Benchmark"\n TEST_TYPE = "client"\n \n DOC = """\n A standard CPU benchmark. Runs a kernel compile and measures the performance.\n """\n \n job.run_test(\'kernbench\')',
263 u'name': u'test_on_meta_hosts',
264 u'control_type': u'Client',
265 u'run_verify': 1,
266 u'priority': u'Medium',
267 u'owner': u'user0',
268 u'created_on': u'2008-07-30 22:15:43',
269 u'timeout': 144,
270 u'synch_count': 1,
271 u'id': 6761}]),
272 ('get_host_queue_entries', {'job__in': ['6761']},
273 True,
274 [{u'status': u'Queued',
275 u'complete': 0,
276 u'deleted': 0,
277 u'host': None,
278 u'priority': 1,
279 u'meta_host': u'Xeon',
280 u'job': {u'control_file': u'def step_init():\n job.next_step(\'step0\')\n\ndef step0():\n AUTHOR = "mbligh@google.com (Martin Bligh)"\n NAME = "Kernbench"\n TIME = "SHORT"\n TEST_CLASS = "Kernel"\n TEST_CATEGORY = "Benchmark"\n TEST_TYPE = "client"\n \n DOC = """\n A standard CPU benchmark. Runs a kernel compile and measures the performance.\n """\n \n job.run_test(\'kernbench\')',
281 u'name': u'test_on_meta_hosts',
282 u'control_type': u'Client',
283 u'run_verify': 1,
284 u'priority': u'Medium',
285 u'owner': u'user0',
286 u'created_on': u'2008-07-30 22:15:43',
287 u'timeout': 144,
288 u'synch_count': 1,
289 u'id': 6761},
290 u'active': 0,
291 u'id': 193166} ])],
292 err_words_ok=['unassigned', 'meta-hosts'],
293 out_words_no=['Xeon'])
296 def test_job_stat_list_hosts(self):
297 self.run_cmd(argv=['atest', 'job', 'stat', '6761',
298 '--list-hosts', '--ignore_site_file'],
299 rpcs=[('get_jobs_summary', {'id__in': ['6761']}, True,
300 [{u'status_counts': {u'Queued': 1},
301 u'control_file': u'def step_init():\n job.next_step(\'step0\')\n\ndef step0():\n AUTHOR = "mbligh@google.com (Martin Bligh)"\n NAME = "Kernbench"\n TIME = "SHORT"\n TEST_CLASS = "Kernel"\n TEST_CATEGORY = "Benchmark"\n TEST_TYPE = "client"\n \n DOC = """\n A standard CPU benchmark. Runs a kernel compile and measures the performance.\n """\n \n job.run_test(\'kernbench\')',
302 u'name': u'test_on_meta_hosts',
303 u'control_type': u'Client',
304 u'run_verify': 1,
305 u'priority': u'Medium',
306 u'owner': u'user0',
307 u'created_on': u'2008-07-30 22:15:43',
308 u'timeout': 144,
309 u'synch_count': 1,
310 u'id': 6761}]),
311 ('get_host_queue_entries', {'job__in': ['6761']},
312 True,
313 [{u'status': u'Queued',
314 u'complete': 0,
315 u'deleted': 0,
316 u'host': {u'status': u'Running',
317 u'lock_time': None,
318 u'hostname': u'host41',
319 u'locked': False,
320 u'locked_by': None,
321 u'invalid': False,
322 u'id': 4833,
323 u'protection': u'Repair filesystem only',
324 u'synch_id': None},
325 u'priority': 1,
326 u'meta_host': u'Xeon',
327 u'job': {u'control_file': u'def step_init():\n job.next_step(\'step0\')\n\ndef step0():\n AUTHOR = "mbligh@google.com (Martin Bligh)"\n NAME = "Kernbench"\n TIME = "SHORT"\n TEST_CLASS = "Kernel"\n TEST_CATEGORY = "Benchmark"\n TEST_TYPE = "client"\n \n DOC = """\n A standard CPU benchmark. Runs a kernel compile and measures the performance.\n """\n \n job.run_test(\'kernbench\')',
328 u'name': u'test_on_meta_hosts',
329 u'control_type': u'Client',
330 u'run_verify': 1,
331 u'priority': u'Medium',
332 u'owner': u'user0',
333 u'created_on': u'2008-07-30 22:15:43',
334 u'timeout': 144,
335 u'synch_count': 1,
336 u'id': 6761},
337 u'active': 0,
338 u'id': 193166},
339 {u'status': u'Running',
340 u'complete': 0,
341 u'deleted': 0,
342 u'host': {u'status': u'Running',
343 u'lock_time': None,
344 u'hostname': u'host42',
345 u'locked': False,
346 u'locked_by': None,
347 u'invalid': False,
348 u'id': 4833,
349 u'protection': u'Repair filesystem only',
350 u'synch_id': None},
351 u'priority': 1,
352 u'meta_host': u'Xeon',
353 u'job': {u'control_file': u'def step_init():\n job.next_step(\'step0\')\n\ndef step0():\n AUTHOR = "mbligh@google.com (Martin Bligh)"\n NAME = "Kernbench"\n TIME = "SHORT"\n TEST_CLASS = "Kernel"\n TEST_CATEGORY = "Benchmark"\n TEST_TYPE = "client"\n \n DOC = """\n A standard CPU benchmark. Runs a kernel compile and measures the performance.\n """\n \n job.run_test(\'kernbench\')',
354 u'name': u'test_on_meta_hosts',
355 u'control_type': u'Client',
356 u'run_verify': 1,
357 u'priority': u'Medium',
358 u'owner': u'user0',
359 u'created_on': u'2008-07-30 22:15:43',
360 u'timeout': 144,
361 u'synch_count': 1,
362 u'id': 6761},
363 u'active': 0,
364 u'id': 193166} ])],
365 out_words_ok=['host41', 'host42'],
366 out_words_no=['Xeon', 'Running', 'Queued'],
367 err_words_no=['unassigned'])
370 def test_job_stat_list_hosts_status(self):
371 self.run_cmd(argv=['atest', 'job', 'stat', '6761',
372 '--list-hosts-status', 'Running,Queued',
373 '--ignore_site_file'],
374 rpcs=[('get_jobs_summary', {'id__in': ['6761']}, True,
375 [{u'status_counts': {u'Queued': 1, u'Running': 1},
376 u'control_file': u'def step_init():\n job.next_step(\'step0\')\n\ndef step0():\n AUTHOR = "mbligh@google.com (Martin Bligh)"\n NAME = "Kernbench"\n TIME = "SHORT"\n TEST_CLASS = "Kernel"\n TEST_CATEGORY = "Benchmark"\n TEST_TYPE = "client"\n \n DOC = """\n A standard CPU benchmark. Runs a kernel compile and measures the performance.\n """\n \n job.run_test(\'kernbench\')',
377 u'name': u'test',
378 u'control_type': u'Client',
379 u'run_verify': 1,
380 u'priority': u'Medium',
381 u'owner': u'user0',
382 u'created_on': u'2008-07-30 22:15:43',
383 u'timeout': 144,
384 u'synch_count': 1,
385 u'id': 6761}]),
386 ('get_host_queue_entries', {'job__in': ['6761']},
387 True,
388 [{u'status': u'Queued',
389 u'complete': 0,
390 u'deleted': 0,
391 u'host': {u'status': u'Queued',
392 u'lock_time': None,
393 u'hostname': u'host41',
394 u'locked': False,
395 u'locked_by': None,
396 u'invalid': False,
397 u'id': 4833,
398 u'protection': u'Repair filesystem only',
399 u'synch_id': None},
400 u'priority': 1,
401 u'meta_host': None,
402 u'job': {u'control_file': u'def step_init():\n job.next_step(\'step0\')\n\ndef step0():\n AUTHOR = "mbligh@google.com (Martin Bligh)"\n NAME = "Kernbench"\n TIME = "SHORT"\n TEST_CLASS = "Kernel"\n TEST_CATEGORY = "Benchmark"\n TEST_TYPE = "client"\n \n DOC = """\n A standard CPU benchmark. Runs a kernel compile and measures the performance.\n """\n \n job.run_test(\'kernbench\')',
403 u'name': u'test',
404 u'control_type': u'Client',
405 u'run_verify': 1,
406 u'priority': u'Medium',
407 u'owner': u'user0',
408 u'created_on': u'2008-07-30 22:15:43',
409 u'timeout': 144,
410 u'synch_count': 1,
411 u'id': 6761},
412 u'active': 0,
413 u'id': 193166},
414 {u'status': u'Running',
415 u'complete': 0,
416 u'deleted': 0,
417 u'host': {u'status': u'Running',
418 u'lock_time': None,
419 u'hostname': u'host42',
420 u'locked': False,
421 u'locked_by': None,
422 u'invalid': False,
423 u'id': 4833,
424 u'protection': u'Repair filesystem only',
425 u'synch_id': None},
426 u'priority': 1,
427 u'meta_host': None,
428 u'job': {u'control_file': u'def step_init():\n job.next_step(\'step0\')\n\ndef step0():\n AUTHOR = "mbligh@google.com (Martin Bligh)"\n NAME = "Kernbench"\n TIME = "SHORT"\n TEST_CLASS = "Kernel"\n TEST_CATEGORY = "Benchmark"\n TEST_TYPE = "client"\n \n DOC = """\n A standard CPU benchmark. Runs a kernel compile and measures the performance.\n """\n \n job.run_test(\'kernbench\')',
429 u'name': u'test',
430 u'control_type': u'Client',
431 u'run_verify': 1,
432 u'priority': u'Medium',
433 u'owner': u'user0',
434 u'created_on': u'2008-07-30 22:15:43',
435 u'timeout': 144,
436 u'synch_count': 1,
437 u'id': 6761},
438 u'active': 0,
439 u'id': 193166} ])],
440 out_words_ok=['Queued', 'Running', 'host41', 'host42'],
441 out_words_no=['Xeon'],
442 err_words_no=['unassigned'])
445 def test_job_stat_job_multiple_hosts(self):
446 self.run_cmd(argv=['atest', 'job', 'stat', '6761',
447 '--ignore_site_file'],
448 rpcs=[('get_jobs_summary', {'id__in': ['6761']}, True,
449 [{u'status_counts': {u'Running': 1,
450 u'Queued': 4},
451 u'control_file': u'def step_init():\n job.next_step(\'step0\')\n\ndef step0():\n AUTHOR = "mbligh@google.com (Martin Bligh)"\n NAME = "Kernbench"\n TIME = "SHORT"\n TEST_CLASS = "Kernel"\n TEST_CATEGORY = "Benchmark"\n TEST_TYPE = "client"\n \n DOC = """\n A standard CPU benchmark. Runs a kernel compile and measures the performance.\n """\n \n job.run_test(\'kernbench\')',
452 u'name': u'test_on_meta_hosts',
453 u'control_type': u'Client',
454 u'run_verify': 1,
455 u'priority': u'Medium',
456 u'owner': u'user0',
457 u'created_on': u'2008-07-30 22:15:43',
458 u'timeout': 144,
459 u'synch_count': 1,
460 u'id': 6761}]),
461 ('get_host_queue_entries', {'job__in': ['6761']},
462 True,
463 [{u'status': u'Queued',
464 u'complete': 0,
465 u'deleted': 0,
466 u'host': None,
467 u'priority': 1,
468 u'meta_host': u'Xeon',
469 u'job': {u'control_file': u'def step_init():\n job.next_step(\'step0\')\n\ndef step0():\n AUTHOR = "mbligh@google.com (Martin Bligh)"\n NAME = "Kernbench"\n TIME = "SHORT"\n TEST_CLASS = "Kernel"\n TEST_CATEGORY = "Benchmark"\n TEST_TYPE = "client"\n \n DOC = """\n A standard CPU benchmark. Runs a kernel compile and measures the performance.\n """\n \n job.run_test(\'kernbench\')',
470 u'name': u'test_on_meta_hosts',
471 u'control_type': u'Client',
472 u'run_verify': 1,
473 u'priority': u'Medium',
474 u'owner': u'user0',
475 u'created_on': u'2008-07-30 22:15:43',
476 u'timeout': 144,
477 u'synch_count': 1,
478 u'id': 6761},
479 u'active': 0,
480 u'id': 193166},
481 {u'status': u'Queued',
482 u'complete': 0,
483 u'deleted': 0,
484 u'host': None,
485 u'priority': 1,
486 u'meta_host': u'Xeon',
487 u'job': {u'control_file': u'def step_init():\n job.next_step(\'step0\')\n\ndef step0():\n AUTHOR = "mbligh@google.com (Martin Bligh)"\n NAME = "Kernbench"\n TIME = "SHORT"\n TEST_CLASS = "Kernel"\n TEST_CATEGORY = "Benchmark"\n TEST_TYPE = "client"\n \n DOC = """\n A standard CPU benchmark. Runs a kernel compile and measures the performance.\n """\n \n job.run_test(\'kernbench\')',
488 u'name': u'test_on_meta_hosts',
489 u'control_type': u'Client',
490 u'run_verify': 1,
491 u'priority': u'Medium',
492 u'owner': u'user0',
493 u'created_on': u'2008-07-30 22:15:43',
494 u'timeout': 144,
495 u'synch_count': 1,
496 u'id': 6761},
497 u'active': 0,
498 u'id': 193167},
499 {u'status': u'Queued',
500 u'complete': 0,
501 u'deleted': 0,
502 u'host': None,
503 u'priority': 1,
504 u'meta_host': u'Athlon',
505 u'job': {u'control_file': u'def step_init():\n job.next_step(\'step0\')\n\ndef step0():\n AUTHOR = "mbligh@google.com (Martin Bligh)"\n NAME = "Kernbench"\n TIME = "SHORT"\n TEST_CLASS = "Kernel"\n TEST_CATEGORY = "Benchmark"\n TEST_TYPE = "client"\n \n DOC = """\n A standard CPU benchmark. Runs a kernel compile and measures the performance.\n """\n \n job.run_test(\'kernbench\')',
506 u'name': u'test_on_meta_hosts',
507 u'control_type': u'Client',
508 u'run_verify': 1,
509 u'priority': u'Medium',
510 u'owner': u'user0',
511 u'created_on': u'2008-07-30 22:15:43',
512 u'timeout': 144,
513 u'synch_count': 1,
514 u'id': 6761},
515 u'active': 0,
516 u'id': 193168},
517 {u'status': u'Queued',
518 u'complete': 0,
519 u'deleted': 0,
520 u'host': None,
521 u'priority': 1,
522 u'meta_host': u'x286',
523 u'job': {u'control_file': u'def step_init():\n job.next_step(\'step0\')\n\ndef step0():\n AUTHOR = "mbligh@google.com (Martin Bligh)"\n NAME = "Kernbench"\n TIME = "SHORT"\n TEST_CLASS = "Kernel"\n TEST_CATEGORY = "Benchmark"\n TEST_TYPE = "client"\n \n DOC = """\n A standard CPU benchmark. Runs a kernel compile and measures the performance.\n """\n \n job.run_test(\'kernbench\')',
524 u'name': u'test_on_meta_hosts',
525 u'control_type': u'Client',
526 u'run_verify': 1,
527 u'priority': u'Medium',
528 u'owner': u'user0',
529 u'created_on': u'2008-07-30 22:15:43',
530 u'timeout': 144,
531 u'synch_count': 1,
532 u'id': 6761},
533 u'active': 0,
534 u'id': 193169},
535 {u'status': u'Running',
536 u'complete': 0,
537 u'deleted': 0,
538 u'host': {u'status': u'Running',
539 u'lock_time': None,
540 u'hostname': u'host42',
541 u'locked': False,
542 u'locked_by': None,
543 u'invalid': False,
544 u'id': 4833,
545 u'protection': u'Repair filesystem only',
546 u'synch_id': None},
547 u'priority': 1,
548 u'meta_host': u'Athlon',
549 u'job': {u'control_file': u'def step_init():\n job.next_step(\'step0\')\n\ndef step0():\n AUTHOR = "mbligh@google.com (Martin Bligh)"\n NAME = "Kernbench"\n TIME = "SHORT"\n TEST_CLASS = "Kernel"\n TEST_CATEGORY = "Benchmark"\n TEST_TYPE = "client"\n \n DOC = """\n A standard CPU benchmark. Runs a kernel compile and measures the performance.\n """\n \n job.run_test(\'kernbench\')',
550 u'name': u'test_on_meta_hosts',
551 u'control_type': u'Client',
552 u'run_verify': 1,
553 u'priority': u'Medium',
554 u'owner': u'user0',
555 u'created_on': u'2008-07-30 22:15:43',
556 u'timeout': 144,
557 u'synch_count': 1,
558 u'id': 6761},
559 u'active': 1,
560 u'id': 193170} ])],
561 out_words_ok=['test_on_meta_hosts',
562 'host42', 'Queued', 'Running'],
563 out_words_no=['Athlon', 'Xeon', 'x286'])
566 def test_job_stat_job_no_host_in_qes(self):
567 results = copy.deepcopy(self.results)
568 self.run_cmd(argv=['atest', 'job', 'stat', '180',
569 '--ignore_site_file'],
570 rpcs=[('get_jobs_summary', {'id__in': ['180']}, True,
571 [results[0]]),
572 ('get_host_queue_entries', {'job__in': ['180']},
573 True,
574 [{u'status': u'Failed',
575 u'complete': 1,
576 u'host': None,
577 u'priority': 1,
578 u'meta_host': None,
579 u'job': {u'control_file': u"def run(machine):\n\thost = hosts.create_host(machine)\n\tat = autotest.Autotest(host)\n\tat.run_test('sleeptest')\n\nparallel_simple(run, machines)",
580 u'name': u'test_sleep',
581 u'control_type': u'Server',
582 u'priority': u'Medium',
583 u'owner': u'user0',
584 u'created_on': u'2008-03-18 11:27:29',
585 u'synch_count': 1,
586 u'id': 180},
587 u'active': 0,
588 u'id': 101084}])],
589 err_words_ok=['unassigned', 'meta-hosts'])
592 def test_job_stat_multi_jobs(self):
593 results = copy.deepcopy(self.results)
594 self.run_cmd(argv=['atest', 'job', 'stat', '180', '338',
595 '--ignore_site_file'],
596 rpcs=[('get_jobs_summary', {'id__in': ['180', '338']},
597 True, results),
598 ('get_host_queue_entries',
599 {'job__in': ['180', '338']},
600 True,
601 [{u'status': u'Failed',
602 u'complete': 1,
603 u'host': {u'status': u'Repair Failed',
604 u'locked': False,
605 u'hostname': u'host0',
606 u'invalid': True,
607 u'id': 4432,
608 u'synch_id': None},
609 u'priority': 1,
610 u'meta_host': None,
611 u'job': {u'control_file': u"def run(machine):\n\thost = hosts.create_host(machine)\n\tat = autotest.Autotest(host)\n\tat.run_test('sleeptest')\n\nparallel_simple(run, machines)",
612 u'name': u'test_sleep',
613 u'control_type': u'Server',
614 u'priority': u'Medium',
615 u'owner': u'user0',
616 u'created_on': u'2008-03-18 11:27:29',
617 u'synch_count': 1,
618 u'id': 180},
619 u'active': 0,
620 u'id': 101084},
621 {u'status': u'Failed',
622 u'complete': 1,
623 u'host': {u'status': u'Repair Failed',
624 u'locked': False,
625 u'hostname': u'host10',
626 u'invalid': True,
627 u'id': 4432,
628 u'synch_id': None},
629 u'priority': 1,
630 u'meta_host': None,
631 u'job': {u'control_file': u"def run(machine):\n\thost = hosts.create_host(machine)\n\tat = autotest.Autotest(host)\n\tat.run_test('sleeptest')\n\nparallel_simple(run, machines)",
632 u'name': u'test_sleep',
633 u'control_type': u'Server',
634 u'priority': u'Medium',
635 u'owner': u'user0',
636 u'created_on': u'2008-03-18 11:27:29',
637 u'synch_count': 1,
638 u'id': 338},
639 u'active': 0,
640 u'id': 101084}])],
641 out_words_ok=['test_job0', 'test_job1'])
644 def test_job_stat_multi_jobs_name_id(self):
645 self.run_cmd(argv=['atest', 'job', 'stat', 'mytest', '180',
646 '--ignore_site_file'],
647 rpcs=[('get_jobs_summary', {'id__in': ['180']},
648 True,
649 [{u'status_counts': {u'Aborted': 1},
650 u'control_file':
651 u"job.run_test('sleeptest')\n",
652 u'name': u'job0',
653 u'control_type': u'Server',
654 u'priority':
655 u'Medium',
656 u'owner': u'user0',
657 u'created_on':
658 u'2008-07-08 17:45:44',
659 u'synch_count': 2,
660 u'id': 180}]),
661 ('get_jobs_summary', {'name__in': ['mytest']},
662 True,
663 [{u'status_counts': {u'Queued': 1},
664 u'control_file':
665 u"job.run_test('sleeptest')\n",
666 u'name': u'mytest',
667 u'control_type': u'Client',
668 u'priority':
669 u'High',
670 u'owner': u'user0',
671 u'created_on': u'2008-07-08 12:17:47',
672 u'synch_count': 1,
673 u'id': 338}]),
674 ('get_host_queue_entries',
675 {'job__in': ['180']},
676 True,
677 [{u'status': u'Failed',
678 u'complete': 1,
679 u'host': {u'status': u'Repair Failed',
680 u'locked': False,
681 u'hostname': u'host0',
682 u'invalid': True,
683 u'id': 4432,
684 u'synch_id': None},
685 u'priority': 1,
686 u'meta_host': None,
687 u'job': {u'control_file': u"def run(machine):\n\thost = hosts.create_host(machine)\n\tat = autotest.Autotest(host)\n\tat.run_test('sleeptest')\n\nparallel_simple(run, machines)",
688 u'name': u'test_sleep',
689 u'control_type': u'Server',
690 u'synchronizing': 0,
691 u'priority': u'Medium',
692 u'owner': u'user0',
693 u'created_on': u'2008-03-18 11:27:29',
694 u'synch_count': 1,
695 u'id': 180},
696 u'active': 0,
697 u'id': 101084}]),
698 ('get_host_queue_entries',
699 {'job__name__in': ['mytest']},
700 True,
701 [{u'status': u'Failed',
702 u'complete': 1,
703 u'host': {u'status': u'Repair Failed',
704 u'locked': False,
705 u'hostname': u'host10',
706 u'invalid': True,
707 u'id': 4432,
708 u'synch_id': None},
709 u'priority': 1,
710 u'meta_host': None,
711 u'job': {u'control_file': u"def run(machine):\n\thost = hosts.create_host(machine)\n\tat = autotest.Autotest(host)\n\tat.run_test('sleeptest')\n\nparallel_simple(run, machines)",
712 u'name': u'test_sleep',
713 u'control_type': u'Server',
714 u'synchronizing': 0,
715 u'priority': u'Medium',
716 u'owner': u'user0',
717 u'created_on': u'2008-03-18 11:27:29',
718 u'synch_count': 1,
719 u'id': 338},
720 u'active': 0,
721 u'id': 101084}])],
722 out_words_ok=['job0', 'mytest', 'Aborted', 'Queued',
723 'Failed', 'Medium', 'High'])
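# Tests for 'atest job create'. A typical invocation exercised below is
#   atest job create -t sleeptest test_job0 -m host0
# with the expected 'generate_control_file' and 'create_job' RPC payloads
# spelled out per test.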
726 class job_create_unittest(cli_mock.cli_unittest):
727 ctrl_file = '\ndef step_init():\n job.next_step(\'step0\')\n\ndef step0():\n AUTHOR = "Autotest Team"\n NAME = "Sleeptest"\n TIME =\n "SHORT"\n TEST_CATEGORY = "Functional"\n TEST_CLASS = "General"\n\n TEST_TYPE = "client"\n \n DOC = """\n This test simply sleeps for 1\n second by default. It\'s a good way to test\n profilers and double check\n that autotest is working.\n The seconds argument can also be modified to\n make the machine sleep for as\n long as needed.\n """\n \n\n job.run_test(\'sleeptest\', seconds = 1)'
729 kernel_ctrl_file = 'kernel = \'kernel\'\ndef step_init():\n job.next_step([step_test])\n testkernel = job.kernel(\'kernel\')\n \n testkernel.install()\n testkernel.boot(args=\'console_always_print=1\')\n\ndef step_test():\n job.next_step(\'step0\')\n\ndef step0():\n AUTHOR = "Autotest Team"\n NAME = "Sleeptest"\n TIME = "SHORT"\n TEST_CATEGORY = "Functional"\n TEST_CLASS = "General"\n TEST_TYPE = "client"\n \n DOC = """\n This test simply sleeps for 1 second by default. It\'s a good way to test\n profilers and double check that autotest is working.\n The seconds argument can also be modified to make the machine sleep for as\n long as needed.\n """\n \n job.run_test(\'sleeptest\', seconds = 1)'
731 trivial_ctrl_file = 'print "Hello"\n'
733 data = {'priority': 'Medium', 'control_file': ctrl_file, 'hosts': ['host0'],
734 'name': 'test_job0', 'control_type': 'Client', 'email_list': '',
735 'meta_hosts': [], 'synch_count': 1, 'dependencies': []}
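# Baseline 'create_job' payload; individual tests copy and adjust it.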
738 def test_execute_create_job(self):
739 self.run_cmd(argv=['atest', 'job', 'create', '-t', 'sleeptest',
740 'test_job0', '-m', 'host0', '--ignore_site_file'],
741 rpcs=[('generate_control_file',
742 {'tests': ['sleeptest']},
743 True,
744 {'control_file' : self.ctrl_file,
745 'synch_count' : 1,
746 'is_server' : False,
747 'dependencies' : []}),
748 ('create_job', self.data, True, 180)],
749 out_words_ok=['test_job0', 'Created'],
750 out_words_no=['Uploading', 'Done'])
753 def test_execute_create_job_with_atomic_group(self):
754 data = dict(self.data)
755 data['atomic_group_name'] = 'my-atomic-group'
756 data['control_type'] = 'Server'
757 mock_ctrl_file = 'mock control file'
758 data['control_file'] = mock_ctrl_file
759 data['synch_count'] = 2
760 data['hosts'] = []
761 self.run_cmd(argv=['atest', 'job', 'create', '-t', 'mocktest',
762 'test_job0', '--ignore_site_file',
763 '-G', 'my-atomic-group'],
764 rpcs=[('generate_control_file',
765 {'tests': ['mocktest']},
766 True,
767 {'control_file' : mock_ctrl_file,
768 'synch_count' : 2,
769 'is_server' : True,
770 'dependencies' : []}),
771 ('create_job', data, True, 180)],
772 out_words_ok=['test_job0', 'Created'],
773 out_words_no=['Uploading', 'Done'])
776 def test_execute_create_job_with_control(self):
777 file_temp = cli_mock.create_file(self.ctrl_file)
778 self.run_cmd(argv=['atest', 'job', 'create', '-f', file_temp.name,
779 'test_job0', '-m', 'host0', '--ignore_site_file'],
780 rpcs=[('create_job', self.data, True, 42)],
781 out_words_ok=['test_job0', 'Created'],
782 out_words_no=['Uploading', 'Done'])
783 file_temp.clean()
786 def test_execute_create_job_with_control_and_kernel(self):
787 data = self.data.copy()
788 data['control_file'] = '# Made up control "file" for unittest.'
789 file_temp = cli_mock.create_file(self.trivial_ctrl_file)
790 self.run_cmd(argv=['atest', 'job', 'create', '-f', file_temp.name,
791 '-k', 'Kernel', 'test_job0', '-m', 'host0',
792 '--ignore_site_file'],
793 rpcs=[('generate_control_file',
794 {'client_control_file': self.trivial_ctrl_file,
795 'kernel': [{'version': 'Kernel'}]},
796 True,
797 {'control_file': data['control_file'],
798 'synch_count': 1,
799 'is_server': False,
800 'dependencies': []}),
801 ('create_job', data, True, 42)],
802 out_words_ok=['test_job0', 'Created',
803 'Uploading', 'Done'])
804 file_temp.clean()
807 def test_execute_create_job_with_control_and_email(self):
808 data = self.data.copy()
809 data['email_list'] = 'em'
810 file_temp = cli_mock.create_file(self.ctrl_file)
811 self.run_cmd(argv=['atest', 'job', 'create', '-f', file_temp.name,
812 'test_job0', '-m', 'host0', '-e', 'em',
813 '--ignore_site_file'],
814 rpcs=[('create_job', data, True, 42)],
815 out_words_ok=['test_job0', 'Created'],
816 out_words_no=['Uploading', 'Done'])
817 file_temp.clean()
820 def test_execute_create_job_with_control_and_dependencies(self):
821 data = self.data.copy()
822 data['dependencies'] = ['dep1', 'dep2']
823 file_temp = cli_mock.create_file(self.ctrl_file)
824 self.run_cmd(argv=['atest', 'job', 'create', '-f', file_temp.name,
825 'test_job0', '-m', 'host0', '-d', 'dep1, dep2 ',
826 '--ignore_site_file'],
827 rpcs=[('create_job', data, True, 42)],
828 out_words_ok=['test_job0', 'Created'],
829 out_words_no=['Uploading', 'Done'])
830 file_temp.clean()
833 def test_execute_create_job_with_control_and_comma_dependencies(self):
834 data = self.data.copy()
835 data['dependencies'] = ['dep2,False', 'dep1,True']
836 file_temp = cli_mock.create_file(self.ctrl_file)
837 self.run_cmd(argv=['atest', 'job', 'create', '-f', file_temp.name,
838 'test_job0', '-m', 'host0', '-d',
839 'dep1\,True, dep2\,False ', '--ignore_site_file'],
840 rpcs=[('create_job', data, True, 42)],
841 out_words_ok=['test_job0', 'Created'],
842 out_words_no=['Uploading', 'Done'])
843 file_temp.clean()
846 def test_execute_create_job_with_synch_count(self):
847 data = self.data.copy()
848 data['synch_count'] = 2
849 file_temp = cli_mock.create_file(self.ctrl_file)
850 self.run_cmd(argv=['atest', 'job', 'create', '-f', file_temp.name,
851 'test_job0', '-m', 'host0', '-y', '2',
852 '--ignore_site_file'],
853 rpcs=[('create_job', data, True, 42)],
854 out_words_ok=['test_job0', 'Created'],
855 out_words_no=['Uploading', 'Done'])
856 file_temp.clean()
859 def test_execute_create_job_with_test_and_dependencies(self):
860 data = self.data.copy()
861 data['dependencies'] = ['dep1', 'dep2', 'dep3']
862 self.run_cmd(argv=['atest', 'job', 'create', '-t', 'sleeptest',
863 'test_job0', '-m', 'host0', '-d', 'dep1, dep2 ',
864 '--ignore_site_file'],
865 rpcs=[('generate_control_file',
866 {'tests': ['sleeptest']},
867 True,
868 {'control_file' : self.ctrl_file,
869 'synch_count' : 1,
870 'is_server' : False,
871 'dependencies' : ['dep3']}),
872 ('create_job', data, True, 42)],
873 out_words_ok=['test_job0', 'Created'],
874 out_words_no=['Uploading', 'Done'])
877 def test_execute_create_job_with_test_and_comma_dependencies(self):
878 data = self.data.copy()
879 data['dependencies'] = ['dep1,True', 'dep2,False', 'dep3,123']
880 self.run_cmd(argv=['atest', 'job', 'create', '-t', 'sleeptest',
881 'test_job0', '-m', 'host0', '-d',
882 'dep1\,True dep2\,False ', '--ignore_site_file'],
883 rpcs=[('generate_control_file',
884 {'tests': ['sleeptest']},
885 True,
886 {'control_file' : self.ctrl_file,
887 'synch_count' : 1,
888 'is_server' : False,
889 'dependencies' : ['dep3,123']}),
890 ('create_job', data, True, 42)],
891 out_words_ok=['test_job0', 'Created'],
892 out_words_no=['Uploading', 'Done'])
895 def test_execute_create_job_with_kernel(self):
896 data = self.data.copy()
897 data['control_file'] = self.kernel_ctrl_file
898 self.run_cmd(argv=['atest', 'job', 'create', '-t', 'sleeptest',
899 '-k', 'kernel', 'test_job0', '-m', 'host0',
900 '--ignore_site_file'],
901 rpcs=[('generate_control_file',
902 {'tests': ['sleeptest'],
903 'kernel': [{'version': 'kernel'}]
904 },
905 True,
906 {'control_file' : self.kernel_ctrl_file,
907 'synch_count' : 1,
908 'is_server' : False,
909 'dependencies' : []}),
910 ('create_job', data, True, 180)],
911 out_words_ok=['test_job0', 'Created',
912 'Uploading', 'Done'])
915 def test_execute_create_job_with_kernels_and_cmdline(self):
916 data = self.data.copy()
917 data['control_file'] = self.kernel_ctrl_file
918 self.run_cmd(argv=['atest', 'job', 'create', '-t', 'sleeptest',
919 '-k', 'kernel1,kernel2', '--kernel-cmdline',
920 'arg1 arg2', 'test_job0', '-m', 'host0',
921 '--ignore_site_file'],
922 rpcs=[('generate_control_file',
923 {'tests': ['sleeptest'],
924 'kernel': [{'version': 'kernel1',
925 'cmdline': 'arg1 arg2'},
926 {'version': 'kernel2',
927 'cmdline': 'arg1 arg2'}]
928 },
929 True,
930 {'control_file' : self.kernel_ctrl_file,
931 'synch_count' : 1,
932 'is_server' : False,
933 'dependencies' : []}),
934 ('create_job', data, True, 180)],
935 out_words_ok=['test_job0', 'Created',
936 'Uploading', 'Done'])
939 def test_execute_create_job_with_kernel_spaces(self):
940 data = self.data.copy()
941 data['control_file'] = self.kernel_ctrl_file
942 data['name'] = 'test job with spaces'
943 self.run_cmd(argv=['atest', 'job', 'create', '-t', 'sleeptest',
944 '-k', 'kernel', 'test job with spaces',
945 '-m', 'host0', '--ignore_site_file'],
946 rpcs=[('generate_control_file',
947 {'tests': ['sleeptest'],
948 'kernel': [{'version': 'kernel'}]
949 },
950 True,
951 {'control_file' : self.kernel_ctrl_file,
952 'synch_count' : 1,
953 'is_server' : False,
954 'dependencies' : []}),
955 ('create_job', data, True, 180)],
956 # This is actually 7 spaces: the extra single quote that
957 # gets displayed before "test" pushes the tab expansion
958 # to the next 8-char boundary, which is 7 characters
959 # away. Hence the 7 spaces in out_words_ok.
960 # The tab has been converted to spaces by print.
961 out_words_ok=['test job with spaces', 'Created',
962 'id', '180'])
965 def test_execute_create_job_no_args(self):
966 testjob = job.job_create()
967 sys.argv = ['atest', 'job', 'create', '--ignore_site_file']
968 self.god.mock_io()
969 (sys.exit.expect_call(mock.anything_comparator())
970 .and_raises(cli_mock.ExitException))
971 self.assertRaises(cli_mock.ExitException, testjob.parse)
972 self.god.unmock_io()
973 self.god.check_playback()
976 def test_execute_create_job_no_hosts(self):
977 testjob = job.job_create()
978 file_temp = cli_mock.create_file(self.ctrl_file)
979 sys.argv = ['atest', '-f', file_temp.name, 'test_job0',
980 '--ignore_site_file']
981 self.god.mock_io()
982 (sys.exit.expect_call(mock.anything_comparator())
983 .and_raises(cli_mock.ExitException))
984 self.assertRaises(cli_mock.ExitException, testjob.parse)
985 self.god.unmock_io()
986 self.god.check_playback()
987 file_temp.clean()
990 def test_execute_create_job_cfile_and_tests(self):
991 testjob = job.job_create()
992 sys.argv = ['atest', 'job', 'create', '-t', 'sleeptest', '-f',
993 'control_file', 'test_job0', '-m', 'host0',
994 '--ignore_site_file']
995 self.god.mock_io()
996 (sys.exit.expect_call(mock.anything_comparator())
997 .and_raises(cli_mock.ExitException))
998 self.assertRaises(cli_mock.ExitException, testjob.parse)
999 self.god.unmock_io()
1000 self.god.check_playback()
1003 def test_execute_create_job_cfile_and_kernel(self):
1004 testjob = job.job_create()
1005 sys.argv = ['atest', 'job', 'create', '-f', 'control_file', '-k',
1006 'kernel', 'test_job0', '-m', 'host0', '--ignore_site_file']
1007 self.god.mock_io()
1008 (sys.exit.expect_call(mock.anything_comparator())
1009 .and_raises(cli_mock.ExitException))
1010 self.assertRaises(cli_mock.ExitException, testjob.parse)
1011 self.god.unmock_io()
1012 self.god.check_playback()
1015 def test_execute_create_job_bad_cfile(self):
1016 testjob = job.job_create()
1017 sys.argv = ['atest', 'job', 'create', '-f', 'control_file',
1018 'test_job0', '-m', 'host0', '--ignore_site_file']
1019 self.god.mock_io()
1020 (sys.exit.expect_call(mock.anything_comparator())
1021 .and_raises(IOError))
1022 self.assertRaises(IOError, testjob.parse)
1023 self.god.unmock_io()
1026 def test_execute_create_job_bad_priority(self):
1027 testjob = job.job_create()
1028 sys.argv = ['atest', 'job', 'create', '-t', 'sleeptest', '-p', 'Uber',
1029 '-m', 'host0', 'test_job0', '--ignore_site_file']
1030 self.god.mock_io()
1031 (sys.exit.expect_call(mock.anything_comparator())
1032 .and_raises(cli_mock.ExitException))
1033 self.assertRaises(cli_mock.ExitException, testjob.parse)
1034 self.god.unmock_io()
1035 self.god.check_playback()
1038 def test_execute_create_job_with_mfile(self):
1039 data = self.data.copy()
1040 data['hosts'] = ['host3', 'host2', 'host1', 'host0']
1041 ctemp = cli_mock.create_file(self.ctrl_file)
1042 file_temp = cli_mock.create_file('host0\nhost1\nhost2\nhost3')
1043 self.run_cmd(argv=['atest', 'job', 'create', '--mlist', file_temp.name,
1044 '-f', ctemp.name, 'test_job0', '--ignore_site_file'],
1045 rpcs=[('create_job', data, True, 42)],
1046 out_words_ok=['test_job0', 'Created'])
1047 ctemp.clean()
1048 file_temp.clean()
1051 def test_execute_create_job_with_timeout(self):
1052 data = self.data.copy()
1053 data['timeout'] = '222'
1054 file_temp = cli_mock.create_file(self.ctrl_file)
1055 self.run_cmd(argv=['atest', 'job', 'create', '-f', file_temp.name,
1056 'test_job0', '-m', 'host0', '-o', '222',
1057 '--ignore_site_file'],
1058 rpcs=[('create_job', data, True, 42)],
1059 out_words_ok=['test_job0', 'Created'],)
1060 file_temp.clean()
1063 def test_execute_create_job_with_max_runtime(self):
1064 data = self.data.copy()
1065 data['max_runtime_hrs'] = '222'
1066 file_temp = cli_mock.create_file(self.ctrl_file)
1067 self.run_cmd(argv=['atest', 'job', 'create', '-f', file_temp.name,
1068 'test_job0', '-m', 'host0', '--max_runtime', '222',
1069 '--ignore_site_file'],
1070 rpcs=[('create_job', data, True, 42)],
1071 out_words_ok=['test_job0', 'Created'],)
1072 file_temp.clean()
1076 def test_execute_create_job_with_noverify(self):
1077 data = self.data.copy()
1078 data['run_verify'] = False
1079 file_temp = cli_mock.create_file(self.ctrl_file)
1080 self.run_cmd(argv=['atest', 'job', 'create', '-f', file_temp.name,
1081 'test_job0', '-m', 'host0', '-n',
1082 '--ignore_site_file'],
1083 rpcs=[('create_job', data, True, 42)],
1084 out_words_ok=['test_job0', 'Created'],)
1085 file_temp.clean()
1088 def test_execute_create_job_oth(self):
1089 data = self.data.copy()
1090 data['hosts'] = []
1091 data['one_time_hosts'] = ['host0']
1092 self.run_cmd(argv=['atest', 'job', 'create', '-t', 'sleeptest',
1093 'test_job0', '--one-time-hosts', 'host0'],
1094 rpcs=[('generate_control_file',
1095 {'tests': ['sleeptest']},
1096 True,
1097 {'control_file' : self.ctrl_file,
1098 'synch_count' : 1,
1099 'is_server' : False,
1100 'dependencies' : []}),
1101 ('create_job', data, True, 180)],
1102 out_words_ok=['test_job0', 'Created'],
1103 out_words_no=['Uploading', 'Done'])
1106 def test_execute_create_job_multi_oth(self):
1107 data = self.data.copy()
1108 data['hosts'] = []
1109 data['one_time_hosts'] = ['host1', 'host0']
1110 self.run_cmd(argv=['atest', 'job', 'create', '-t', 'sleeptest',
1111 'test_job0', '--one-time-hosts', 'host0,host1'],
1112 rpcs=[('generate_control_file',
1113 {'tests': ['sleeptest']},
1114 True,
1115 {'control_file' : self.ctrl_file,
1116 'synch_count' : 1,
1117 'is_server' : False,
1118 'dependencies' : []}),
1119 ('create_job', data, True, 180)],
1120 out_words_ok=['test_job0', 'Created'],
1121 out_words_no=['Uploading', 'Done'])
1124 def test_execute_create_job_oth_exists(self):
1125 data = self.data.copy()
1126 data['hosts'] = []
1127 data['one_time_hosts'] = ['host0']
1128 self.run_cmd(argv=['atest', 'job', 'create', '-t', 'sleeptest',
1129 'test_job0', '--one-time-hosts', 'host0'],
1130 rpcs=[('generate_control_file',
1131 {'tests': ['sleeptest']},
1132 True,
1133 {'control_file' : self.ctrl_file,
1134 'synch_count' : 1,
1135 'is_server' : False,
1136 'dependencies' : []}),
1137 ('create_job', data, False,
1138 '''ValidationError: {'hostname': 'host0 '''
1139 '''already exists in the autotest DB. '''
1140 '''Select it rather than entering it as '''
1141 '''a one time host.'}''')],
1142 out_words_no=['test_job0', 'Created'],
1143 err_words_ok=['failed', 'already exists'])
1146 def test_execute_create_job_with_control_and_labels(self):
1147 data = self.data.copy()
1148 data['hosts'] = ['host0', 'host1', 'host2']
1149 file_temp = cli_mock.create_file(self.ctrl_file)
1150 self.run_cmd(argv=['atest', 'job', 'create', '-f', file_temp.name,
1151 'test_job0', '-m', 'host0', '-b', 'label1,label2',
1152 '--ignore_site_file'],
1153 rpcs=[('get_hosts', {'multiple_labels': ['label1',
1154 'label2']}, True,
1155 [{u'status': u'Running', u'lock_time': None,
1156 u'hostname': u'host1', u'locked': False,
1157 u'locked_by': None, u'invalid': False, u'id': 42,
1158 u'labels': [u'label1'], u'platform':
1159 u'Warp18_Diskfull', u'protection':
1160 u'Repair software only', u'dirty':
1161 True, u'synch_id': None},
1162 {u'status': u'Running', u'lock_time': None,
1163 u'hostname': u'host2', u'locked': False,
1164 u'locked_by': None, u'invalid': False, u'id': 43,
1165 u'labels': [u'label2'], u'platform':
1166 u'Warp18_Diskfull', u'protection':
1167 u'Repair software only', u'dirty': True,
1168 u'synch_id': None}]),
1169 ('create_job', data, True, 42)],
1170 out_words_ok=['test_job0', 'Created'],
1171 out_words_no=['Uploading', 'Done'])
1172 file_temp.clean()
1175 def test_execute_create_job_with_label_and_duplicate_hosts(self):
1176 data = self.data.copy()
1177 data['hosts'] = ['host1', 'host0']
1178 file_temp = cli_mock.create_file(self.ctrl_file)
1179 self.run_cmd(argv=['atest', 'job', 'create', '-f', file_temp.name,
1180 'test_job0', '-m', 'host0,host1', '-b', 'label1',
1181 '--ignore_site_file'],
1182 rpcs=[('get_hosts', {'multiple_labels': ['label1']}, True,
1183 [{u'status': u'Running', u'lock_time': None,
1184 u'hostname': u'host1', u'locked': False,
1185 u'locked_by': None, u'invalid': False, u'id': 42,
1186 u'labels': [u'label1'], u'platform':
1187 u'Warp18_Diskfull', u'protection':
1188 u'Repair software only', u'dirty':
1189 True, u'synch_id': None}]),
1190 ('create_job', data, True, 42)],
1191 out_words_ok=['test_job0', 'Created'],
1192 out_words_no=['Uploading', 'Done'])
1193 file_temp.clean()
1196 def test_execute_create_job_with_label_commas_and_duplicate_hosts(self):
1197 data = self.data.copy()
1198 data['hosts'] = ['host1', 'host0']
1199 file_temp = cli_mock.create_file(self.ctrl_file)
1200 self.run_cmd(argv=['atest', 'job', 'create', '-f', file_temp.name,
1201 'test_job0', '-m', 'host0,host1', '-b',
1202 'label1,label\\,2', '--ignore_site_file'],
1203 rpcs=[('get_hosts', {'multiple_labels': ['label1',
1204 'label,2']}, True,
1205 [{u'status': u'Running', u'lock_time': None,
1206 u'hostname': u'host1', u'locked': False,
1207 u'locked_by': None, u'invalid': False, u'id': 42,
1208 u'labels': [u'label1', u'label,2'], u'platform':
1209 u'Warp18_Diskfull', u'protection':
1210 u'Repair software only', u'dirty':
1211 True, u'synch_id': None}]),
1212 ('create_job', data, True, 42)],
1213 out_words_ok=['test_job0', 'Created'],
1214 out_words_no=['Uploading', 'Done'])
1215 file_temp.clean()
1218 def test_execute_create_job_with_label_escaping_and_duplicate_hosts(self):
1219 data = self.data.copy()
1220 data['hosts'] = ['host1', 'host0']
1221 file_temp = cli_mock.create_file(self.ctrl_file)
1222 self.run_cmd(argv=['atest', 'job', 'create', '-f', file_temp.name,
1223 'test_job0', '-m', 'host0,host1', '-b',
1224 'label1,label\\,2\\\\,label3', '--ignore_site_file'],
1225 rpcs=[('get_hosts', {'multiple_labels': ['label,2\\',
1226 'label1', 'label3']}, True,
1227 [{u'status': u'Running', u'lock_time': None,
1228 u'hostname': u'host1', u'locked': False,
1229 u'locked_by': None, u'invalid': False, u'id': 42,
1230 u'labels': [u'label1', u'label,2\\', u'label3'],
1231 u'platform': u'Warp18_Diskfull', u'protection':
1232 u'Repair software only', u'dirty':
1233 True, u'synch_id': None}]),
1234 ('create_job', data, True, 42)],
1235 out_words_ok=['test_job0', 'Created'],
1236 out_words_no=['Uploading', 'Done'])
1237 file_temp.clean()
1240 def _test_parse_hosts(self, args, exp_hosts=[], exp_meta_hosts=[]):
1241 testjob = job.job_create_or_clone()
1242 (hosts, meta_hosts) = testjob._parse_hosts(args)
1243 self.assertEqualNoOrder(hosts, exp_hosts)
1244 self.assertEqualNoOrder(meta_hosts, exp_meta_hosts)
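# _parse_hosts() separates plain host names from meta-host entries of the
# form '<count>*<label>'; '5*meta0' expands to five 'meta0' meta-hosts and a
# bare '*meta0' counts as one.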
1247 def test_parse_hosts_regular(self):
1248 self._test_parse_hosts(['host0'], ['host0'])
1251 def test_parse_hosts_regulars(self):
1252 self._test_parse_hosts(['host0', 'host1'], ['host0', 'host1'])
1255 def test_parse_hosts_meta_one(self):
1256 self._test_parse_hosts(['*meta0'], [], ['meta0'])
1259 def test_parse_hosts_meta_five(self):
1260 self._test_parse_hosts(['5*meta0'], [], ['meta0']*5)
1263 def test_parse_hosts_metas_five(self):
1264 self._test_parse_hosts(['5*meta0', '2*meta1'], [],
1265 ['meta0']*5 + ['meta1']*2)
1268 def test_parse_hosts_mix(self):
1269 self._test_parse_hosts(['5*meta0', 'host0', '2*meta1', 'host1',
1270 '*meta2'], ['host0', 'host1'],
1271 ['meta0']*5 + ['meta1']*2 + ['meta2'])
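# Tests for 'atest job clone' (and the legacy 'job create --clone' spelling):
# reusing the original hosts and meta-hosts with -r, or supplying new ones
# with -m / --one-time-hosts.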
1274 class job_clone_unittest(cli_mock.cli_unittest):
1275 job_data = {'control_file': u'NAME = \'Server Sleeptest\'\nAUTHOR = \'mbligh@google.com (Martin Bligh)\'\nTIME = \'SHORT\'\nTEST_CLASS = \'Software\'\nTEST_CATEGORY = \'Functional\'\nTEST_TYPE = \'server\'\nEXPERIMENTAL = \'False\'\n\nDOC = """\nruns sleep for one second on the list of machines.\n"""\n\ndef run(machine):\n host = hosts.create_host(machine)\n job.run_test(\'sleeptest\')\n\njob.parallel_simple(run, machines)\n',
1276 'control_type': u'Server',
1277 'dependencies': [],
1278 'email_list': u'',
1279 'max_runtime_hrs': 480,
1280 'parse_failed_repair': True,
1281 'priority': u'Medium',
1282 'reboot_after': u'Always',
1283 'reboot_before': u'If dirty',
1284 'run_verify': True,
1285 'synch_count': 1,
1286 'timeout': 480}
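# Base job fields; setUp() derives both the canned get_info_for_clone reply
# (job_data_clone_info) and the expected create_job payload (job_data_cloned)
# from this dict.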
1288 local_hosts = [{u'acls': [u'acl0'],
1289 u'atomic_group': None,
1290 u'attributes': {},
1291 u'dirty': False,
1292 u'hostname': u'host0',
1293 u'id': 8,
1294 u'invalid': False,
1295 u'labels': [u'label0', u'label1'],
1296 u'lock_time': None,
1297 u'locked': False,
1298 u'locked_by': None,
1299 u'other_labels': u'label0, label1',
1300 u'platform': u'plat0',
1301 u'protection': u'Repair software only',
1302 u'status': u'Ready',
1303 u'synch_id': None},
1304 {u'acls': [u'acl0'],
1305 u'atomic_group': None,
1306 u'attributes': {},
1307 u'dirty': False,
1308 u'hostname': u'host1',
1309 u'id': 9,
1310 u'invalid': False,
1311 u'labels': [u'label0', u'label1'],
1312 u'lock_time': None,
1313 u'locked': False,
1314 u'locked_by': None,
1315 u'other_labels': u'label0, label1',
1316 u'platform': u'plat0',
1317 u'protection': u'Repair software only',
1318 u'status': u'Ready',
1319 u'synch_id': None}]
1322 def setUp(self):
1323 super(job_clone_unittest, self).setUp()
1324 self.job_data_clone_info = copy.deepcopy(self.job_data)
1325 self.job_data_clone_info['created_on'] = '2009-07-23 16:21:29'
1326 self.job_data_clone_info['name'] = 'testing_clone'
1327 self.job_data_clone_info['id'] = 42
1328 self.job_data_clone_info['owner'] = 'user0'
1330 self.job_data_cloned = copy.deepcopy(self.job_data)
1331 self.job_data_cloned['name'] = 'cloned'
1332 self.job_data_cloned['hosts'] = [u'host0']
1333 self.job_data_cloned['meta_hosts'] = []
1336 def test_backward_compat(self):
1337 self.run_cmd(argv=['atest', 'job', 'create', '--clone', '42',
1338 '-r', 'cloned'],
1339 rpcs=[('get_info_for_clone', {'id': '42',
1340 'preserve_metahosts': True},
1341 True,
1342 {u'atomic_group_name': None,
1343 u'hosts': [{u'acls': [u'acl0'],
1344 u'atomic_group': None,
1345 u'attributes': {},
1346 u'dirty': False,
1347 u'hostname': u'host0',
1348 u'id': 4378,
1349 u'invalid': False,
1350 u'labels': [u'label0', u'label1'],
1351 u'lock_time': None,
1352 u'locked': False,
1353 u'locked_by': None,
1354 u'other_labels': u'label0, label1',
1355 u'platform': u'plat0',
1356 u'protection': u'Repair software only',
1357 u'status': u'Ready',
1358 u'synch_id': None}],
1359 u'job': self.job_data_clone_info,
1360 u'meta_host_counts': {}}),
1361 ('create_job', self.job_data_cloned, True, 43)],
1362 out_words_ok=['Created job', '43'])
1365 def test_clone_reuse_hosts(self):
1366 self.job_data_cloned['hosts'] = [u'host0', 'host1']
1367 self.run_cmd(argv=['atest', 'job', 'clone', '--id', '42',
1368 '-r', 'cloned'],
1369 rpcs=[('get_info_for_clone', {'id': '42',
1370 'preserve_metahosts': True},
1371 True,
1372 {u'atomic_group_name': None,
1373 u'hosts': self.local_hosts,
1374 u'job': self.job_data_clone_info,
1375 u'meta_host_counts': {}}),
1376 ('create_job', self.job_data_cloned, True, 43)],
1377 out_words_ok=['Created job', '43'])
1380 def test_clone_reuse_metahosts(self):
1381 self.job_data_cloned['hosts'] = []
1382 self.job_data_cloned['meta_hosts'] = ['type1']*4 + ['type0']
1383 self.run_cmd(argv=['atest', 'job', 'clone', '--id', '42',
1384 '-r', 'cloned'],
1385 rpcs=[('get_info_for_clone', {'id': '42',
1386 'preserve_metahosts': True},
1387 True,
1388 {u'atomic_group_name': None,
1389 u'hosts': [],
1390 u'job': self.job_data_clone_info,
1391 u'meta_host_counts': {u'type0': 1,
1392 u'type1': 4}}),
1393 ('create_job', self.job_data_cloned, True, 43)],
1394 out_words_ok=['Created job', '43'])
1397 def test_clone_reuse_both(self):
1398 self.job_data_cloned['hosts'] = [u'host0', 'host1']
1399 self.job_data_cloned['meta_hosts'] = ['type1']*4 + ['type0']
1400 self.run_cmd(argv=['atest', 'job', 'clone', '--id', '42',
1401 '-r', 'cloned'],
1402 rpcs=[('get_info_for_clone', {'id': '42',
1403 'preserve_metahosts': True},
1404 True,
1405 {u'atomic_group_name': None,
1406 u'hosts': self.local_hosts,
1407 u'job': self.job_data_clone_info,
1408 u'meta_host_counts': {u'type0': 1,
1409 u'type1': 4}}),
1410 ('create_job', self.job_data_cloned, True, 43)],
1411 out_words_ok=['Created job', '43'])
1414 def test_clone_no_hosts(self):
1415 self.run_cmd(argv=['atest', 'job', 'clone', '--id', '42', 'cloned'],
1416 exit_code=1,
1417 out_words_ok=['usage'],
1418 err_words_ok=['machine'])
1421 def test_clone_reuse_and_hosts(self):
1422 self.run_cmd(argv=['atest', 'job', 'clone', '--id', '42',
1423 '-r', '-m', 'host5', 'cloned'],
1424 exit_code=1,
1425 out_words_ok=['usage'],
1426 err_words_ok=['specify'])
1429 def test_clone_new_multiple_hosts(self):
1430 self.job_data_cloned['hosts'] = [u'host5', 'host4', 'host3']
1431 self.run_cmd(argv=['atest', 'job', 'clone', '--id', '42',
1432 '-m', 'host5,host4,host3', 'cloned'],
1433 rpcs=[('get_info_for_clone', {'id': '42',
1434 'preserve_metahosts': False},
1435 True,
1436 {u'atomic_group_name': None,
1437 u'hosts': self.local_hosts,
1438 u'job': self.job_data_clone_info,
1439 u'meta_host_counts': {}}),
1440 ('create_job', self.job_data_cloned, True, 43)],
1441 out_words_ok=['Created job', '43'])
1444 def test_clone_oth(self):
1445 self.job_data_cloned['hosts'] = []
1446 self.job_data_cloned['one_time_hosts'] = [u'host5']
1447 self.run_cmd(argv=['atest', 'job', 'clone', '--id', '42',
1448 '--one-time-hosts', 'host5', 'cloned'],
1449 rpcs=[('get_info_for_clone', {'id': '42',
1450 'preserve_metahosts': False},
1451 True,
1452 {u'atomic_group_name': None,
1453 u'hosts': self.local_hosts,
1454 u'job': self.job_data_clone_info,
1455 u'meta_host_counts': {}}),
1456 ('create_job', self.job_data_cloned, True, 43)],
1457 out_words_ok=['Created job', '43'])
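# Tests for 'atest job abort': aborting by id issues a single
# 'abort_host_queue_entries' RPC filtered on job__id__in.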
1460 class job_abort_unittest(cli_mock.cli_unittest):
1461 results = [{u'status_counts': {u'Aborted': 1}, u'control_file':
1462 u"job.run_test('sleeptest')\n", u'name': u'test_job0',
1463 u'control_type': u'Server', u'priority':
1464 u'Medium', u'owner': u'user0', u'created_on':
1465 u'2008-07-08 17:45:44', u'synch_count': 2, u'id': 180}]
1467 def test_execute_job_abort(self):
1468 self.run_cmd(argv=['atest', 'job', 'abort', '180',
1469 '--ignore_site_file'],
1470 rpcs=[('abort_host_queue_entries',
1471 {'job__id__in': ['180']}, True, None)],
1472 out_words_ok=['Aborting', '180'])
1475 if __name__ == '__main__':
1476 unittest.main()