# server/hosts/abstract_ssh.py
import os, time, types, socket, shutil, glob, logging, traceback
from autotest_lib.client.common_lib import autotemp, error, logging_manager
from autotest_lib.server import utils, autotest
from autotest_lib.server.hosts import remote
from autotest_lib.client.common_lib.global_config import global_config


get_value = global_config.get_config_value
enable_master_ssh = get_value('AUTOSERV', 'enable_master_ssh', type=bool,
                              default=False)


def _make_ssh_cmd_default(user="root", port=22, opts='', hosts_file='/dev/null',
                          connect_timeout=30, alive_interval=300):
    base_command = ("/usr/bin/ssh -a -x %s -o StrictHostKeyChecking=no "
                    "-o UserKnownHostsFile=%s -o BatchMode=yes "
                    "-o ConnectTimeout=%d -o ServerAliveInterval=%d "
                    "-l %s -p %d")
    assert isinstance(connect_timeout, (int, long))
    assert connect_timeout > 0  # can't disable the timeout
    return base_command % (opts, hosts_file, connect_timeout,
                           alive_interval, user, port)


make_ssh_command = utils.import_site_function(
    __file__, "autotest_lib.server.hosts.site_host", "make_ssh_command",
    _make_ssh_cmd_default)
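
# For reference: with the default arguments above, and no site-specific
# override installed, make_ssh_command() expands to a command line of the form
#
#   /usr/bin/ssh -a -x  -o StrictHostKeyChecking=no \
#       -o UserKnownHostsFile=/dev/null -o BatchMode=yes \
#       -o ConnectTimeout=30 -o ServerAliveInterval=300 -l root -p 22
#
# The doubled space after "-x" comes from the empty `opts` argument; callers
# below pass the master-SSH ControlPath option through that slot.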


# import site specific Host class
SiteHost = utils.import_site_class(
    __file__, "autotest_lib.server.hosts.site_host", "SiteHost",
    remote.RemoteHost)


class AbstractSSHHost(SiteHost):
    """
    This class represents a generic implementation of most of the
    framework necessary for controlling a host via ssh. It implements
    almost all of the abstract Host methods, except for the core
    Host.run method.
    """

    def _initialize(self, hostname, user="root", port=22, password="",
                    *args, **dargs):
        super(AbstractSSHHost, self)._initialize(hostname=hostname,
                                                 *args, **dargs)
        self.ip = socket.getaddrinfo(self.hostname, None)[0][4][0]
        self.user = user
        self.port = port
        self.password = password
        self._use_rsync = None
        self.known_hosts_file = os.tmpfile()
        known_hosts_fd = self.known_hosts_file.fileno()
        self.known_hosts_fd = '/dev/fd/%s' % known_hosts_fd

        """
        Master SSH connection background job, socket temp directory and socket
        control path option. If master-SSH is enabled, these fields will be
        initialized by start_master_ssh when a new SSH connection is initiated.
        """
        self.master_ssh_job = None
        self.master_ssh_tempdir = None
        self.master_ssh_option = ''


    def use_rsync(self):
        if self._use_rsync is not None:
            return self._use_rsync

        # Check if rsync is available on the remote host. If it's not,
        # don't try to use it for any future file transfers.
        self._use_rsync = self._check_rsync()
        if not self._use_rsync:
            logging.warn("rsync not available on remote host %s -- disabled",
                         self.hostname)
        return self._use_rsync


    def _check_rsync(self):
        """
        Check if rsync is available on the remote host.
        """
        try:
            self.run("rsync --version", stdout_tee=None, stderr_tee=None)
        except error.AutoservRunError:
            return False
        return True


    def _encode_remote_paths(self, paths, escape=True):
        """
        Given a list of file paths, encodes it as a single remote path, in
        the style used by rsync and scp.
        """
        if escape:
            paths = [utils.scp_remote_escape(path) for path in paths]
        return '%s@%s:"%s"' % (self.user, self.hostname, " ".join(paths))
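
    # For example, for user "root" on host "myhost", the paths
    # ['/a/b', '/c d'] are encoded (with escaping) roughly as
    #
    #   root@myhost:"/a/b /c\ d"
    #
    # i.e. a single rsync/scp-style remote argument covering all paths; the
    # exact escaping is whatever utils.scp_remote_escape() produces.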


    def _make_rsync_cmd(self, sources, dest, delete_dest, preserve_symlinks):
        """
        Given a list of source paths and a destination path, produces the
        appropriate rsync command for copying them. Remote paths must be
        pre-encoded.
        """
        ssh_cmd = make_ssh_command(user=self.user, port=self.port,
                                   opts=self.master_ssh_option,
                                   hosts_file=self.known_hosts_fd)
        if delete_dest:
            delete_flag = "--delete"
        else:
            delete_flag = ""
        if preserve_symlinks:
            symlink_flag = ""
        else:
            symlink_flag = "-L"
        command = "rsync %s %s --timeout=1800 --rsh='%s' -az %s %s"
        return command % (symlink_flag, delete_flag, ssh_cmd,
                          " ".join(sources), dest)
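
    # Putting it together, a transfer with delete_dest=True and
    # preserve_symlinks=False ends up running something of the form
    #
    #   rsync -L --delete --timeout=1800 --rsh='<ssh command>' -az \
    #       <sources> <dest>
    #
    # where <ssh command> is the make_ssh_command() string shown earlier,
    # extended with the master-SSH ControlPath option when one is active.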


    def _make_ssh_cmd(self, cmd):
        """
        Create a base ssh command string for the host which can be used
        to run commands directly on the machine
        """
        base_cmd = make_ssh_command(user=self.user, port=self.port,
                                    opts=self.master_ssh_option,
                                    hosts_file=self.known_hosts_fd)

        return '%s %s "%s"' % (base_cmd, self.hostname, utils.sh_escape(cmd))


    def _make_scp_cmd(self, sources, dest):
        """
        Given a list of source paths and a destination path, produces the
        appropriate scp command for copying them. Remote paths must be
        pre-encoded.
        """
        command = ("scp -rq %s -o StrictHostKeyChecking=no "
                   "-o UserKnownHostsFile=%s -P %d %s '%s'")
        return command % (self.master_ssh_option, self.known_hosts_fd,
                          self.port, " ".join(sources), dest)
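
    # The resulting command has the shape
    #
    #   scp -rq <master-ssh opts> -o StrictHostKeyChecking=no \
    #       -o UserKnownHostsFile=<known_hosts_fd> -P <port> <sources> '<dest>'
    #
    # Unlike the rsync path, scp cannot delete stale files at the destination
    # or mimic rsync's trailing-slash semantics; get_file()/send_file() below
    # compensate for both.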


    def _make_rsync_compatible_globs(self, path, is_local):
        """
        Given an rsync-style path, returns a list of globbed paths
        that will hopefully provide equivalent behaviour for scp. Does not
        support the full range of rsync pattern matching behaviour, only that
        exposed in the get/send_file interface (trailing slashes).

        The is_local param is a flag indicating if the paths should be
        interpreted as local or remote paths.
        """

        # non-trailing slash paths should just work
        if len(path) == 0 or path[-1] != "/":
            return [path]

        # make a function to test if a pattern matches any files
        if is_local:
            def glob_matches_files(path, pattern):
                return len(glob.glob(path + pattern)) > 0
        else:
            def glob_matches_files(path, pattern):
                result = self.run("ls \"%s\"%s" % (utils.sh_escape(path),
                                                   pattern),
                                  stdout_tee=None, ignore_status=True)
                return result.exit_status == 0

        # take a set of globs that cover all files, and see which are needed
        patterns = ["*", ".[!.]*"]
        patterns = [p for p in patterns if glob_matches_files(path, p)]

        # convert them into a set of paths suitable for the commandline
        if is_local:
            return ["\"%s\"%s" % (utils.sh_escape(path), pattern)
                    for pattern in patterns]
        else:
            return [utils.scp_remote_escape(path) + pattern
                    for pattern in patterns]
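
    # For instance, a local source "/tmp/results/" containing both regular and
    # hidden entries would expand to
    #
    #   ['"/tmp/results/"*', '"/tmp/results/".[!.]*']
    #
    # so that scp copies the directory's contents (rsync trailing-slash
    # behaviour) rather than the directory itself. A path without a trailing
    # slash is returned unchanged.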


    def _make_rsync_compatible_source(self, source, is_local):
        """
        Applies the same logic as _make_rsync_compatible_globs, but
        applies it to an entire list of sources, producing a new list of
        sources, properly quoted.
        """
        return sum((self._make_rsync_compatible_globs(path, is_local)
                    for path in source), [])


    def _set_umask_perms(self, dest):
        """
        Given a destination file/dir (recursively) set the permissions on
        all the files and directories to the max allowed by running umask.
        """

        # now this looks strange but I haven't found a way in Python to _just_
        # get the umask, apparently the only option is to try to set it
        umask = os.umask(0)
        os.umask(umask)

        max_privs = 0777 & ~umask

        def set_file_privs(filename):
            file_stat = os.stat(filename)

            file_privs = max_privs
            # if the original file permissions do not have at least one
            # executable bit then do not set it anywhere
            if not file_stat.st_mode & 0111:
                file_privs &= ~0111

            os.chmod(filename, file_privs)

        # try a bottom-up walk so changes on directory permissions won't cut
        # our access to the files/directories inside it
        for root, dirs, files in os.walk(dest, topdown=False):
            # when setting the privileges we emulate the chmod "X" behaviour
            # that sets to execute only if it is a directory or any of the
            # owner/group/other already has execute right
            for dirname in dirs:
                os.chmod(os.path.join(root, dirname), max_privs)

            for filename in files:
                set_file_privs(os.path.join(root, filename))

        # now set privs for the dest itself
        if os.path.isdir(dest):
            os.chmod(dest, max_privs)
        else:
            set_file_privs(dest)
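
    # Worked example: with the common umask 022, max_privs is
    # 0777 & ~022 = 0755, so directories (and files that already had some
    # execute bit) become rwxr-xr-x, while a plain data file is masked down
    # by ~0111 to 0644 (rw-r--r--), mirroring chmod's "X" semantics.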


    def get_file(self, source, dest, delete_dest=False, preserve_perm=True,
                 preserve_symlinks=False):
        """
        Copy files from the remote host to a local path.

        Directories will be copied recursively.
        If a source component is a directory with a trailing slash,
        the content of the directory will be copied, otherwise, the
        directory itself and its content will be copied. This
        behavior is similar to that of the program 'rsync'.

        Args:
                source: either
                        1) a single file or directory, as a string
                        2) a list of one or more (possibly mixed)
                           files or directories
                dest: a file or a directory (if source contains a
                      directory or more than one element, you must
                      supply a directory dest)
                delete_dest: if this is true, the command will also clear
                             out any old files at dest that are not in the
                             source
                preserve_perm: tells get_file() to try to preserve the sources'
                               permissions on files and dirs
                preserve_symlinks: try to preserve symlinks instead of
                                   transforming them into files/dirs on copy

        Raises:
                AutoservRunError: the scp command failed
        """

        # Start a master SSH connection if necessary.
        self.start_master_ssh()

        if isinstance(source, basestring):
            source = [source]
        dest = os.path.abspath(dest)

        # If rsync is disabled or fails, try scp.
        try_scp = True
        if self.use_rsync():
            try:
                remote_source = self._encode_remote_paths(source)
                local_dest = utils.sh_escape(dest)
                rsync = self._make_rsync_cmd([remote_source], local_dest,
                                             delete_dest, preserve_symlinks)
                utils.run(rsync)
                try_scp = False
            except error.CmdError, e:
                logging.warn("trying scp, rsync failed: %s" % e)

        if try_scp:
            # scp has no equivalent to --delete, just drop the entire dest dir
            if delete_dest and os.path.isdir(dest):
                shutil.rmtree(dest)
                os.mkdir(dest)

            remote_source = self._make_rsync_compatible_source(source, False)
            if remote_source:
                # _make_rsync_compatible_source() already did the escaping
                remote_source = self._encode_remote_paths(remote_source,
                                                          escape=False)
                local_dest = utils.sh_escape(dest)
                scp = self._make_scp_cmd([remote_source], local_dest)
                try:
                    utils.run(scp)
                except error.CmdError, e:
                    raise error.AutoservRunError(e.args[0], e.args[1])

        if not preserve_perm:
            # we have no way to tell scp to not try to preserve the
            # permissions so set them after copy instead.
            # for rsync we could use "--no-p --chmod=ugo=rwX" but those
            # options are only in very recent rsync versions
            self._set_umask_perms(dest)
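
    # Usage sketch (assumes `host` is an instance of a concrete subclass that
    # implements Host.run, e.g. an SSH-based host object):
    #
    #   # pull the contents of a remote directory into a local directory
    #   host.get_file('/var/log/', '/tmp/remote_logs', delete_dest=True)
    #
    # The trailing slash copies the directory's contents; without it the
    # directory itself would be created under /tmp/remote_logs.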


    def send_file(self, source, dest, delete_dest=False,
                  preserve_symlinks=False):
        """
        Copy files from a local path to the remote host.

        Directories will be copied recursively.
        If a source component is a directory with a trailing slash,
        the content of the directory will be copied, otherwise, the
        directory itself and its content will be copied. This
        behavior is similar to that of the program 'rsync'.

        Args:
                source: either
                        1) a single file or directory, as a string
                        2) a list of one or more (possibly mixed)
                           files or directories
                dest: a file or a directory (if source contains a
                      directory or more than one element, you must
                      supply a directory dest)
                delete_dest: if this is true, the command will also clear
                             out any old files at dest that are not in the
                             source
                preserve_symlinks: controls if symlinks on the source will be
                    copied as such on the destination or transformed into the
                    referenced file/directory

        Raises:
                AutoservRunError: the scp command failed
        """

        # Start a master SSH connection if necessary.
        self.start_master_ssh()

        if isinstance(source, basestring):
            source = [source]
        remote_dest = self._encode_remote_paths([dest])

        # If rsync is disabled or fails, try scp.
        try_scp = True
        if self.use_rsync():
            try:
                local_sources = [utils.sh_escape(path) for path in source]
                rsync = self._make_rsync_cmd(local_sources, remote_dest,
                                             delete_dest, preserve_symlinks)
                utils.run(rsync)
                try_scp = False
            except error.CmdError, e:
                logging.warn("trying scp, rsync failed: %s" % e)

        if try_scp:
            # scp has no equivalent to --delete, just drop the entire dest dir
            if delete_dest:
                is_dir = self.run("ls -d %s/" % dest,
                                  ignore_status=True).exit_status == 0
                if is_dir:
                    cmd = "rm -rf %s && mkdir %s"
                    cmd %= (dest, dest)
                    self.run(cmd)

            local_sources = self._make_rsync_compatible_source(source, True)
            if local_sources:
                scp = self._make_scp_cmd(local_sources, remote_dest)
                try:
                    utils.run(scp)
                except error.CmdError, e:
                    raise error.AutoservRunError(e.args[0], e.args[1])
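
    # The mirror of the get_file() sketch above: push a local tree to the
    # remote host (again assuming a concrete `host` object that provides run()):
    #
    #   host.send_file('/tmp/test_payload/', '/usr/local/payload',
    #                  preserve_symlinks=True)
    #
    # With preserve_symlinks=True the rsync path keeps symlinks as links
    # instead of copying what they point to (the default adds rsync's -L).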


    def ssh_ping(self, timeout=60):
        try:
            self.run("true", timeout=timeout, connect_timeout=timeout)
        except error.AutoservSSHTimeout:
            msg = "Host (ssh) verify timed out (timeout = %d)" % timeout
            raise error.AutoservSSHTimeout(msg)
        except error.AutoservSshPermissionDeniedError:
            # let AutoservSshPermissionDeniedError be visible to the callers
            raise
        except error.AutoservRunError, e:
            # convert the generic AutoservRunError into something more
            # specific for this context
            raise error.AutoservSshPingHostError(e.description + '\n' +
                                                 repr(e.result_obj))


    def is_up(self):
        """
        Check if the remote host is up.

        @returns True if the remote host is up, False otherwise
        """
        try:
            self.ssh_ping()
        except error.AutoservError:
            return False
        else:
            return True


    def wait_up(self, timeout=None):
        """
        Wait until the remote host is up or the timeout expires.

        In fact, it will wait until an ssh connection to the remote
        host can be established, and getty is running.

        @param timeout time limit in seconds before returning even
            if the host is not up.

        @returns True if the host was found to be up, False otherwise
        """
        if timeout:
            end_time = time.time() + timeout

        while not timeout or time.time() < end_time:
            if self.is_up():
                try:
                    if self.are_wait_up_processes_up():
                        logging.debug('Host %s is now up', self.hostname)
                        return True
                except error.AutoservError:
                    pass
            time.sleep(1)

        logging.debug('Host %s is still down after waiting %d seconds',
                      self.hostname, int(timeout + time.time() - end_time))
        return False


    def wait_down(self, timeout=None, warning_timer=None, old_boot_id=None):
        """
        Wait until the remote host is down or the timeout expires.

        If old_boot_id is provided, this will wait until either the machine
        is unpingable or self.get_boot_id() returns a value different from
        old_boot_id. If the boot_id value has changed then the function
        returns true under the assumption that the machine has shut down
        and has now already come back up.

        If old_boot_id is None then until the machine becomes unreachable the
        method assumes the machine has not yet shut down.

        @param timeout Time limit in seconds before returning even
            if the host is still up.
        @param warning_timer Time limit in seconds that will generate
            a warning if the host is not down yet.
        @param old_boot_id A string containing the result of self.get_boot_id()
            prior to the host being told to shut down. Can be None if this is
            not available.

        @returns True if the host was found to be down, False otherwise
        """
        #TODO: there is currently no way to distinguish between knowing
        #TODO: boot_id was unsupported and not knowing the boot_id.
        current_time = time.time()
        if timeout:
            end_time = current_time + timeout

        if warning_timer:
            warn_time = current_time + warning_timer

        if old_boot_id is not None:
            logging.debug('Host %s pre-shutdown boot_id is %s',
                          self.hostname, old_boot_id)

        while not timeout or current_time < end_time:
            try:
                new_boot_id = self.get_boot_id()
            except error.AutoservError:
                logging.debug('Host %s is now unreachable over ssh, is down',
                              self.hostname)
                return True
            else:
                # if the machine is up but the boot_id value has changed from
                # old boot id, then we can assume the machine has gone down
                # and then already come back up
                if old_boot_id is not None and old_boot_id != new_boot_id:
                    logging.debug('Host %s now has boot_id %s and so must '
                                  'have rebooted', self.hostname, new_boot_id)
                    return True

            if warning_timer and current_time > warn_time:
                self.record("WARN", None, "shutdown",
                            "Shutdown took longer than %ds" % warning_timer)
                # Print the warning only once.
                warning_timer = None
                # If the machine is stuck switching runlevels, this may
                # nudge it into rebooting.
                self.run('kill -HUP 1', ignore_status=True)

            time.sleep(1)
            current_time = time.time()

        return False
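
    # Typical reboot flow built on these two waits (sketch; get_boot_id() and
    # the reboot command come from the wider Host interface, not this class):
    #
    #   boot_id = host.get_boot_id()
    #   host.run('reboot </dev/null >/dev/null 2>&1 &', ignore_status=True)
    #   host.wait_down(timeout=300, old_boot_id=boot_id)
    #   host.wait_up(timeout=300)
    #
    # Passing old_boot_id lets wait_down() succeed even if the machine comes
    # back up before we ever observe it offline.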


    # tunable constants for the verify & repair code
    AUTOTEST_GB_DISKSPACE_REQUIRED = get_value("SERVER",
                                               "gb_diskspace_required",
                                               type=int,
                                               default=20)


    def verify_connectivity(self):
        super(AbstractSSHHost, self).verify_connectivity()

        logging.info('Pinging host ' + self.hostname)
        self.ssh_ping()
        logging.info("Host (ssh) %s is alive", self.hostname)

        if self.is_shutting_down():
            raise error.AutoservHostIsShuttingDownError("Host is shutting down")


    def verify_software(self):
        super(AbstractSSHHost, self).verify_software()
        try:
            self.check_diskspace(autotest.Autotest.get_install_dir(self),
                                 self.AUTOTEST_GB_DISKSPACE_REQUIRED)
        except error.AutoservHostError:
            raise  # only want to raise if it's a space issue
        except autotest.AutodirNotFoundError:
            # autotest dir may not exist, etc. ignore
            logging.debug('autodir space check exception, this is probably '
                          'safe to ignore\n' + traceback.format_exc())


    def close(self):
        super(AbstractSSHHost, self).close()
        self._cleanup_master_ssh()
        self.known_hosts_file.close()


    def _cleanup_master_ssh(self):
        """
        Release all resources (process, temporary directory) used by an active
        master SSH connection.
        """
        # If a master SSH connection is running, kill it.
        if self.master_ssh_job is not None:
            utils.nuke_subprocess(self.master_ssh_job.sp)
            self.master_ssh_job = None

        # Remove the temporary directory for the master SSH socket.
        if self.master_ssh_tempdir is not None:
            self.master_ssh_tempdir.clean()
            self.master_ssh_tempdir = None
            self.master_ssh_option = ''


    def start_master_ssh(self):
        """
        Called whenever a slave SSH connection needs to be initiated (e.g., by
        run, rsync, scp). If master SSH support is enabled and a master SSH
        connection is not active already, start a new one in the background.
        Also, cleanup any zombie master SSH connections (e.g., dead due to
        reboot).
        """
        if not enable_master_ssh:
            return

        # If a previously started master SSH connection is not running
        # anymore, it needs to be cleaned up and then restarted.
        if self.master_ssh_job is not None:
            if self.master_ssh_job.sp.poll() is not None:
                logging.info("Master ssh connection to %s is down.",
                             self.hostname)
                self._cleanup_master_ssh()

        # Start a new master SSH connection.
        if self.master_ssh_job is None:
            # Create a shared socket in a temp location.
            self.master_ssh_tempdir = autotemp.tempdir(unique_id='ssh-master')
            self.master_ssh_option = ("-o ControlPath=%s/socket" %
                                      self.master_ssh_tempdir.name)

            # Start the master SSH connection in the background.
            master_cmd = self.ssh_command(options="-N -o ControlMaster=yes")
            logging.info("Starting master ssh connection '%s'" % master_cmd)
            self.master_ssh_job = utils.BgJob(master_cmd)
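
    # The background master process therefore runs something like
    #
    #   ssh ... -o ControlPath=<tempdir>/socket -N -o ControlMaster=yes <host>
    #
    # and every later ssh/rsync/scp command built in this class carries the
    # same -o ControlPath option (via self.master_ssh_option), so OpenSSH
    # multiplexes those sessions over the one established connection instead
    # of re-authenticating each time.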


    def clear_known_hosts(self):
        """Clears out the temporary ssh known_hosts file.

        This is useful if the test SSHes to the machine, then reinstalls it,
        then SSHes to it again. It can be called after the reinstall to
        reduce the spam in the logs.
        """
        logging.info("Clearing known hosts for host '%s', file '%s'.",
                     self.hostname, self.known_hosts_fd)
        # Clear out the file by opening it for writing and then closing.
        fh = open(self.known_hosts_fd, "w")
        fh.close()