#!/usr/bin/python2.7
#
# CDDL HEADER START
#
# The contents of this file are subject to the terms of the
# Common Development and Distribution License (the "License").
# You may not use this file except in compliance with the License.
#
# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
# or http://www.opensolaris.org/os/licensing.
# See the License for the specific language governing permissions
# and limitations under the License.
#
# When distributing Covered Code, include this CDDL HEADER in each
# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
# If applicable, add the following below this CDDL HEADER, with the
# fields enclosed by brackets "[]" replaced with your own identifying
# information: Portions Copyright [yyyy] [name of copyright owner]
#
# CDDL HEADER END
#

#
# Copyright (c) 2009, 2015, Oracle and/or its affiliates. All rights reserved.
#

#
# NOTE: Any changes to this file are considered a change in client api
# interfaces and must be fully documented in doc/client_api_versions.txt
# if they are visible changes to the public interfaces provided.
#
# This also means that changes to the interfaces here must be reflected in
# the client version number and compatible_versions specifier found in
# modules/client/api.py:__init__.
#

import calendar
import collections
import copy
import cStringIO
import datetime as dt
import errno
import hashlib
import os
import pycurl
import shutil
import tempfile
import time
import urllib
import urlparse
import uuid

from pkg.client import global_settings
from pkg.client.debugvalues import DebugValues
logger = global_settings.logger
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import padding

import pkg.catalog
import pkg.client.api_errors as api_errors
import pkg.client.sigpolicy as sigpolicy
import pkg.client.pkgdefs as pkgdefs
import pkg.digest as digest
import pkg.misc as misc
import pkg.portable as portable
import pkg.server.catalog as old_catalog

from pkg.misc import EmptyDict, EmptyI, SIGNATURE_POLICY, DictProperty, \
    PKG_RO_FILE_MODE
# The "core" type indicates that a repository contains all of the dependencies
# declared by packages in the repository.  It is primarily used for operating
# system repositories.
REPO_CTYPE_CORE = "core"

# The "supplemental" type indicates that a repository contains packages that
# rely on or are intended to be used with packages located in another
# repository.
REPO_CTYPE_SUPPLEMENTAL = "supplemental"

# Mapping of constant values to names (in the event these ever get changed to
# numeric values or it is decided they need "prettier" or different labels).
REPO_COLLECTION_TYPES = {
    REPO_CTYPE_CORE: "core",
    REPO_CTYPE_SUPPLEMENTAL: "supplemental",
}

# Supported Protocol Schemes
SUPPORTED_SCHEMES = set(("file", "http", "https"))
SUPPORTED_PROXY_SCHEMES = ("http",)

# SSL Protocol Schemes
SSL_SCHEMES = set(("https",))

# Supported RepositoryURI sorting policies.
URI_SORT_PRIORITY = "priority"

# Sort policy mapping.
URI_SORT_POLICIES = {
    URI_SORT_PRIORITY: lambda obj: (obj.priority, obj.uri),
}

# The strings in the value field refer to the boolean properties of the
# Cryptography extension classes.  If a property is set to True, it is added
# as an extension value when the certificate is generated, and vice versa.
EXTENSIONS_VALUES = {
    x509.BasicConstraints: ["ca", "path_length"],
    x509.KeyUsage: ["digital_signature", "content_commitment",
        "key_encipherment", "data_encipherment", "key_agreement",
        "key_cert_sign", "crl_sign", "encipher_only", "decipher_only"]
}

# Only the extension values (properties) listed here may be set to True in a
# certificate extension; any other property set to True is treated as
# unsupported.
SUPPORTED_EXTENSION_VALUES = {
    x509.BasicConstraints: ("ca", "path_length"),
    x509.KeyUsage: ("digital_signature", "key_cert_sign", "crl_sign")
}

# These dictionaries map uses into their extensions.
CODE_SIGNING_USE = {
    x509.KeyUsage: ["digital_signature"],
}

CERT_SIGNING_USE = {
    x509.BasicConstraints: ["ca"],
    x509.KeyUsage: ["key_cert_sign"],
}

CRL_SIGNING_USE = {
    x509.KeyUsage: ["crl_sign"],
}

POSSIBLE_USES = [CODE_SIGNING_USE, CERT_SIGNING_USE, CRL_SIGNING_USE]
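
# Illustrative sketch (not part of this module's API): given a certificate
# loaded with the cryptography package, the maps above can be used to find
# boolean extension properties that are set but not supported.  The
# certificate path below is hypothetical.
#
#     with open("/tmp/example_cert.pem", "rb") as f:
#         cert = x509.load_pem_x509_certificate(f.read(), default_backend())
#     for ext in cert.extensions:
#         etype = type(ext.value)
#         if etype not in SUPPORTED_EXTENSION_VALUES:
#             continue
#         for prop in EXTENSIONS_VALUES[etype]:
#             try:
#                 val = getattr(ext.value, prop)
#             except ValueError:
#                 # e.g. KeyUsage.encipher_only when key_agreement is False
#                 continue
#             if val and prop not in SUPPORTED_EXTENSION_VALUES[etype]:
#                 print("unsupported extension value: {0}".format(prop))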
140 # A special token used in place of the system repository URL which is
141 # replaced at runtime by the actual address and port of the
142 # system-repository.
143 SYSREPO_PROXY = "<sysrepo>"
145 class RepositoryURI(object):
146 """Class representing a repository URI and any transport-related
147 information."""
149 # These properties are declared here so that they show up in the pydoc
150 # documentation as private, and for clarity in the property declarations
151 # found near the end of the class definition.
152 __priority = None
153 __proxies = None
154 __ssl_cert = None
155 __ssl_key = None
156 __trailing_slash = None
157 __uri = None
159 # Used to store the id of the original object this one was copied
160 # from during __copy__.
161 _source_object_id = None
163 def __init__(self, uri, priority=None, ssl_cert=None, ssl_key=None,
164 trailing_slash=True, proxy=None, system=False, proxies=None):
167 # Must set first.
168 self.__trailing_slash = trailing_slash
169 self.__scheme = None
170 self.__netloc = None
171 self.__proxies = []
173 # Note that the properties set here are intentionally lacking
174 # the '__' prefix which means assignment will occur using the
175 # get/set methods declared for the property near the end of
176 # the class definition.
177 self.priority = priority
178 self.uri = uri
179 self.ssl_cert = ssl_cert
180 self.ssl_key = ssl_key
181 # The proxy parameter is deprecated and remains for backwards
# compatibility, for now. If we are given both, then we must
183 # complain - this error is for internal use only.
184 if proxy and proxies:
185 raise api_errors.PublisherError("Both 'proxies' and "
186 "'proxy' values were used to create a "
187 "RepositoryURI object.")
189 if proxy:
190 self.proxies = [ProxyURI(proxy)]
191 if proxies:
192 self.proxies = proxies
193 self.system = system
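
# Illustrative usage sketch (the URIs and paths below are hypothetical):
#
#     repo_uri = RepositoryURI("https://pkg.example.com/",
#         ssl_cert="/var/pkg/ssl/client.cert.pem",
#         ssl_key="/var/pkg/ssl/client.key.pem",
#         proxies=[ProxyURI("http://proxy.example.com:3128")])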
195 def __copy__(self):
196 uri = RepositoryURI(self.__uri, priority=self.__priority,
197 ssl_cert=self.__ssl_cert, ssl_key=self.__ssl_key,
198 trailing_slash=self.__trailing_slash,
199 proxies=self.__proxies, system=self.system)
200 uri._source_object_id = id(self)
201 return uri
203 def __eq__(self, other):
204 if isinstance(other, RepositoryURI):
205 return self.uri == other.uri
206 if isinstance(other, str):
207 return self.uri == other
208 return False
210 def __ne__(self, other):
211 if isinstance(other, RepositoryURI):
212 return self.uri != other.uri
213 if isinstance(other, str):
214 return self.uri != other
215 return True
217 def __cmp__(self, other):
218 if not other:
219 return 1
220 if not isinstance(other, RepositoryURI):
221 other = RepositoryURI(other)
222 return cmp(self.uri, other.uri)
224 def __set_priority(self, value):
225 if value is not None:
226 try:
227 value = int(value)
228 except (TypeError, ValueError):
229 raise api_errors.BadRepositoryURIPriority(value)
230 self.__priority = value
232 def __get_proxy(self):
233 if not self.__proxies:
234 return None
235 else:
236 return self.__proxies[0].uri
238 def __set_proxy(self, proxy):
239 if not proxy:
240 return
241 if not isinstance(proxy, ProxyURI):
242 p = ProxyURI(proxy)
243 else:
244 p = proxy
246 self.__proxies = [p]
248 def __set_proxies(self, proxies):
250 for proxy in proxies:
251 if not isinstance(proxy, ProxyURI):
252 raise api_errors.BadRepositoryAttributeValue(
253 "proxies", value=proxy)
255 if proxies and self.scheme == "file":
256 raise api_errors.UnsupportedRepositoryURIAttribute(
257 "proxies", scheme=self.scheme)
259 if not (isinstance(proxies, list) or
260 isinstance(proxies, tuple)):
261 raise api_errors.BadRepositoryAttributeValue(
262 "proxies", value=proxies)
264 # for now, we only support a single proxy per RepositoryURI
265 if len(proxies) > 1:
266 raise api_errors.BadRepositoryAttributeValue(
267 "proxies", value=proxies)
269 if proxies:
270 self.__proxies = proxies
271 else:
272 self.__proxies = []
274 def __set_ssl_cert(self, filename):
275 if self.scheme not in SSL_SCHEMES and filename:
276 raise api_errors.UnsupportedRepositoryURIAttribute(
277 "ssl_cert", scheme=self.scheme)
278 if filename:
279 if not isinstance(filename, basestring):
280 raise api_errors.BadRepositoryAttributeValue(
281 "ssl_cert", value=filename)
282 filename = os.path.normpath(filename)
283 if filename == "":
284 filename = None
285 self.__ssl_cert = filename
287 def __set_ssl_key(self, filename):
288 if self.scheme not in SSL_SCHEMES and filename:
289 raise api_errors.UnsupportedRepositoryURIAttribute(
290 "ssl_key", scheme=self.scheme)
291 if filename:
292 if not isinstance(filename, basestring):
293 raise api_errors.BadRepositoryAttributeValue(
294 "ssl_key", value=filename)
295 filename = os.path.normpath(filename)
296 if filename == "":
297 filename = None
298 self.__ssl_key = filename
300 def __set_trailing_slash(self, value):
301 if value not in (True, False):
302 raise api_errors.BadRepositoryAttributeValue(
303 "trailing_slash", value=value)
304 self.__trailing_slash = value
306 def __set_uri(self, uri):
307 if uri is None:
308 raise api_errors.BadRepositoryURI(uri)
310 # if we're setting the URI to an existing value, do nothing.
311 if uri == self.__uri:
312 return
314 # This is not ideal, but determining whether we're operating
315 # on a ProxyURI saves us duplicating code in that class,
316 # which we would otherwise need, due to __protected members
317 # here.
318 if isinstance(self, ProxyURI):
319 is_proxy = True
320 else:
321 is_proxy = False
323 # Decompose URI to verify attributes.
324 scheme, netloc, path, params, query = \
325 urlparse.urlsplit(uri, allow_fragments=0)
327 self.__scheme = scheme.lower()
328 self.__netloc = netloc
330 # The set of currently supported protocol schemes.
331 if is_proxy and self.__scheme not in \
332 SUPPORTED_PROXY_SCHEMES:
333 raise api_errors.UnsupportedProxyURI(uri)
334 else:
335 if self.__scheme not in SUPPORTED_SCHEMES:
336 raise api_errors.UnsupportedRepositoryURI(uri)
338 # XXX valid_pub_url's check isn't quite right and could prevent
339 # usage of IDNs (international domain names).
340 if (self.__scheme.startswith("http") and not netloc) or \
341 not misc.valid_pub_url(uri, proxy=is_proxy):
342 raise api_errors.BadRepositoryURI(uri)
344 if self.__scheme == "file" and netloc:
345 raise api_errors.BadRepositoryURI(uri)
347 # Normalize URI scheme.
348 uri = uri.replace(scheme, self.__scheme, 1)
350 if self.__trailing_slash:
351 uri = uri.rstrip("/")
352 uri = misc.url_affix_trailing_slash(uri)
354 if self.__scheme not in SSL_SCHEMES:
355 self.__ssl_cert = None
356 self.__ssl_key = None
358 self.__uri = uri
360 def _override_uri(self, uri):
361 """Allow the __uri field of the object to be overridden in
362 special cases."""
363 if uri not in [None, SYSREPO_PROXY]:
364 raise api_errors.BadRepositoryURI(uri)
365 self.__uri = uri
367 def __str__(self):
368 return str(self.__uri)
370 def change_scheme(self, new_scheme):
371 """Change the scheme of this uri."""
373 assert self.__uri
374 scheme, netloc, path, params, query, fragment = \
375 urlparse.urlparse(self.__uri, allow_fragments=False)
376 if new_scheme == scheme:
377 return
378 self.uri = urlparse.urlunparse(
379 (new_scheme, netloc, path, params, query, fragment))
381 def get_host(self):
382 """Get the host and port of this URI if it's a http uri."""
384 scheme, netloc, path, params, query, fragment = \
385 urlparse.urlparse(self.__uri, allow_fragments=0)
386 if scheme != "file":
387 return netloc
388 return ""
390 def get_pathname(self):
391 """Returns the URI path as a pathname if the URI is a file
392 URI or '' otherwise."""
394 scheme, netloc, path, params, query, fragment = \
395 urlparse.urlparse(self.__uri, allow_fragments=0)
396 if scheme == "file":
397 return urllib.url2pathname(path)
398 return ""
400 ssl_cert = property(lambda self: self.__ssl_cert, __set_ssl_cert, None,
401 "The absolute pathname of a PEM-encoded SSL certificate file.")
403 ssl_key = property(lambda self: self.__ssl_key, __set_ssl_key, None,
404 "The absolute pathname of a PEM-encoded SSL key file.")
406 uri = property(lambda self: self.__uri, __set_uri, None,
407 "The URI used to access a repository.")
409 priority = property(lambda self: self.__priority, __set_priority, None,
410 "An integer value representing the importance of this repository "
411 "URI relative to others.")
413 proxy = property(__get_proxy, __set_proxy, None, "The proxy to use to "
414 "access this repository.")
proxies = property(lambda self: self.__proxies, __set_proxies, None,
    "A list of proxies that can be used to access this repository. "
    "At runtime, a $http_proxy environment variable might override this.")
421 @property
422 def scheme(self):
423 """The URI scheme."""
424 if not self.__uri:
425 return ""
426 return urlparse.urlsplit(self.__uri, allow_fragments=0)[0]
428 trailing_slash = property(lambda self: self.__trailing_slash,
429 __set_trailing_slash, None,
430 "A boolean value indicating whether any URI provided for this "
431 "object should have a trailing slash appended when setting the "
432 "URI property.")
435 class ProxyURI(RepositoryURI):
436 """A class to represent the URI of a proxy. The 'uri' value can be
437 'None' if 'system' is set to True."""
439 def __init__(self, uri, system=False):
440 self.__system = None
441 self.system = system
442 if not system:
443 self.uri = uri
445 def __set_system(self, value):
446 """A property to specify whether we should use the system
447 publisher as the proxy. Note that this method modifies the
448 'uri' property when set or cleared."""
449 if value not in (True, False):
450 raise api_errors.BadRepositoryAttributeValue(
451 "system", value=value)
452 self.__system = value
453 if value:
454 # Set a special value for the uri, intentionally an
455 # invalid URI which should get caught by any consumers
456 # using it by mistake. This also allows us to reuse
457 # the __eq__, __cmp__, etc. methods from the parent
458 # (where there is no public way of setting the URI to
459 # SYSREPO_PROXY, '<sysrepo>')
460 self._override_uri(SYSREPO_PROXY)
461 else:
462 self._override_uri(None)
464 def __unsupported(self, value):
465 """A method used to prevent certain properties defined in the
466 parent class from being set on ProxyURI objects."""
468 # We don't expect this string to be exposed to users.
469 raise ValueError("This property cannot be set to {0} on a "
470 "ProxyURI object.".format(value))
472 system = property(lambda self: self.__system, __set_system, None,
473 "True, if we should use the system publisher as a proxy.")
475 # Ensure we can't set any of the following properties.
476 proxies = property(lambda self: None, __unsupported, None,
477 "proxies is an invalid property for ProxyURI properties")
479 ssl_cert = property(lambda self: None, __unsupported, None,
480 "ssl_cert is an invalid property for ProxyURI properties")
482 ssl_key = property(lambda self: None, __unsupported, None,
483 "ssl_key is an invalid property for ProxyURI properties")
485 priority = property(lambda self: None, __unsupported, None,
486 "priority is an invalid property for ProxyURI properties")
488 trailing_slash = property(lambda self: None, __unsupported, None,
489 "trailing_slash is an invalid property for ProxyURI properties")
492 class TransportRepoURI(RepositoryURI):
493 """A TransportRepoURI allows for multiple representations of a given
494 RepositoryURI, each with different properties.
496 One RepositoryURI could be represented by several TransportRepoURIs,
used to allow the transport to properly track repo statistics for
each discrete path to a given URI, perhaps using different proxies
499 or trying one of several SSL key/cert pairs."""
501 def __init__(self, uri, priority=None, ssl_cert=None, ssl_key=None,
502 trailing_slash=True, proxy=None, system=False):
503 # Must set first.
504 self.__proxy = None
505 self.__runtime_proxy = None
506 self.proxy = proxy
508 RepositoryURI.__init__(self, uri, priority=priority,
509 ssl_cert=ssl_cert, ssl_key=ssl_key,
510 trailing_slash=trailing_slash, system=system)
512 def __eq__(self, other):
513 if isinstance(other, TransportRepoURI):
514 return self.uri == other.uri and \
515 self.proxy == other.proxy
516 if isinstance(other, basestring):
517 return self.uri == other and self.proxy == None
518 return False
520 def __ne__(self, other):
521 if isinstance(other, TransportRepoURI):
522 return self.uri != other.uri or \
523 self.proxy != other.proxy
524 if isinstance(other, basestring):
525 return self.uri != other or self.proxy != None
526 return True
528 def __cmp__(self, other):
529 if not other:
530 return 1
531 if isinstance(other, basestring):
532 other = TransportRepoURI(other)
533 elif not isinstance(other, TransportRepoURI):
534 return 1
535 res = cmp(self.uri, other.uri)
536 if res == 0:
537 return cmp(self.proxy, other.proxy)
538 else:
539 return res
541 def key(self):
542 """Returns a value that can be used to identify this RepoURI
543 uniquely for the transport system. Normally, this would be done
544 using __hash__() however, TransportRepoURI objects are not
545 guaranteed to be immutable.
547 The key is a (uri, proxy) tuple, where the proxy is
548 the proxy used to reach that URI. Note that in the transport
549 system, we may choose to override the proxy value here.
551 If this key format changes, a corresponding change should be
552 made in pkg.client.transport.engine.__cleanup_requests(..)"""
554 u = self.uri
555 p = self.__proxy
557 if self.uri:
558 u = self.uri.rstrip("/")
559 return (u, p)
561 def __set_proxy(self, proxy):
562 assert not self.ssl_cert
563 assert not self.ssl_key
565 if proxy and self.scheme == "file":
566 raise api_errors.UnsupportedRepositoryURIAttribute(
567 "proxy", scheme=self.scheme)
568 if proxy:
569 self.__proxy = proxy.rstrip("/")
570 else:
571 self.__proxy = None
572 # Changing the proxy value causes us to clear any cached
573 # value we have in __runtime_proxy.
574 self.__runtime_proxy = None
576 def __get_runtime_proxy(self):
577 """Returns the proxy that should be used at runtime, which may
578 differ from the persisted proxy value. We check for http_proxy,
579 https_proxy and all_proxy OS environment variables.
581 To avoid repeated environment lookups, we cache the results."""
583 # we don't permit the proxy used by system publishers to be
584 # overridden by environment variables.
585 if self.system:
586 return self.proxy
588 if not self.__runtime_proxy:
589 self.__runtime_proxy = misc.get_runtime_proxy(
590 self.__proxy, self.uri)
591 return self.__runtime_proxy
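
# Illustrative example: with no persisted proxy configured and a hypothetical
# http_proxy=http://proxy.example.com:80 set in the environment, the runtime
# proxy for an http origin resolves to that environment value via
# misc.get_runtime_proxy(); system publishers skip this override entirely.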
593 def __set_runtime_proxy(self, runtime_proxy):
594 """The runtime proxy value is always computed dynamically,
595 we should not allow a caller to set it."""
597 assert False, "Refusing to set a runtime_proxy value."
599 @staticmethod
600 def fromrepouri(repouri):
601 """Build a list of TransportRepositoryURI objects using
602 properties from the given RepositoryURI, 'repouri'.
604 This is to allow the transport to try different paths to
605 a given RepositoryURI, if more than one is possible."""
607 trans_repouris = []
608 # we just use the proxies for now, but in future, we may want
609 # other per-origin/mirror properties
610 if repouri.proxies:
611 for p in repouri.proxies:
612 t = TransportRepoURI(repouri.uri,
613 priority=repouri.priority,
614 ssl_cert=repouri.ssl_cert,
615 ssl_key=repouri.ssl_key,
616 system=repouri.system,
617 trailing_slash=repouri.trailing_slash,
618 proxy=p.uri)
619 trans_repouris.append(t)
620 else:
621 trans_repouris.append(TransportRepoURI(repouri.uri,
622 priority=repouri.priority,
623 ssl_cert=repouri.ssl_cert,
624 ssl_key=repouri.ssl_key,
625 system=repouri.system,
626 trailing_slash=repouri.trailing_slash))
627 return trans_repouris
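
# Illustrative sketch (hypothetical URIs): a RepositoryURI with one proxy
# expands into a single TransportRepoURI carrying that proxy, so the
# transport can keep separate statistics per (uri, proxy) path.
#
#     r = RepositoryURI("https://pkg.example.com/",
#         proxies=[ProxyURI("http://proxy.example.com:3128")])
#     [t.key() for t in TransportRepoURI.fromrepouri(r)]
#         -> [("https://pkg.example.com", "http://proxy.example.com:3128")]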
proxy = property(lambda self: self.__proxy, __set_proxy, None,
    "The proxy that is used to access this repository. "
    "At runtime, a $http_proxy environment variable might override this.")

runtime_proxy = property(__get_runtime_proxy, __set_runtime_proxy, None,
    "The proxy to use to access this repository. This value checks "
    "OS environment variables, and expands any $user:$password values.")
639 class Repository(object):
640 """Class representing a repository object.
642 A repository object represents a location where clients can publish
643 and retrieve package content and/or metadata. It has the following
644 characteristics:
646 - may have one or more origins (URIs) for publication and
647 retrieval of package metadata and content.
649 - may have zero or more mirrors (URIs) for retrieval of package
650 content."""
652 # These properties are declared here so that they show up in the pydoc
653 # documentation as private, and for clarity in the property declarations
654 # found near the end of the class definition.
655 __collection_type = None
656 __legal_uris = []
657 __mirrors = []
658 __origins = []
659 __refresh_seconds = None
660 __registration_uri = None
661 __related_uris = []
662 __sort_policy = URI_SORT_PRIORITY
664 # Used to store the id of the original object this one was copied
665 # from during __copy__.
666 _source_object_id = None
668 name = None
669 description = None
670 registered = False
672 def __init__(self, collection_type=REPO_CTYPE_CORE, description=None,
673 legal_uris=None, mirrors=None, name=None, origins=None,
674 refresh_seconds=None, registered=False, registration_uri=None,
675 related_uris=None, sort_policy=URI_SORT_PRIORITY):
676 """Initializes a repository object.
678 'collection_type' is an optional constant value indicating the
679 type of packages in the repository.
681 'description' is an optional string value containing a
682 descriptive paragraph for the repository.
684 'legal_uris' should be a list of RepositoryURI objects or URI
685 strings indicating where licensing, legal, and terms of service
686 information for the repository can be found.
688 'mirrors' is an optional list of RepositoryURI objects or URI
689 strings indicating where package content can be retrieved.
691 'name' is an optional, short, descriptive name for the
692 repository.
694 'origins' should be a list of RepositoryURI objects or URI
695 strings indicating where package metadata can be retrieved.
697 'refresh_seconds' is an optional integer value indicating the
698 number of seconds clients should wait before refreshing cached
699 repository catalog or repository metadata information.
701 'registered' is an optional boolean value indicating whether
702 a client has registered with the repository's publisher.
704 'registration_uri' is an optional RepositoryURI object or a URI
705 string indicating a location clients can use to register or
706 obtain credentials needed to access the repository.
708 'related_uris' is an optional list of RepositoryURI objects or a
709 list of URI strings indicating the location of related
710 repositories that a client may be interested in.
712 'sort_policy' is an optional constant value indicating how
713 legal_uris, mirrors, origins, and related_uris should be
714 sorted."""
716 # Note that the properties set here are intentionally lacking
717 # the '__' prefix which means assignment will occur using the
718 # get/set methods declared for the property near the end of
719 # the class definition.
721 # Must be set first so that it will apply to attributes set
722 # afterwards.
723 self.sort_policy = sort_policy
725 self.collection_type = collection_type
726 self.description = description
727 self.legal_uris = legal_uris
728 self.mirrors = mirrors
729 self.name = name
730 self.origins = origins
731 self.refresh_seconds = refresh_seconds
732 self.registered = registered
733 self.registration_uri = registration_uri
734 self.related_uris = related_uris
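
# Illustrative usage sketch (hypothetical URIs):
#
#     repo = Repository(
#         origins=["https://pkg.example.com/"],
#         mirrors=["http://mirror.example.com/"],
#         name="example.com",
#         refresh_seconds=4 * 3600)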
736 def __add_uri(self, attr, uri, dup_check=None, priority=None,
737 ssl_cert=None, ssl_key=None, trailing_slash=True):
738 if not isinstance(uri, RepositoryURI):
739 uri = RepositoryURI(uri, priority=priority,
740 ssl_cert=ssl_cert, ssl_key=ssl_key,
741 trailing_slash=trailing_slash)
743 if dup_check:
744 dup_check(uri)
746 ulist = getattr(self, attr)
747 ulist.append(uri)
748 ulist.sort(key=URI_SORT_POLICIES[self.__sort_policy])
750 def __copy__(self):
751 cluris = [copy.copy(u) for u in self.legal_uris]
752 cmirrors = [copy.copy(u) for u in self.mirrors]
753 cruris = [copy.copy(u) for u in self.related_uris]
754 corigins = [copy.copy(u) for u in self.origins]
756 repo = Repository(collection_type=self.collection_type,
757 description=self.description,
758 legal_uris=cluris,
759 mirrors=cmirrors, name=self.name,
760 origins=corigins,
761 refresh_seconds=self.refresh_seconds,
762 registered=self.registered,
763 registration_uri=copy.copy(self.registration_uri),
764 related_uris=cruris)
765 repo._source_object_id = id(self)
766 return repo
768 def __replace_uris(self, attr, value, trailing_slash=True):
769 if value is None:
770 value = []
771 if not isinstance(value, list):
772 raise api_errors.BadRepositoryAttributeValue(attr,
773 value=value)
774 uris = []
775 for u in value:
776 if not isinstance(u, RepositoryURI):
777 u = RepositoryURI(u,
778 trailing_slash=trailing_slash)
779 elif trailing_slash:
780 u.uri = misc.url_affix_trailing_slash(u.uri)
781 uris.append(u)
782 uris.sort(key=URI_SORT_POLICIES[self.__sort_policy])
783 return uris
785 def __set_collection_type(self, value):
786 if value not in REPO_COLLECTION_TYPES:
787 raise api_errors.BadRepositoryCollectionType(value)
788 self.__collection_type = value
790 def __set_legal_uris(self, value):
791 self.__legal_uris = self.__replace_uris("legal_uris", value,
792 trailing_slash=False)
794 def __set_mirrors(self, value):
795 self.__mirrors = self.__replace_uris("mirrors", value)
797 def __set_origins(self, value):
798 self.__origins = self.__replace_uris("origins", value)
800 def __set_registration_uri(self, value):
801 if value and not isinstance(value, RepositoryURI):
802 value = RepositoryURI(value, trailing_slash=False)
803 self.__registration_uri = value
805 def __set_related_uris(self, value):
806 self.__related_uris = self.__replace_uris("related_uris",
807 value, trailing_slash=False)
809 def __set_refresh_seconds(self, value):
810 if value is not None:
811 try:
812 value = int(value)
813 except (TypeError, ValueError):
814 raise api_errors.BadRepositoryAttributeValue(
815 "refresh_seconds", value=value)
816 if value < 0:
817 raise api_errors.BadRepositoryAttributeValue(
818 "refresh_seconds", value=value)
819 self.__refresh_seconds = value
821 def __set_sort_policy(self, value):
822 if value not in URI_SORT_POLICIES:
823 raise api_errors.BadRepositoryURISortPolicy(value)
824 self.__sort_policy = value
826 def add_legal_uri(self, uri, priority=None, ssl_cert=None,
827 ssl_key=None):
828 """Adds the specified legal URI to the repository.
830 'uri' can be a RepositoryURI object or a URI string. If
831 it is a RepositoryURI object, all other parameters will be
832 ignored."""
834 self.__add_uri("legal_uris", uri, priority=priority,
835 ssl_cert=ssl_cert, ssl_key=ssl_key, trailing_slash=False)
837 def add_mirror(self, mirror, priority=None, ssl_cert=None,
838 ssl_key=None):
839 """Adds the specified mirror to the repository.
841 'mirror' can be a RepositoryURI object or a URI string. If
842 it is a RepositoryURI object, all other parameters will be
843 ignored."""
845 def dup_check(mirror):
846 if self.has_mirror(mirror):
847 o = self.get_mirror(mirror)
848 if o.system:
849 raise api_errors.DuplicateSyspubMirror(
850 mirror)
851 raise api_errors.DuplicateRepositoryMirror(
852 mirror)
854 self.__add_uri("mirrors", mirror, dup_check=dup_check,
855 priority=priority, ssl_cert=ssl_cert, ssl_key=ssl_key)
857 def add_origin(self, origin, priority=None, ssl_cert=None,
858 ssl_key=None):
859 """Adds the specified origin to the repository.
861 'origin' can be a RepositoryURI object or a URI string. If
862 it is a RepositoryURI object, all other parameters will be
863 ignored."""
865 def dup_check(origin):
866 if self.has_origin(origin):
867 o = self.get_origin(origin)
868 if o.system:
869 raise api_errors.DuplicateSyspubOrigin(
870 origin)
871 raise api_errors.DuplicateRepositoryOrigin(
872 origin)
874 self.__add_uri("origins", origin, dup_check=dup_check,
875 priority=priority, ssl_cert=ssl_cert, ssl_key=ssl_key)
877 def add_related_uri(self, uri, priority=None, ssl_cert=None,
878 ssl_key=None):
879 """Adds the specified related URI to the repository.
881 'uri' can be a RepositoryURI object or a URI string. If
882 it is a RepositoryURI object, all other parameters will be
883 ignored."""
885 self.__add_uri("related_uris", uri, priority=priority,
886 ssl_cert=ssl_cert, ssl_key=ssl_key, trailing_slash=False)
888 def get_mirror(self, mirror):
889 """Returns a RepositoryURI object representing the mirror
890 that matches 'mirror'.
892 'mirror' can be a RepositoryURI object or a URI string."""
894 if not isinstance(mirror, RepositoryURI):
895 mirror = misc.url_affix_trailing_slash(mirror)
896 for m in self.mirrors:
897 if mirror == m.uri:
898 return m
899 raise api_errors.UnknownRepositoryMirror(mirror)
901 def get_origin(self, origin):
902 """Returns a RepositoryURI object representing the origin
903 that matches 'origin'.
905 'origin' can be a RepositoryURI object or a URI string."""
907 if not isinstance(origin, RepositoryURI):
908 origin = misc.url_affix_trailing_slash(origin)
909 for o in self.origins:
910 if origin == o.uri:
911 return o
912 raise api_errors.UnknownRepositoryOrigin(origin)
914 def has_mirror(self, mirror):
915 """Returns a boolean value indicating whether a matching
916 'mirror' exists for the repository.
918 'mirror' can be a RepositoryURI object or a URI string."""
920 if not isinstance(mirror, RepositoryURI):
921 mirror = RepositoryURI(mirror)
922 return mirror in self.mirrors
924 def has_origin(self, origin):
925 """Returns a boolean value indicating whether a matching
926 'origin' exists for the repository.
928 'origin' can be a RepositoryURI object or a URI string."""
930 if not isinstance(origin, RepositoryURI):
931 origin = RepositoryURI(origin)
932 return origin in self.origins
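
# Illustrative sketch of origin management, given a Repository instance
# 'repo' and a hypothetical URI:
#
#     repo.add_origin("https://pkg.example.com/")
#     repo.has_origin("https://pkg.example.com/")        -> True
#     repo.get_origin("https://pkg.example.com/").uri    -> "https://pkg.example.com/"
#     repo.remove_origin("https://pkg.example.com/")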
934 def remove_legal_uri(self, uri):
935 """Removes the legal URI matching 'uri' from the repository.
937 'uri' can be a RepositoryURI object or a URI string."""
939 for i, m in enumerate(self.legal_uris):
940 if uri == m.uri:
941 # Immediate return as the index into the array
942 # changes with each removal.
943 del self.legal_uris[i]
944 return
945 raise api_errors.UnknownLegalURI(uri)
947 def remove_mirror(self, mirror):
948 """Removes the mirror matching 'mirror' from the repository.
950 'mirror' can be a RepositoryURI object or a URI string."""
952 if not isinstance(mirror, RepositoryURI):
953 mirror = misc.url_affix_trailing_slash(mirror)
954 for i, m in enumerate(self.mirrors):
955 if mirror == m.uri:
956 if m.system:
raise api_errors.RemoveSyspubMirror(
    mirror.uri)
959 # Immediate return as the index into the array
960 # changes with each removal.
961 del self.mirrors[i]
962 return
963 raise api_errors.UnknownRepositoryMirror(mirror)
965 def remove_origin(self, origin):
966 """Removes the origin matching 'origin' from the repository.
968 'origin' can be a RepositoryURI object or a URI string."""
970 if not isinstance(origin, RepositoryURI):
971 origin = RepositoryURI(origin)
972 for i, o in enumerate(self.origins):
973 if origin == o.uri:
974 if o.system:
975 raise api_errors.RemoveSyspubOrigin(
976 origin.uri)
977 # Immediate return as the index into the array
978 # changes with each removal.
979 del self.origins[i]
980 return
981 raise api_errors.UnknownRepositoryOrigin(origin)
983 def remove_related_uri(self, uri):
984 """Removes the related URI matching 'uri' from the repository.
986 'uri' can be a RepositoryURI object or a URI string."""
988 for i, m in enumerate(self.related_uris):
989 if uri == m.uri:
990 # Immediate return as the index into the array
991 # changes with each removal.
992 del self.related_uris[i]
993 return
994 raise api_errors.UnknownRelatedURI(uri)
996 def update_mirror(self, mirror, priority=None, ssl_cert=None,
997 ssl_key=None):
998 """Updates an existing mirror object matching 'mirror'.
1000 'mirror' can be a RepositoryURI object or a URI string.
1002 This method is deprecated, and may be removed in future API
1003 versions."""
1005 if not isinstance(mirror, RepositoryURI):
1006 mirror = RepositoryURI(mirror, priority=priority,
1007 ssl_cert=ssl_cert, ssl_key=ssl_key)
1009 target = self.get_mirror(mirror)
1010 target.priority = mirror.priority
1011 target.ssl_cert = mirror.ssl_cert
1012 target.ssl_key = mirror.ssl_key
1013 target.proxies = mirror.proxies
1014 self.mirrors.sort(key=URI_SORT_POLICIES[self.__sort_policy])
1016 def update_origin(self, origin, priority=None, ssl_cert=None,
1017 ssl_key=None):
1018 """Updates an existing origin object matching 'origin'.
1020 'origin' can be a RepositoryURI object or a URI string.
1022 This method is deprecated, and may be removed in future API
1023 versions."""
1025 if not isinstance(origin, RepositoryURI):
1026 origin = RepositoryURI(origin, priority=priority,
1027 ssl_cert=ssl_cert, ssl_key=ssl_key)
1029 target = self.get_origin(origin)
1030 target.priority = origin.priority
1031 target.ssl_cert = origin.ssl_cert
1032 target.ssl_key = origin.ssl_key
1033 target.proxies = origin.proxies
1034 self.origins.sort(key=URI_SORT_POLICIES[self.__sort_policy])
1036 def reset_mirrors(self):
1037 """Discards the current list of repository mirrors."""
1039 self.mirrors = []
1041 def reset_origins(self):
1042 """Discards the current list of repository origins."""
1044 self.origins = []
1046 collection_type = property(lambda self: self.__collection_type,
1047 __set_collection_type, None,
1048 """A constant value indicating the type of packages in the
1049 repository. The following collection types are recognized:
1051 REPO_CTYPE_CORE
1052 The "core" type indicates that the repository contains
1053 all of the dependencies declared by packages in the
1054 repository. It is primarily used for operating system
1055 repositories.
1057 REPO_CTYPE_SUPPLEMENTAL
1058 The "supplemental" type indicates that the repository
1059 contains packages that rely on or are intended to be
1060 used with packages located in another repository.""")
1062 legal_uris = property(lambda self: self.__legal_uris,
1063 __set_legal_uris, None,
1064 """A list of RepositoryURI objects indicating where licensing,
1065 legal, and terms of service information for the repository can be
1066 found.""")
1068 mirrors = property(lambda self: self.__mirrors, __set_mirrors, None,
1069 """A list of RepositoryURI objects indicating where package content
1070 can be retrieved. If any value in the list provided is a URI
1071 string, it will be replaced with a RepositoryURI object.""")
1073 origins = property(lambda self: self.__origins, __set_origins, None,
1074 """A list of RepositoryURI objects indicating where package content
1075 can be retrieved. If any value in the list provided is a URI
1076 string, it will be replaced with a RepositoryURI object.""")
1078 registration_uri = property(lambda self: self.__registration_uri,
1079 __set_registration_uri, None,
1080 """A RepositoryURI object indicating a location clients can use to
1081 register or obtain credentials needed to access the repository. If
1082 the value provided is a URI string, it will be replaced with a
1083 RepositoryURI object.""")
1085 related_uris = property(lambda self: self.__related_uris,
1086 __set_related_uris, None,
1087 """A list of RepositoryURI objects indicating the location of
1088 related repositories that a client may be interested in. If any
1089 value in the list provided is a URI string, it will be replaced with
1090 a RepositoryURI object.""")
1092 refresh_seconds = property(lambda self: self.__refresh_seconds,
1093 __set_refresh_seconds, None,
1094 """An integer value indicating the number of seconds clients should
1095 wait before refreshing cached repository metadata information. A
1096 value of None indicates that refreshes should be performed at the
1097 client's discretion.""")
1099 sort_policy = property(lambda self: self.__sort_policy,
1100 __set_sort_policy, None,
1101 """A constant value indicating how legal_uris, mirrors, origins, and
1102 related_uris should be sorted. The following policies are
1103 recognized:
1105 URI_SORT_PRIORITY
1106 The "priority" policy indicate that URIs should be
1107 sorted according to the value of their priority
1108 attribute.""")
1111 class Publisher(object):
1112 """Class representing a publisher object and a set of interfaces to set
1113 and retrieve its information.
1115 A publisher is a forward or reverse domain name identifying a source
1116 (e.g. "publisher") of packages."""
1118 # These properties are declared here so that they show up in the pydoc
1119 # documentation as private, and for clarity in the property declarations
1120 # found near the end of the class definition.
1121 _catalog = None
1122 __alias = None
1123 __client_uuid = None
1124 __disabled = False
1125 __meta_root = None
1126 __origin_root = None
1127 __prefix = None
1128 __repository = None
1129 __sticky = True
1130 transport = None
1132 # Used to store the id of the original object this one was copied
1133 # from during __copy__.
1134 _source_object_id = None
1136 # Used to record those CRLs which are unreachable during the current
1137 # operation.
1138 __bad_crls = set()
1140 def __init__(self, prefix, alias=None, catalog=None, client_uuid=None,
1141 disabled=False, meta_root=None, repository=None,
1142 transport=None, sticky=True, props=None, revoked_ca_certs=EmptyI,
1143 approved_ca_certs=EmptyI, sys_pub=False):
1144 """Initialize a new publisher object.
1146 'catalog' is an optional Catalog object to use in place of
1147 retrieving one from the publisher's meta_root. This option
may only be used when meta_root is not provided."""

assert not (catalog and meta_root)
1153 if client_uuid is None:
1154 self.reset_client_uuid()
1155 else:
1156 self.__client_uuid = client_uuid
1158 self.sys_pub = False
1160 # Note that the properties set here are intentionally lacking
1161 # the '__' prefix which means assignment will occur using the
1162 # get/set methods declared for the property near the end of
1163 # the class definition.
1164 self.alias = alias
1165 self.disabled = disabled
1166 self.prefix = prefix
1167 self.transport = transport
1168 self.meta_root = meta_root
1169 self.sticky = sticky
1172 self.__sig_policy = None
1173 self.__delay_validation = False
1175 self.__properties = {}
1176 self.__tmp_crls = {}
1178 # Writing out an EmptyI to a config file and reading it back
1179 # in doesn't work correctly at the moment, but reading and
1180 # writing an empty list does. So if intermediate_certs is empty,
1181 # make sure it's stored as an empty list.
1183 # The relevant implementation is probably the line which
1184 # strips ][ from the input in imageconfig.read_list.
1185 if revoked_ca_certs:
1186 self.revoked_ca_certs = revoked_ca_certs
1187 else:
1188 self.revoked_ca_certs = []
1190 if approved_ca_certs:
1191 self.approved_ca_certs = approved_ca_certs
1192 else:
1193 self.approved_ca_certs = []
1195 if props:
1196 self.properties.update(props)
1198 self.ca_dict = None
1200 if repository:
1201 self.repository = repository
1202 self.sys_pub = sys_pub
# A dictionary to store the mapping for subject -> certificate
1205 # for those certificates we couldn't store on disk.
1206 self.__issuers = {}
1208 # Must be done last.
1209 self._catalog = catalog
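
# Illustrative usage sketch (hypothetical names): a publisher is typically
# created from a prefix plus a Repository describing its origins.
#
#     repo = Repository(origins=["https://pkg.example.com/"])
#     pub = Publisher("example.com", alias="example", repository=repo)
#     pub.prefix   -> "example.com"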
1211 def __cmp__(self, other):
1212 if other is None:
1213 return 1
1214 if isinstance(other, Publisher):
1215 return cmp(self.prefix, other.prefix)
1216 return cmp(self.prefix, other)
1218 @staticmethod
1219 def __contains__(key):
1220 """Supports deprecated compatibility interface."""
1222 return key in ("client_uuid", "disabled", "mirrors", "origin",
1223 "prefix", "ssl_cert", "ssl_key")
1225 def __copy__(self):
1226 selected = None
1227 pub = Publisher(self.__prefix, alias=self.__alias,
1228 client_uuid=self.__client_uuid, disabled=self.__disabled,
1229 meta_root=self.meta_root,
1230 repository=copy.copy(self.repository),
1231 transport=self.transport, sticky=self.__sticky,
1232 props=self.properties,
1233 revoked_ca_certs=self.revoked_ca_certs,
1234 approved_ca_certs=self.approved_ca_certs,
1235 sys_pub=self.sys_pub)
1236 pub._catalog = self._catalog
1237 pub._source_object_id = id(self)
1238 return pub
1240 def __eq__(self, other):
1241 if isinstance(other, Publisher):
1242 return self.prefix == other.prefix
1243 if isinstance(other, str):
1244 return self.prefix == other
1245 return False
1247 def __getitem__(self, key):
1248 """Deprecated compatibility interface allowing publisher
1249 attributes to be read as pub["attribute"]."""
1251 if key == "client_uuid":
1252 return self.__client_uuid
1253 if key == "disabled":
1254 return self.__disabled
1255 if key == "prefix":
1256 return self.__prefix
1258 repo = self.repository
1259 if key == "mirrors":
1260 return [str(m) for m in repo.mirrors]
1261 if key == "origin":
1262 if not repo.origins[0]:
1263 return None
1264 return repo.origins[0].uri
1265 if key == "ssl_cert":
1266 if not repo.origins[0]:
1267 return None
1268 return repo.origins[0].ssl_cert
1269 if key == "ssl_key":
1270 if not repo.origins[0]:
1271 return None
1272 return repo.origins[0].ssl_key
1274 def __get_last_refreshed(self):
1275 if not self.meta_root:
1276 return None
1278 lcfile = os.path.join(self.meta_root, "last_refreshed")
1279 try:
1280 mod_time = os.stat(lcfile).st_mtime
1281 except EnvironmentError as e:
1282 if e.errno == errno.ENOENT:
1283 return None
1284 raise
1285 return dt.datetime.utcfromtimestamp(mod_time)
1287 def __ne__(self, other):
1288 if isinstance(other, Publisher):
1289 return self.prefix != other.prefix
1290 if isinstance(other, str):
1291 return self.prefix != other
1292 return True
1294 def __set_alias(self, value):
1295 if self.sys_pub:
1296 raise api_errors.ModifyingSyspubException(
1297 "Cannot set the alias of a system publisher")
1298 # Aliases must comply with the same restrictions that prefixes
1299 # have as they are intended to be useable in any case where
1300 # a prefix may be used.
1301 if value is not None and value != "" and \
1302 not misc.valid_pub_prefix(value):
1303 raise api_errors.BadPublisherAlias(value)
1304 self.__alias = value
1306 def __set_disabled(self, disabled):
1307 if self.sys_pub:
1308 raise api_errors.ModifyingSyspubException(_("Cannot "
1309 "enable or disable a system publisher"))
1311 if disabled:
1312 self.__disabled = True
1313 else:
1314 self.__disabled = False
1316 def __set_last_refreshed(self, value):
1317 if not self.meta_root:
1318 return
1320 if value is not None and not isinstance(value, dt.datetime):
1321 raise api_errors.BadRepositoryAttributeValue(
1322 "last_refreshed", value=value)
1324 lcfile = os.path.join(self.meta_root, "last_refreshed")
1325 if not value:
1326 # If no value was provided, attempt to remove the
1327 # tracking file.
1328 try:
1329 portable.remove(lcfile)
1330 except EnvironmentError as e:
1331 # If the file can't be removed due to
1332 # permissions, a read-only filesystem, or
1333 # because it doesn't exist, continue on.
1334 if e.errno not in (errno.ENOENT, errno.EACCES,
1335 errno.EROFS):
1336 raise
1337 return
1339 def create_tracker():
1340 try:
1341 # If the file is a symlink we catch an
1342 # exception and do not update the file.
1343 fd = os.open(lcfile,
1344 os.O_WRONLY|os.O_NOFOLLOW|os.O_CREAT)
1345 os.write(fd, "{0}\n".format(
1346 misc.time_to_timestamp(
1347 calendar.timegm(value.utctimetuple()))))
1348 os.close(fd)
1349 except EnvironmentError as e:
1350 if e.errno == errno.ELOOP:
1351 raise api_errors.UnexpectedLinkError(
1352 os.path.dirname(lcfile),
1353 os.path.basename(lcfile),
1354 e.errno)
1355 # If the file can't be written due to
1356 # permissions or because the filesystem is
1357 # read-only, continue on.
1358 if e.errno not in (errno.EACCES, errno.EROFS):
1359 raise
1360 try:
1361 # If a time was provided, write out a special file that
1362 # can be used to track the information with the actual
1363 # time (in UTC) contained within.
1364 create_tracker()
1365 except EnvironmentError as e:
1366 if e.errno != errno.ENOENT:
1367 raise
1369 # Assume meta_root doesn't exist and create it.
1370 try:
1371 self.create_meta_root()
1372 except api_errors.PermissionsException:
1373 # If the directory can't be created due to
1374 # permissions, move on.
1375 pass
1376 except EnvironmentError as e:
1377 # If the directory can't be created due to a
1378 # read-only filesystem, move on.
1379 if e.errno != errno.EROFS:
1380 raise
1381 else:
1382 # Try one last time.
1383 create_tracker()
1385 def __set_meta_root(self, pathname):
1386 if pathname:
1387 pathname = os.path.abspath(pathname)
1388 self.__meta_root = pathname
1389 if self._catalog:
1390 self._catalog.meta_root = self.catalog_root
1391 if self.__meta_root:
1392 self.__origin_root = os.path.join(self.__meta_root,
1393 "origins")
1394 self.cert_root = os.path.join(self.__meta_root, "certs")
1395 self.__subj_root = os.path.join(self.cert_root,
1396 "subject_hashes")
1397 self.__crl_root = os.path.join(self.cert_root, "crls")
1399 def __set_prefix(self, prefix):
1400 if not misc.valid_pub_prefix(prefix):
1401 raise api_errors.BadPublisherPrefix(prefix)
1402 self.__prefix = prefix
1404 def __set_repository(self, value):
1405 if not isinstance(value, Repository):
1406 raise api_errors.UnknownRepository(value)
1407 self.__repository = value
1408 self._catalog = None
1410 def __set_client_uuid(self, value):
1411 self.__client_uuid = value
1413 def __set_stickiness(self, value):
1414 if self.sys_pub:
1415 raise api_errors.ModifyingSyspubException(_("Cannot "
1416 "change the stickiness of a system publisher"))
1417 self.__sticky = bool(value)
1419 def __str__(self):
1420 return self.prefix
1422 def __validate_metadata(self, croot, repo):
1423 """Private helper function to check the publisher's metadata
1424 for configuration or other issues and log appropriate warnings
1425 or errors. Currently only checks catalog metadata."""
1427 c = pkg.catalog.Catalog(meta_root=croot, read_only=True)
1428 if not c.exists:
1429 # Nothing to validate.
1430 return
1431 if not c.version > 0:
1432 # Validation doesn't apply.
1433 return
1434 if not c.package_count:
1435 # Nothing to do.
1436 return
1438 # XXX For now, perform this check using the catalog data.
1439 # In the future, it should be done using the output of the
1440 # publisher/0 operation.
1441 pubs = c.publishers()
1443 if self.prefix not in pubs:
1444 origins = repo.origins
1445 origin = origins[0]
1446 logger.error(_("""
1447 Unable to retrieve package data for publisher '{prefix}' from one
1448 of the following origin(s):
1450 {origins}
1452 The catalog retrieved from one of the origin(s) listed above only
1453 contains package data for: {pubs}.
1454 """).format(origins="\n".join(str(o) for o in origins), prefix=self.prefix,
1455 pubs=", ".join(pubs)))
1457 if global_settings.client_name != "pkg":
1458 logger.error(_("""\
1459 This is either a result of invalid origin information being provided
1460 for publisher '{0}', or because the wrong publisher name was
1461 provided when this publisher was added.
1462 """).format(self.prefix))
1463 # Remaining messages are for pkg client only.
1464 return
1466 logger.error(_("""\
1467 To resolve this issue, correct the origin information provided for
1468 publisher '{prefix}' using the pkg set-publisher subcommand, or re-add
1469 the publisher using the correct name and remove the '{prefix}'
1470 publisher.
1471 """).format(prefix=self.prefix))
1473 if len(pubs) == 1:
1474 logger.warning(_("""\
1475 To re-add this publisher with the correct name, execute the following
1476 commands as a privileged user:
1478 pkg set-publisher -P -g {origin} {pub}
1479 pkg unset-publisher {prefix}
1480 """).format(origin=origin, prefix=self.prefix, pub=list(pubs)[0]))
1481 return
1483 logger.warning(_("""\
1484 The origin(s) listed above contain package data for more than one
1485 publisher, but this issue can likely be resolved by executing one
1486 of the following commands as a privileged user:
1487 """))
1489 for pfx in pubs:
1490 logger.warning(_("pkg set-publisher -P -g "
1491 "{origin} {pub}\n").format(
1492 origin=origin, pub=pfx))
1494 logger.warning(_("""\
1495 Afterwards, the old publisher should be removed by executing the
1496 following command as a privileged user:
1498 pkg unset-publisher {0}
1499 """).format(self.prefix))
1501 @property
1502 def catalog(self):
1503 """A reference to the Catalog object for the publisher's
selected repository, or None if unavailable."""
1506 if not self.meta_root:
1507 if self._catalog:
1508 return self._catalog
1509 return None
1511 if not self._catalog:
1512 croot = self.catalog_root
1513 if not os.path.isdir(croot):
1514 # Current meta_root structure is likely in
1515 # a state of transition, so don't provide a
1516 # meta_root. Assume that an empty catalog
1517 # is desired instead. (This can happen during
1518 # an image format upgrade.)
1519 croot = None
1520 self._catalog = pkg.catalog.Catalog(
1521 meta_root=croot)
1522 return self._catalog
1524 @property
1525 def catalog_root(self):
1526 """The absolute pathname of the directory containing the
1527 Catalog data for the publisher, or None if meta_root is
1528 not defined."""
1530 if self.meta_root:
1531 return os.path.join(self.meta_root, "catalog")
1533 def create_meta_root(self):
1534 """Create the publisher's meta_root."""
1536 if not self.meta_root:
1537 raise api_errors.BadPublisherMetaRoot(self.meta_root,
1538 operation="create_meta_root")
1540 for path in (self.meta_root, self.catalog_root):
1541 try:
1542 os.makedirs(path)
1543 except EnvironmentError as e:
1544 if e.errno == errno.EACCES:
1545 raise api_errors.PermissionsException(
1546 e.filename)
1547 if e.errno == errno.EROFS:
1548 raise api_errors.ReadOnlyFileSystemException(
1549 e.filename)
1550 elif e.errno != errno.EEXIST:
1551 # If the path already exists, move on.
1552 # Otherwise, raise the exception.
1553 raise
1554 # Optional roots not needed for all operations.
1555 for path in (self.cert_root, self.__origin_root,
1556 self.__subj_root, self.__crl_root):
1557 try:
1558 os.makedirs(path)
1559 except EnvironmentError as e:
1560 if e.errno in (errno.EACCES, errno.EROFS):
1561 pass
1562 elif e.errno != errno.EEXIST:
1563 # If the path already exists, move on.
1564 # Otherwise, raise the exception.
1565 raise
1567 def get_origin_sets(self):
1568 """Returns a list of Repository objects representing the unique
1569 groups of origins available. Each group is based on the origins
1570 that share identical package catalog data."""
1572 if not self.repository or not self.repository.origins:
1573 # Guard against failure for publishers with no
1574 # transport information.
1575 return []
1577 if not self.meta_root or not os.path.exists(self.__origin_root):
1578 # No way to identify unique sets.
1579 return [self.repository]
1581 # Index origins by tuple of (catalog creation, catalog modified)
1582 osets = collections.defaultdict(list)
1584 for origin, opath in self.__gen_origin_paths():
1585 cat = pkg.catalog.Catalog(meta_root=opath,
1586 read_only=True)
1587 if not cat.exists:
1588 key = None
1589 else:
1590 key = (str(cat.created), str(cat.last_modified))
1591 osets[key].append(origin)
1593 # Now return a list of Repository objects (copies of the
1594 # currently selected one) assigning each set of origins.
1595 # Sort by index to ensure consistent ordering.
1596 rval = []
1597 for k in sorted(osets):
1598 nrepo = copy.copy(self.repository)
1599 nrepo.origins = osets[k]
1600 rval.append(nrepo)
1602 return rval
1604 def has_configuration(self):
1605 """Returns whether this publisher has any configuration which
1606 should prevent its removal."""
1608 return bool(self.__repository.origins or
1609 self.__repository.mirrors or self.__sig_policy or
1610 self.approved_ca_certs or self.revoked_ca_certs)
1612 @property
1613 def needs_refresh(self):
1614 """A boolean value indicating whether the publisher's
1615 metadata for the currently selected repository needs to be
1616 refreshed."""
1618 if not self.repository or not self.meta_root:
1619 # Nowhere to obtain metadata from; this should rarely
1620 # occur except during publisher initialization.
1621 return False
1623 lc = self.last_refreshed
1624 if not lc:
1625 # There is no record of when the publisher metadata was
1626 # last refreshed, so assume it should be refreshed now.
1627 return True
1629 ts_now = time.time()
1630 ts_last = calendar.timegm(lc.utctimetuple())
1632 rs = self.repository.refresh_seconds
1633 if not rs:
# There is no indicator of how often publisher
1635 # metadata should be refreshed, so assume it should be
1636 # now.
1637 return True
1639 if (ts_now - ts_last) >= rs:
1640 # The number of seconds that has elapsed since the
1641 # publisher metadata was last refreshed exceeds or
1642 # equals the specified interval.
1643 return True
1644 return False
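
# Worked example: with repository.refresh_seconds set to 14400 (four hours)
# and metadata last refreshed at 08:00 UTC, needs_refresh stays False until
# 12:00 UTC the same day, after which the elapsed time meets the interval
# and it becomes True.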
1646 def __get_origin_path(self, origin):
1647 if not os.path.exists(self.__origin_root):
1648 return
1649 # A digest of the URI string is used here to attempt to avoid
1650 # path length problems. In order for this image to interoperate
1651 # with older clients, we must use sha-1 here.
1652 return os.path.join(self.__origin_root,
1653 hashlib.sha1(origin.uri).hexdigest())
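
# Example: each origin's catalog is cached in a directory named by the SHA-1
# hex digest of its URI string, i.e. <meta_root>/origins/<40-character hex
# digest>, which keeps path lengths bounded regardless of how long the
# origin URI is.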
1655 def __gen_origin_paths(self):
1656 if not os.path.exists(self.__origin_root):
1657 return
1658 for origin in self.repository.origins:
1659 yield origin, self.__get_origin_path(origin)
1661 def __rebuild_catalog(self):
1662 """Private helper function that builds publisher catalog based
1663 on catalog from each origin."""
1665 # First, remove catalogs for any origins that no longer exist.
1666 # We must interoperate with older clients, so force the use of
1667 # sha-1 here.
ohashes = [
    hashlib.sha1(o.uri).hexdigest()
    for o in self.repository.origins
]

removals = False
1674 for entry in os.listdir(self.__origin_root):
1675 opath = os.path.join(self.__origin_root, entry)
1676 try:
1677 if entry in ohashes:
1678 continue
1679 except Exception:
1680 # Discard anything that isn't an origin.
1681 pass
1683 # An origin was removed, so publisher should inform
1684 # image to force image catalog rebuild.
1685 removals = True
1687 # Not an origin or origin no longer exists; either way,
1688 # it shouldn't exist here.
1689 try:
1690 if os.path.isdir(opath):
1691 shutil.rmtree(opath)
1692 else:
1693 portable.remove(opath)
1694 except EnvironmentError as e:
1695 raise api_errors._convert_error(e)
1697 # if the catalog already exists on disk, is empty, and if
1698 # no origins are configured, we're done.
1699 if self.catalog.exists and \
1700 self.catalog.package_count == 0 and \
1701 len(self.repository.origins) == 0:
1702 return removals
1704 # Discard existing catalog.
1705 self.catalog.destroy()
1706 self._catalog = None
1708 # Ensure all old catalog files are removed.
1709 for entry in os.listdir(self.catalog_root):
1710 if entry == "attrs" or entry == "catalog" or \
1711 entry.startswith("catalog."):
1712 try:
1713 portable.remove(os.path.join(
1714 self.catalog_root, entry))
1715 except EnvironmentError as e:
raise api_errors._convert_error(e)
1718 # If there's only one origin, then just symlink its catalog
1719 # files into place.
1720 # Symlinking includes updates for publication tools.
1721 opaths = [entry for entry in self.__gen_origin_paths()]
1722 if len(opaths) == 1:
1723 opath = opaths[0][1]
1724 for fname in os.listdir(opath):
1725 if fname.startswith("catalog.") or \
1726 fname.startswith("update."):
1727 src = os.path.join(opath, fname)
1728 dest = os.path.join(self.catalog_root,
1729 fname)
1730 os.symlink(misc.relpath(src,
1731 self.catalog_root), dest)
1732 return removals
1734 # If there's more than one origin, then create a new catalog
1735 # based on a composite of the catalogs for all origins.
1736 ncat = pkg.catalog.Catalog(batch_mode=True,
1737 meta_root=self.catalog_root, sign=False)
1739 # Mark all operations as occurring at this time.
1740 op_time = dt.datetime.utcnow()
1742 for origin, opath in opaths:
1743 src_cat = pkg.catalog.Catalog(meta_root=opath,
1744 read_only=True)
1745 for name in src_cat.parts:
1746 spart = src_cat.get_part(name, must_exist=True)
1747 if spart is None:
1748 # Client hasn't retrieved this part.
1749 continue
1751 npart = ncat.get_part(name)
1752 base = name.startswith("catalog.base.")
1754 # Avoid accessor overhead since these will be
1755 # used for every entry.
1756 cat_ver = src_cat.version
1758 for t, sentry in spart.tuple_entries(
1759 pubs=[self.prefix]):
1760 pub, stem, ver = t
1762 entry = dict(sentry.iteritems())
1763 try:
1764 npart.add(metadata=entry,
1765 op_time=op_time, pub=pub,
1766 stem=stem, ver=ver)
1767 except api_errors.DuplicateCatalogEntry:
1768 if not base:
1769 # Don't care.
1770 continue
1772 # Destination entry is in
1773 # catalog already.
1774 entry = npart.get_entry(
1775 pub=pub, stem=stem, ver=ver)
1777 src_sigs = set(
1778 s
1779 for s in sentry
1780 if s.startswith("signature-")
1781 )
1782 dest_sigs = set(
1783 s
1784 for s in entry
1785 if s.startswith("signature-")
1786 )
1788 if src_sigs != dest_sigs:
1789 # Ignore any packages
1790 # that are different
1791 # from the first
1792 # encountered for this
1793 # package version.
1794 # The client expects
1795 # these to always be
1796 # the same. This seems
1797 # saner than failing.
1798 continue
1799 else:
1800 if not base:
1801 # Nothing to do.
1802 continue
1804 # Destination entry is one just
1805 # added.
1806 entry["metadata"] = {
1807 "sources": [],
1808 "states": [],
1811 entry["metadata"]["sources"].append(
1812 origin.uri)
1814 states = entry["metadata"]["states"]
1815 if src_cat.version == 0:
1816 states.append(
1817 pkgdefs.PKG_STATE_V0)
1819 # Now go back and trim each entry to minimize footprint. This
1820 # ensures each package entry only has state and source info
1821 # recorded when needed.
1822 for t, entry in ncat.tuple_entries():
1823 pub, stem, ver = t
1824 mdata = entry["metadata"]
1825 if len(mdata["sources"]) == len(opaths):
1826 # Package is available from all origins, so
1827 # there's no need to record which ones
1828 # have it.
1829 del mdata["sources"]
1831 if len(mdata["states"]) < len(opaths):
1832 # At least one source is not V0, so the lazy-
1833 # load fallback for the package metadata isn't
1834 # needed.
1835 del mdata["states"]
1836 elif len(mdata["states"]) > 1:
1837 # Ensure only one instance of state value.
1838 mdata["states"] = [pkgdefs.PKG_STATE_V0]
1839 if not mdata:
1840 mdata = None
1841 ncat.update_entry(mdata, pub=pub, stem=stem, ver=ver)
1843 # Finally, write out publisher catalog.
1844 ncat.batch_mode = False
1845 ncat.finalize()
1846 ncat.save()
1847 return removals
1849 def __convert_v0_catalog(self, v0_cat, v1_root):
1850 """Transforms the contents of the provided version 0 Catalog
1851 into a version 1 Catalog, replacing the current Catalog."""
1853 v0_lm = v0_cat.last_modified()
1854 if v0_lm:
1855 # last_modified can be none if the catalog is empty.
1856 v0_lm = pkg.catalog.ts_to_datetime(v0_lm)
1858 # There's no point in signing this catalog since it's simply
1859 # a transformation of a v0 catalog.
1860 v1_cat = pkg.catalog.Catalog(batch_mode=True,
1861 meta_root=v1_root, sign=False)
1863 # A check for a previous non-zero package count is made to
1864 # determine whether the last_modified date alone can be
1865 # relied on. This works around some oddities with empty
1866 # v0 catalogs.
1867 try:
1868 # Could be 'None'
1869 n0_pkgs = int(v0_cat.npkgs())
1870 except (TypeError, ValueError):
1871 n0_pkgs = 0
1873 if v1_cat.exists and n0_pkgs != v1_cat.package_version_count:
1874 if v0_lm == v1_cat.last_modified:
1875 # Already converted.
1876 return
1877 # Simply rebuild the entire v1 catalog every time, this
1878 # avoids many of the problems that could happen due to
1879 # deficiencies in the v0 implementation.
1880 v1_cat.destroy()
1881 self._catalog = None
1882 v1_cat = pkg.catalog.Catalog(meta_root=v1_root,
1883 sign=False)
1885 # Now populate the v1 Catalog with the v0 Catalog's data.
1886 for f in v0_cat.fmris():
1887 v1_cat.add_package(f)
1889 # Normally, the Catalog's attributes are automatically
1890 # populated as a result of catalog operations. But in
1891 # this case, we want the v1 Catalog's attributes to
1892 # match those of the v0 catalog.
1893 v1_cat.last_modified = v0_lm
1895 # While this is a v1 catalog format-wise, v0 data is stored.
1896 # This allows consumers to be aware that certain data won't be
1897 # available in this catalog (such as dependencies, etc.).
1898 v1_cat.version = 0
1900 # Finally, save the new Catalog, and replace the old in-memory
1901 # catalog.
1902 v1_cat.batch_mode = False
1903 v1_cat.finalize()
1904 v1_cat.save()
1906 def __refresh_v0(self, croot, full_refresh, immediate, repo):
1907 """The method to refresh the publisher's metadata against
1908 a catalog/0 source. If the more recent catalog/1 version
1909 isn't supported, this routine gets invoked as a fallback.
1910 Returns a tuple of (changed, refreshed) where 'changed'
1911 indicates whether new catalog data was found and 'refreshed'
1912 indicates that catalog data was actually retrieved to determine
1913 if there were any updates."""
1915 if full_refresh:
1916 immediate = True
1918 # Catalog needs v0 -> v1 transformation if repository only
1919 # offers v0 catalog.
1920 v0_cat = old_catalog.ServerCatalog(croot, read_only=True,
1921 publisher=self.prefix)
1923 new_cat = True
1924 v0_lm = None
1925 if v0_cat.exists:
1926 repo = self.repository
1927 if full_refresh or v0_cat.origin() not in repo.origins:
1928 try:
1929 v0_cat.destroy(root=croot)
1930 except EnvironmentError as e:
1931 if e.errno == errno.EACCES:
1932 raise api_errors.PermissionsException(
1933 e.filename)
1934 if e.errno == errno.EROFS:
1935 raise api_errors.ReadOnlyFileSystemException(
1936 e.filename)
1937 raise
1938 immediate = True
1939 else:
1940 new_cat = False
1941 v0_lm = v0_cat.last_modified()
1943 if not immediate and not self.needs_refresh:
1944 # No refresh needed.
1945 return False, False
1947 import pkg.updatelog as old_ulog
1948 try:
1949 # Note that this currently retrieves a v0 catalog that
1950 # has to be converted to v1 format.
1951 self.transport.get_catalog(self, v0_lm, path=croot,
1952 alt_repo=repo)
1953 except old_ulog.UpdateLogException:
1954 # If an incremental update fails, attempt a full
1955 # catalog retrieval instead.
1956 try:
1957 v0_cat.destroy(root=croot)
1958 except EnvironmentError as e:
1959 if e.errno == errno.EACCES:
1960 raise api_errors.PermissionsException(
1961 e.filename)
1962 if e.errno == errno.EROFS:
1963 raise api_errors.ReadOnlyFileSystemException(
1964 e.filename)
1965 raise
1966 self.transport.get_catalog(self, path=croot,
1967 alt_repo=repo)
1969 v0_cat = pkg.server.catalog.ServerCatalog(croot, read_only=True,
1970 publisher=self.prefix)
1972 self.__convert_v0_catalog(v0_cat, croot)
1973 if new_cat or v0_lm != v0_cat.last_modified():
1974 # If the catalog was rebuilt, or the timestamp of the
1975 # catalog changed, then an update has occurred.
1976 return True, True
1977 return False, True
1979 def __refresh_v1(self, croot, tempdir, full_refresh, immediate,
1980 mismatched, repo, progtrack=None, include_updates=False):
1981 """The method to refresh the publisher's metadata against
1982 a catalog/1 source. If the more recent catalog/1 version
1983 isn't supported, __refresh_v0 is invoked as a fallback.
1984 Returns a tuple of (changed, refreshed) where 'changed'
1985 indicates whether new catalog data was found and 'refreshed'
1986 indicates that catalog data was actually retrieved to determine
1987 if there were any updates."""
1989 # If full_refresh is True, then redownload should be True to
1990 # ensure a non-cached version of the catalog is retrieved.
1991 # If full_refresh is False, but mismatched is True, then
1992 # the retrieval requests should indicate that content should
1993 # be revalidated before being returned. Note that this
1994 # only applies to the catalog v1 case.
1995 redownload = full_refresh
1996 revalidate = not redownload and mismatched
1998 v1_cat = pkg.catalog.Catalog(meta_root=croot)
1999 try:
2000 self.transport.get_catalog1(self, ["catalog.attrs"],
2001 path=tempdir, redownload=redownload,
2002 revalidate=revalidate, alt_repo=repo,
2003 progtrack=progtrack)
2004 except api_errors.UnsupportedRepositoryOperation:
2005 # No v1 catalogs available.
2006 if v1_cat.exists:
2007 # Ensure v1 -> v0 transition works right.
2008 v1_cat.destroy()
2009 self._catalog = None
2010 return self.__refresh_v0(croot, full_refresh, immediate,
2011 repo)
2013 # If a v0 catalog is present, remove it before proceeding to
2014 # ensure transitions between catalog versions work correctly.
2015 v0_cat = old_catalog.ServerCatalog(croot, read_only=True,
2016 publisher=self.prefix)
2017 if v0_cat.exists:
2018 v0_cat.destroy(root=croot)
2020 # If above succeeded, we now have a catalog.attrs file. Parse
2021 # this to determine what other constituent parts need to be
2022 # downloaded.
2023 flist = []
2024 if not full_refresh and v1_cat.exists:
2025 flist = v1_cat.get_updates_needed(tempdir)
2026 if flist is None:
2027 return False, True
2028 else:
2029 attrs = pkg.catalog.CatalogAttrs(meta_root=tempdir)
2030 for name in attrs.parts:
2031 locale = name.split(".", 2)[2]
2032 # XXX Skip parts that aren't in the C locale for
2033 # now.
2034 if locale != "C":
2035 continue
2036 flist.append(name)
2037 if include_updates:
2038 for update in attrs.updates:
2039 flist.append(update)
2041 if flist:
2042 # More catalog files to retrieve.
2043 try:
2044 self.transport.get_catalog1(self, flist,
2045 path=tempdir, redownload=redownload,
2046 revalidate=revalidate, alt_repo=repo,
2047 progtrack=progtrack)
2048 except api_errors.UnsupportedRepositoryOperation:
2049 # Couldn't find a v1 catalog after getting one
2050 # before. This would be a bizarre error, but we
2051 # can try for a v0 catalog anyway.
2052 return self.__refresh_v0(croot, full_refresh,
2053 immediate, repo)
2055 # Clear _catalog, so we'll read in the new catalog.
2056 self._catalog = None
2057 v1_cat = pkg.catalog.Catalog(meta_root=croot)
2059 # At this point the client should have a set of the constituent
2060 # pieces that are necessary to construct a catalog. If a
2061 # catalog already exists, call apply_updates. Otherwise,
2062 # move the files to the appropriate location.
2063 validate = False
2064 if not full_refresh and v1_cat.exists:
2065 v1_cat.apply_updates(tempdir)
2066 else:
2067 if v1_cat.exists:
2068 # This is a full refresh. Destroy
2069 # the existing catalog.
2070 v1_cat.destroy()
2072 for fn in os.listdir(tempdir):
2073 srcpath = os.path.join(tempdir, fn)
2074 dstpath = os.path.join(croot, fn)
2075 pkg.portable.rename(srcpath, dstpath)
2077 # Apply_updates validates the newly constructed catalog.
2078 # If refresh didn't call apply_updates, arrange to
2079 # have the new catalog validated.
2080 validate = True
2082 if validate:
2083 try:
2084 v1_cat = pkg.catalog.Catalog(meta_root=croot)
2085 v1_cat.validate()
2086 except api_errors.BadCatalogSignatures:
2087 # If signature validation fails here, that means
2088 # that the attributes and individual parts were
2089 # self-consistent and not corrupt, but that the
2090 # attributes and parts didn't match. This could
2091 # be the result of a broken source providing
2092 # an attributes file that is much older or newer
2093 # than the catalog parts being provided.
2094 v1_cat.destroy()
2095 raise api_errors.MismatchedCatalog(self.prefix)
2096 return True, True
2098 def __refresh_origin(self, croot, full_refresh, immediate, mismatched,
2099 origin, progtrack=None, include_updates=False):
2100 """Private helper method used to refresh catalog data for each
2101 origin. Returns a tuple of (changed, refreshed) where 'changed'
2102 indicates whether new catalog data was found and 'refreshed'
2103 indicates that catalog data was actually retrieved to determine
2104 if there were any updates."""
2106 # Create a copy of the current repository object that only
2107 # contains the origin specified.
2108 repo = copy.copy(self.repository)
2109 repo.origins = [origin]
2111 # Create temporary directory for assembly of catalog pieces.
2112 try:
2113 misc.makedirs(croot)
2114 tempdir = tempfile.mkdtemp(dir=croot)
2115 except EnvironmentError as e:
2116 if e.errno == errno.EACCES:
2117 raise api_errors.PermissionsException(
2118 e.filename)
2119 if e.errno == errno.EROFS:
2120 raise api_errors.ReadOnlyFileSystemException(
2121 e.filename)
2122 raise
2124 # Ensure that the temporary directory gets removed regardless
2125 # of success or failure.
2126 try:
2127 rval = self.__refresh_v1(croot, tempdir,
2128 full_refresh, immediate, mismatched, repo,
2129 progtrack=progtrack,
2130 include_updates=include_updates)
2132 # Perform publisher metadata sanity checks.
2133 self.__validate_metadata(croot, repo)
2135 return rval
2136 finally:
2137 # Cleanup tempdir.
2138 shutil.rmtree(tempdir, True)
2140 def __refresh(self, full_refresh, immediate, mismatched=False,
2141 progtrack=None, include_updates=False):
2142 """The method to handle the overall refresh process. It
2143 determines if a refresh is actually needed, and then calls
2144 the first version-specific refresh method in the chain."""
2146 assert self.transport
2148 if full_refresh:
2149 immediate = True
2151 for origin, opath in self.__gen_origin_paths():
2152 misc.makedirs(opath)
2153 cat = pkg.catalog.Catalog(meta_root=opath,
2154 read_only=True)
2155 if not cat.exists:
2156 # If a catalog hasn't been retrieved for
2157 # any of the origins, then a refresh is
2158 # needed now.
2159 immediate = True
2160 break
2162 # Ensure consistent directory structure.
2163 self.create_meta_root()
2165 # Check if we already have a v1 catalog on disk.
2166 if not full_refresh and self.catalog.exists:
2167 # If catalog is on disk, check if refresh is necessary.
2168 if not immediate and not self.needs_refresh:
2169 # No refresh needed.
2170 return False
2172 any_changed = False
2173 any_refreshed = False
2174 for origin, opath in self.__gen_origin_paths():
2175 changed, refreshed = self.__refresh_origin(opath,
2176 full_refresh, immediate, mismatched, origin,
2177 progtrack=progtrack,
2178 include_updates=include_updates)
2179 if changed:
2180 any_changed = True
2181 if refreshed:
2182 any_refreshed = True
2184 if any_refreshed:
2185 # Update refresh time.
2186 self.last_refreshed = dt.datetime.utcnow()
2188 # Finally, build a new catalog for this publisher based on a
2189 # composite of the catalogs from all origins.
2190 if self.__rebuild_catalog():
2191 any_changed = True
2193 return any_changed
2195 def refresh(self, full_refresh=False, immediate=False, progtrack=None,
2196 include_updates=False):
2197 """Refreshes the publisher's metadata, returning a boolean
2198 value indicating whether any updates to the publisher's
2199 metadata occurred.
2201 'full_refresh' is an optional boolean value indicating whether
2202 a full retrieval of publisher metadata (e.g. catalogs) or only
2203 an update to the existing metadata should be performed. When
2204 True, 'immediate' is also set to True.
2206 'immediate' is an optional boolean value indicating whether
2207 a refresh should occur now. If False, a publisher's selected
2208 repository will be checked for updates only if needs_refresh
2209 is True.
2211 'include_updates' is an optional boolean value indicating
2212 whether all catalog updates should be retrieved in addition to
2213 the catalog."""
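# A minimal usage sketch (hypothetical caller; in practice this is
# driven by the client API/image code rather than called directly):
#     if pub.refresh(immediate=True):
#         # Publisher metadata changed; the caller refreshes its own
#         # view (e.g. the image catalog).
#         pass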
2215 try:
2216 return self.__refresh(full_refresh, immediate,
2217 progtrack=progtrack,
2218 include_updates=include_updates)
2219 except (api_errors.BadCatalogUpdateIdentity,
2220 api_errors.DuplicateCatalogEntry,
2221 api_errors.ObsoleteCatalogUpdate,
2222 api_errors.UnknownUpdateType):
2223 if full_refresh:
2224 # Completely unexpected failure.
2225 # These exceptions should never
2226 # be raised for a full refresh
2227 # case anyway, so the error should
2228 # definitely be raised.
2229 raise
2231 # The incremental update likely failed for one or
2232 # more of the following reasons:
2234 # * The origin for the publisher has changed.
2236 # * The catalog that the publisher is offering
2237 # is now completely different (due to a restore
2238 # from backup or --rebuild possibly).
2240 # * The catalog that the publisher is offering
2241 # has been restored to an older version, and
2242 # packages that already exist in this client's
2243 # copy of the catalog have been re-added.
2245 # * The type of incremental update operation that
2246 # was performed on the catalog isn't supported
2247 # by this version of the client, so a full retrieval
2248 # is required.
2250 return self.__refresh(True, True, progtrack=progtrack)
2251 except api_errors.MismatchedCatalog:
2252 if full_refresh:
2253 # If this was a full refresh, don't bother
2254 # retrying as it implies that the content
2255 # retrieved wasn't cached.
2256 raise
2258 # Retrieval of the catalog attributes and/or parts was
2259 # successful, but the identity (digest or other
2260 # information) didn't match the catalog attributes.
2261 # This could be the result of a misbehaving or stale
2262 # cache.
2263 return self.__refresh(False, True, mismatched=True,
2264 progtrack=progtrack)
2265 except (api_errors.BadCatalogSignatures,
2266 api_errors.InvalidCatalogFile):
2267 # Assembly of the catalog failed, but this could be due
2268 # to a transient error. So, retry at least once more.
2269 return self.__refresh(True, True, progtrack=progtrack)
2276 def remove_meta_root(self):
2277 """Removes the publisher's meta_root."""
2279 if not self.meta_root:
2280 raise api_errors.BadPublisherMetaRoot(self.meta_root,
2281 operation="remove_meta_root")
2283 try:
2284 shutil.rmtree(self.meta_root)
2285 except EnvironmentError as e:
2286 if e.errno == errno.EACCES:
2287 raise api_errors.PermissionsException(
2288 e.filename)
2289 if e.errno == errno.EROFS:
2290 raise api_errors.ReadOnlyFileSystemException(
2291 e.filename)
2292 if e.errno not in (errno.ENOENT, errno.ESRCH):
2293 raise
2295 def reset_client_uuid(self):
2296 """Replaces the current client_uuid with a new UUID."""
2298 self.__client_uuid = str(uuid.uuid1())
2300 def validate_config(self, repo_uri=None):
2301 """Verify that the publisher's configuration (such as prefix)
2302 matches that provided by the repository. If the configuration
2303 does not match as expected, an UnknownRepositoryPublishers
2304 exception will be raised.
2306 'repo_uri' is an optional RepositoryURI object or URI string
2307 containing the location of the repository. If not provided,
2308 the publisher's repository will be used instead."""
2310 if repo_uri and not isinstance(repo_uri, RepositoryURI):
2311 repo = RepositoryURI(repo_uri)
2312 elif not repo_uri:
2313 # Transport actually allows both types of objects.
2314 repo = self
2315 else:
2316 repo = repo_uri
2318 pubs = None
2319 try:
2320 pubs = self.transport.get_publisherdata(repo)
2321 except (api_errors.TransportError,
2322 api_errors.UnsupportedRepositoryOperation):
2323 # Nothing more can be done (because the target origin
2324 # can't be contacted, or because it doesn't support
2325 # retrieval of publisher configuration data).
2326 return
2328 if not pubs:
2329 raise api_errors.RepoPubConfigUnavailable(
2330 location=repo_uri, pub=self)
2332 if self.prefix not in pubs:
2333 known = [p.prefix for p in pubs]
2334 if repo_uri:
2335 raise api_errors.UnknownRepositoryPublishers(
2336 known=known, unknown=[self.prefix],
2337 location=repo_uri)
2338 raise api_errors.UnknownRepositoryPublishers(
2339 known=known, unknown=[self.prefix],
2340 origins=self.repository.origins)
2342 def approve_ca_cert(self, cert):
2343 """Add the cert as a CA for manifest signing for this publisher.
2345 The 'cert' parameter is a string of the certificate to add.
2346 """
2348 cert = self.__string_to_cert(cert)
2349 hsh = self.__add_cert(cert)
2350 # If the user had previously revoked this certificate, remove
2351 # the certificate from that list.
2352 if hsh in self.revoked_ca_certs:
2353 t = set(self.revoked_ca_certs)
2354 t.remove(hsh)
2355 self.revoked_ca_certs = list(t)
2356 self.approved_ca_certs.append(hsh)
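# Illustrative sketch of approving a CA certificate (assumes 'pub' is
# a configured Publisher object and a PEM file readable by the caller;
# the path is hypothetical):
#     with open("/path/to/ca.pem", "rb") as f:
#         pub.approve_ca_cert(f.read())
# The hash recorded in approved_ca_certs is the SHA-1 of the PEM bytes
# (see __hash_cert below).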
2358 def revoke_ca_cert(self, s):
2359 """Record that the cert with hash 's' is no longer trusted
2360 as a CA. This method currently assumes it's only invoked as
2361 a result of user action."""
2363 self.revoked_ca_certs.append(s)
2364 self.revoked_ca_certs = list(set(
2365 self.revoked_ca_certs))
2366 if s in self.approved_ca_certs:
2367 t = set(self.approved_ca_certs)
2368 t.remove(s)
2369 self.approved_ca_certs = list(t)
2371 def unset_ca_cert(self, s):
2372 """If the cert with hash 's' has been added or removed by the
2373 user, undo the add or removal."""
2375 if s in self.approved_ca_certs:
2376 t = set(self.approved_ca_certs)
2377 t.remove(s)
2378 self.approved_ca_certs = list(t)
2379 if s in self.revoked_ca_certs:
2380 t = set(self.revoked_ca_certs)
2381 t.remove(s)
2382 self.revoked_ca_certs = list(t)
2384 @staticmethod
2385 def __hash_cert(c):
2386 # In order to interoperate with older images, we must use SHA-1
2387 # here.
2388 return hashlib.sha1(
2389 c.public_bytes(serialization.Encoding.PEM)).hexdigest()
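# Equivalently (sketch): hashlib.sha1(pem_bytes).hexdigest(), where
# pem_bytes is the certificate's PEM serialization; this is the hash
# form stored in approved_ca_certs and revoked_ca_certs.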
2391 @staticmethod
2392 def __string_to_cert(s, pkg_hash=None):
2393 """Convert a string to a X509 cert."""
2395 try:
2396 return x509.load_pem_x509_certificate(
2397 misc.force_bytes(s), default_backend())
2398 except ValueError:
2399 if pkg_hash is not None:
2400 raise api_errors.BadFileFormat(_("The file "
2401 "with hash {0} was expected to be a PEM "
2402 "certificate but it could not be "
2403 "read.").format(pkg_hash))
2404 raise api_errors.BadFileFormat(_("The following string "
2405 "was expected to be a PEM certificate, but it "
2406 "could not be parsed as such:\n{0}").format(s))
2408 def __add_cert(self, cert):
2409 """Add the pem representation of the certificate 'cert' to the
2410 certificates this publisher knows about."""
2412 self.create_meta_root()
2413 pkg_hash = self.__hash_cert(cert)
2414 pkg_hash_pth = os.path.join(self.cert_root, pkg_hash)
2415 file_problem = False
2416 try:
2417 with open(pkg_hash_pth, "wb") as fh:
2418 fh.write(cert.public_bytes(
2419 serialization.Encoding.PEM))
2420 except EnvironmentError as e:
2421 if e.errno == errno.EACCES:
2422 raise api_errors.PermissionsException(
2423 e.filename)
2424 file_problem = True
2426 # Note that while we store certs by their subject hashes,
2427 # we use our own hashing since cryptography has no interface
2428 # for the subject hash and other crypto frameworks have been
2429 # inconsistent with OpenSSL.
2430 subj_hsh = hashlib.sha1(misc.force_bytes(
2431 cert.subject)).hexdigest()
2432 c = 0
2433 made_link = False
2434 while not made_link:
2435 fn = os.path.join(self.__subj_root,
2436 "{0}.{1}".format(subj_hsh, c))
2437 if os.path.exists(fn):
2438 c += 1
2439 continue
2440 if not file_problem:
2441 try:
2442 portable.link(pkg_hash_pth, fn)
2443 made_link = True
2444 except EnvironmentError as e:
2445 pass
2446 if not made_link:
2447 self.__issuers.setdefault(subj_hsh, []).append(
2448 pkg_hash)
2449 made_link = True
2450 return pkg_hash
2452 def get_cert_by_hash(self, pkg_hash, verify_hash=False,
2453 only_retrieve=False, hash_func=digest.DEFAULT_HASH_FUNC):
2454 """Given a pkg5 hash, retrieve the cert that's associated with
2457 The 'pkg_hash' parameter contains the file hash of the
2458 certificate to retrieve.
2460 The 'verify_hash' parameter determines whether the file that's read
2461 from disk matches the expected hash.
2463 The 'only_retrieve' parameter determines whether a X509 object
2464 is built from the certificate retrieved or if the certificate
2465 is only stored on disk. """
2467 assert not (verify_hash and only_retrieve)
2468 pth = os.path.join(self.cert_root, pkg_hash)
2469 pth_exists = os.path.exists(pth)
2470 if pth_exists and only_retrieve:
2471 return None
2472 if pth_exists:
2473 with open(pth, "rb") as fh:
2474 s = fh.read()
2475 else:
2476 s = self.transport.get_content(self, pkg_hash,
2477 hash_func=hash_func)
2478 c = self.__string_to_cert(s, pkg_hash)
2479 if not pth_exists:
2480 try:
2481 self.__add_cert(c)
2482 except api_errors.PermissionsException:
2483 pass
2484 if only_retrieve:
2485 return None
2487 if verify_hash:
2488 h = misc.get_data_digest(cStringIO.StringIO(s),
2489 length=len(s), hash_func=hash_func)[0]
2490 if h != pkg_hash:
2491 raise api_errors.ModifiedCertificateException(c,
2492 pth)
2493 return c
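# Illustrative sketch (hypothetical hash value; in practice the hash
# comes from a signature action in a manifest):
#     cert = pub.get_cert_by_hash(chain_hash, verify_hash=True)
# returns a cryptography x509.Certificate, fetching the PEM over the
# transport and caching it under cert_root when it isn't already on
# disk.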
2495 def __rebuild_subj_root(self):
2496 """Rebuild subject hash metadata."""
2498 # Clean up the old subject hash files to prevent
2499 # junk files from residing in the directory.
2500 try:
2501 shutil.rmtree(self.__subj_root)
2502 except EnvironmentError:
2503 # if unprivileged user, we can't add
2504 # certs to it
2505 pass
2506 else:
2507 for p in os.listdir(self.cert_root):
2508 path = os.path.join(self.cert_root, p)
2509 if not os.path.isfile(path):
2510 continue
2511 with open(path, "rb") as fh:
2512 s = fh.read()
2513 cert = self.__string_to_cert(s)
2514 self.__add_cert(cert)
2516 def __get_certs_by_name(self, name):
2517 """Given 'name', a Cryptograhy 'Name' object, return the certs
2518 with that name as a subject."""
2520 res = []
2521 count = 0
2522 name_hsh = hashlib.sha1(misc.force_bytes(name)).hexdigest()
2524 def load_cert(pth):
2525 with open(pth, "rb") as f:
2526 return x509.load_pem_x509_certificate(
2527 f.read(), default_backend())
2529 try:
2530 while True:
2531 pth = os.path.join(self.__subj_root,
2532 "{0}.{1}".format(name_hsh, count))
2533 res.append(load_cert(pth))
2534 count += 1
2535 except EnvironmentError as e:
2536 # When switching to a different hash algorithm, the hashed
2537 # file name changes so that we can't find the
2538 # file. We try harder to rebuild the subject's metadata
2539 # if it's the first time we fail (count == 0).
2540 if count == 0 and e.errno == errno.ENOENT:
2541 self.__rebuild_subj_root()
2542 try:
2543 res.append(load_cert(pth))
2544 except EnvironmentError as e:
2545 if e.errno != errno.ENOENT:
2546 raise
2548 t = api_errors._convert_error(e,
2549 [errno.ENOENT])
2550 if t:
2551 raise t
2552 res.extend(self.__issuers.get(name_hsh, []))
2553 return res
2555 def get_ca_certs(self):
2556 """Return a dictionary of the CA certificates for this
2557 publisher."""
2559 if self.ca_dict is not None:
2560 return self.ca_dict
2561 self.ca_dict = {}
2562 # CA certs approved for this publisher are stored by hash to
2563 # prevent the later substitution or confusion over what certs
2564 # have or have not been approved.
2565 for h in set(self.approved_ca_certs):
2566 c = self.get_cert_by_hash(h, verify_hash=True)
2567 s = hashlib.sha1(misc.force_bytes(
2568 c.subject)).hexdigest()
2569 self.ca_dict.setdefault(s, [])
2570 self.ca_dict[s].append(c)
2571 return self.ca_dict
2573 def update_props(self, set_props=EmptyI, add_prop_values=EmptyDict,
2574 remove_prop_values=EmptyDict, unset_props=EmptyI):
2575 """Update the properties set for this publisher with the ones
2576 provided as arguments. The order of application is that any
2577 existing properties are unset, then properties are set to their
2578 new values, then values are added to properties, and finally
2579 values are removed from properties."""
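# A minimal sketch of a call (hypothetical values):
#     pub.update_props(
#         set_props={SIGNATURE_POLICY: "verify"},
#         add_prop_values={"signature-required-names": ["example.com"]})
# Unset runs first, then set, then add, then remove, as described
# above.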
2581 # Delay validation so that any intermittent inconsistent state
2582 # doesn't cause problems.
2583 self.__delay_validation = True
2584 # Remove existing properties.
2585 for n in unset_props:
2586 self.properties.pop(n, None)
2587 # Add or reset new properties.
2588 self.properties.update(set_props)
2589 # Add new values to properties.
2590 for n in add_prop_values.keys():
2591 self.properties.setdefault(n, [])
2592 if not isinstance(self.properties[n], list):
2593 raise api_errors.InvalidPropertyValue(_(
2594 "Cannot add a value to a single valued "
2595 "property, The property name is '{name}' "
2596 "and the current value is '{value}'"
2597 ).format(name=n, value=self.properties[n]))
2598 self.properties[n].extend(add_prop_values[n])
2599 # Remove values from properties.
2600 for n in remove_prop_values.keys():
2601 if n not in self.properties:
2602 raise api_errors.InvalidPropertyValue(_(
2603 "Cannot remove a value from the property "
2604 "{name} because the property does not "
2605 "exist.").format(name=n))
2606 if not isinstance(self.properties[n], list):
2607 raise api_errors.InvalidPropertyValue(_(
2608 "Cannot remove a value from a single "
2609 "valued property, unset must be used. The "
2610 "property name is '{name}' and the "
2611 "current value is '{value}'").format(
2612 name=n, value=self.properties[n]))
2613 for v in remove_prop_values[n]:
2614 try:
2615 self.properties[n].remove(v)
2616 except ValueError:
2617 raise api_errors.InvalidPropertyValue(_(
2618 "Cannot remove the value {value} "
2619 "from the property {name} "
2620 "because the value is not in the "
2621 "property's list.").format(
2622 value=v, name=n))
2623 self.__delay_validation = False
2624 self.__validate_properties()
2626 def __validate_properties(self):
2627 """Check that the properties set for this publisher are
2628 consistent with each other."""
2630 if self.__properties.get(SIGNATURE_POLICY, "") == \
2631 "require-names":
2632 if not self.__properties.get("signature-required-names",
2633 None):
2634 raise api_errors.InvalidPropertyValue(_(
2635 "At least one name must be provided for "
2636 "the signature-required-names policy."))
2638 def __format_safe_read_crl(self, pth):
2639 """CRLs seem to frequently come in DER format, so try reading
2640 the CRL using both of the formats before giving up."""
2642 with open(pth, "rb") as f:
2643 raw = f.read()
2645 try:
2646 return x509.load_pem_x509_crl(raw, default_backend())
2647 except ValueError:
2648 try:
2649 return x509.load_der_x509_crl(raw,
2650 default_backend())
2651 except ValueError:
2652 raise api_errors.BadFileFormat(_("The CRL file "
2653 "{0} is not in a recognized "
2654 "format.").format(pth))
2656 def __get_crl(self, uri):
2657 """Given a URI (for now only http URIs are supported), return
2658 the CRL object created from the file stored at that uri."""
2660 uri = uri.strip()
2661 if uri.startswith("Full Name:"):
2662 uri = uri[len("Full Name:"):]
2663 uri = uri.strip()
2664 if uri.startswith("URI:"):
2665 uri = uri[4:]
2666 if not uri.startswith("http://") and \
2667 not uri.startswith("file://"):
2668 raise api_errors.InvalidResourceLocation(uri.strip())
2669 crl_host = DebugValues.get_value("crl_host")
2670 if crl_host:
2671 orig = urlparse.urlparse(uri)
2672 crl = urlparse.urlparse(crl_host)
2673 uri = urlparse.urlunparse(urlparse.ParseResult(
2674 scheme=crl.scheme, netloc=crl.netloc,
2675 path=orig.path,
2676 params=orig.params, query=orig.query,
2677 fragment=orig.fragment))
2678 # If we've already read the CRL, use the previously created
2679 # object.
2680 if uri in self.__tmp_crls:
2681 return self.__tmp_crls[uri]
2682 fn = urllib.quote(uri, "")
2683 assert os.path.isdir(self.__crl_root)
2684 fpath = os.path.join(self.__crl_root, fn)
2685 crl = None
2686 # Check if we already have a CRL for this URI.
2687 if os.path.exists(fpath):
2688 # If we already have a CRL that we can read, check
2689 # whether it's time to retrieve a new one from the
2690 # location.
2691 try:
2692 crl = self.__format_safe_read_crl(fpath)
2693 except EnvironmentError:
2694 pass
2695 else:
2696 nu = crl.next_update
2697 cur_time = dt.datetime.utcnow()
2699 if cur_time < nu:
2700 self.__tmp_crls[uri] = crl
2701 return crl
2702 # If the CRL is already known to be unavailable, don't try
2703 # connecting to it again.
2704 if uri in Publisher.__bad_crls:
2705 return crl
2706 # If no CRL already exists or it's time to try to get a new one,
2707 # try to retrieve it from the server.
2708 try:
2709 tmp_fd, tmp_pth = tempfile.mkstemp(dir=self.__crl_root)
2710 except EnvironmentError as e:
2711 if e.errno in (errno.EACCES, errno.EPERM):
2712 tmp_fd, tmp_pth = tempfile.mkstemp()
2713 else:
2714 raise api_errors._convert_error(e)
2715 with os.fdopen(tmp_fd, "wb") as fh:
2716 hdl = pycurl.Curl()
2717 hdl.setopt(pycurl.URL, uri)
2718 hdl.setopt(pycurl.WRITEDATA, fh)
2719 hdl.setopt(pycurl.FAILONERROR, 1)
2720 hdl.setopt(pycurl.CONNECTTIMEOUT,
2721 global_settings.PKG_CLIENT_CONNECT_TIMEOUT)
2722 try:
2723 hdl.perform()
2724 except pycurl.error:
2725 # If the CRL is unavailable, add it to the list
2726 # of bad crls.
2727 Publisher.__bad_crls.add(uri)
2728 # If we should treat failure to get a new CRL
2729 # as a failure, raise an exception here. If not,
2730 # if we should use an old CRL if it exists,
2731 # return that here. If none is available and
2732 # that means the cert should not be treated as
2733 # revoked, return None here.
2734 return crl
2735 try:
2736 ncrl = self.__format_safe_read_crl(tmp_pth)
2737 except api_errors.BadFileFormat:
2738 portable.remove(tmp_pth)
2739 return crl
2740 try:
2741 portable.rename(tmp_pth, fpath)
2742 # Because the file was made using mkstemp, we need to
2743 # chmod it to match the other files in var/pkg.
2744 os.chmod(fpath, PKG_RO_FILE_MODE)
2745 except EnvironmentError:
2746 self.__tmp_crls[uri] = ncrl
2747 try:
2748 portable.remove(tmp_pth)
2749 except EnvironmentError:
2750 pass
2751 return ncrl
2754 def __verify_x509_signature(self, c, key):
2755 """Verify the signature of a certificate or CRL 'c' against a
2756 provided public key 'key'."""
2758 verifier = key.verifier(
2759 c.signature, padding.PKCS1v15(),
2760 c.signature_hash_algorithm)
2762 if isinstance(c, x509.Certificate):
2763 data = c.tbs_certificate_bytes
2764 elif isinstance(c, x509.CertificateRevocationList):
2765 data = c.tbs_certlist_bytes
2766 else:
2767 raise AssertionError("Invalid x509 object for "
2768 "signature verification: {0}".format(type(c)))
2770 verifier.update(data)
2771 try:
2772 verifier.verify()
2773 return True
2774 except Exception:
2775 return False
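# Note: key.verifier() is the older cryptography interface; newer
# releases of the library replace it with a one-shot call. A rough
# equivalent for an RSA public key (sketch, not used here) would be:
#     key.verify(c.signature, data, padding.PKCS1v15(),
#         c.signature_hash_algorithm)
# which raises an exception on mismatch instead of returning a
# verification context.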
2777 def __check_crl(self, cert, ca_dict, crl_uri):
2778 """Determines whether the certificate has been revoked by the
2779 CRL located at 'crl_uri'.
2781 The 'cert' parameter is the certificate to check for revocation.
2783 The 'ca_dict' is a dictionary which maps subject hashes to
2784 certs treated as trust anchors."""
2786 crl = self.__get_crl(crl_uri)
2788 # If we couldn't retrieve a CRL from the distribution point
2789 # and no CRL is cached on disk, assume the cert has not been
2790 # revoked. It's possible that this should be an image or
2791 # publisher setting in the future.
2792 if not crl:
2793 return True
2795 # A CRL has been found; now it needs to be validated like
2796 # a certificate is.
2797 verified_crl = False
2798 crl_issuer = crl.issuer
2799 tas = ca_dict.get(hashlib.sha1(misc.force_bytes(
2800 crl_issuer)).hexdigest(), [])
2801 for t in tas:
2802 try:
2803 if self.__verify_x509_signature(crl,
2804 t.public_key()):
2805 # If t isn't approved for signing crls,
2806 # the exception __check_extensions
2807 # raises will take the code to the
2808 # except below.
2809 self.__check_extensions(t,
2810 CRL_SIGNING_USE, 0)
2811 verified_crl = True
2812 except api_errors.SigningException:
2813 pass
2814 if not verified_crl:
2815 crl_cas = self.__get_certs_by_name(crl_issuer)
2816 for c in crl_cas:
2817 if self.__verify_x509_signature(crl,
2818 c.public_key()):
2819 try:
2820 self.verify_chain(c, ca_dict, 0,
2821 True,
2822 usages=CRL_SIGNING_USE)
2823 except api_errors.SigningException:
2824 pass
2825 else:
2826 verified_crl = True
2827 break
2828 if not verified_crl:
2829 return True
2831 # For a certificate to be revoked, its CRL must be validated
2832 # and must actually list the certificate as revoked.
2834 assert crl.issuer == cert.issuer
2835 for rev in crl:
2836 if rev.serial_number != cert.serial:
2837 continue
2838 try:
2839 reason = rev.extensions.get_extension_for_oid(
2840 x509.OID_CRL_REASON).value
2841 except x509.ExtensionNotFound:
2842 reason = None
2843 raise api_errors.RevokedCertificate(cert, reason)
2845 def __check_crls(self, cert, ca_dict):
2846 """Determines whether the certificate has been revoked by one of
2847 its CRLs.
2849 The 'cert' parameter is the certificate to check for revocation.
2851 The 'ca_dict' is a dictionary which maps subject hashes to
2852 certs treated as trust anchors."""
2854 # If the certificate doesn't have a CRL location listed, treat
2855 # it as valid.
2857 # The CRLs to be retrieved are stored in the
2858 # CRLDistributionPoints extension, which is structured like
2859 # this:
2861 # CRLDistributionPoints = [
2862 # CRLDistributionPoint = [
2863 # union {
2864 # full_name = [ GeneralName, ... ]
2865 # relative_name = [ GeneralName, ... ]
2866 # }, ... ]
2867 # , ... ]
2869 # Relative names are a feature in X509 certs that allows a
2870 # location to be specified relative to another certificate. We do
2871 # not support this, and it is unclear whether anybody uses it for
2872 # CRLs.
2873 # Full names are absolute locations that can come in different
2874 # formats (refer to RFC 5280), but in general only the URI type is
2875 # used for CRLs, so that is the only form we support here.
2877 try:
2878 dps = cert.extensions.get_extension_for_oid(
2879 x509.oid.ExtensionOID.CRL_DISTRIBUTION_POINTS).value
2880 except x509.ExtensionNotFound:
2881 return
2883 for dp in dps:
2884 if not dp.full_name:
2885 # we don't support relative names
2886 continue
2887 for uri in dp.full_name:
2888 if not isinstance(uri,
2889 x509.UniformResourceIdentifier):
2890 # we only support URIs
2891 continue
2892 self.__check_crl(cert, ca_dict, str(uri.value))
2894 def __check_revocation(self, cert, ca_dict, use_crls):
2895 hsh = self.__hash_cert(cert)
2896 if hsh in self.revoked_ca_certs:
2897 raise api_errors.RevokedCertificate(cert,
2898 "User manually revoked certificate.")
2899 if use_crls:
2900 self.__check_crls(cert, ca_dict)
2902 def __check_extensions(self, cert, usages, cur_pathlen):
2903 """Check whether the critical extensions in this certificate
2904 are supported and allow the provided use(s)."""
2906 try:
2907 exts = cert.extensions
2908 except (ValueError, x509.UnsupportedExtension) as e:
2909 raise api_errors.InvalidCertificateExtensions(
2910 cert, e)
2912 def check_values(vs):
2913 for v in vs:
2914 if v in supported_vs:
2915 continue
2916 # If there is only one extension value, it must
2917 # be the problematic one. Otherwise, we also
2918 # output the first unsupported value as the
2919 # problematic value, following the extension value.
2920 if len(vs) < 2:
2921 raise api_errors.UnsupportedExtensionValue(
2922 cert, ext, ", ".join(vs))
2923 raise api_errors.UnsupportedExtensionValue(
2924 cert, ext, ", ".join(vs), v)
2926 for ext in exts:
2927 etype = type(ext.value)
2928 if etype in SUPPORTED_EXTENSION_VALUES:
2929 supported_vs = SUPPORTED_EXTENSION_VALUES[etype]
2930 keys = EXTENSIONS_VALUES[etype]
2931 if etype == x509.BasicConstraints:
2932 pathlen = ext.value.path_length
2933 if pathlen is not None and \
2934 cur_pathlen > pathlen:
2935 raise api_errors.PathlenTooShort(cert,
2936 cur_pathlen, pathlen)
2937 elif etype == x509.KeyUsage:
2938 keys = list(EXTENSIONS_VALUES[etype])
2939 if not getattr(ext.value,
2940 "key_agreement"):
2941 # Cryptography error:
2942 # encipher_only/decipher_only is
2943 # undefined unless key_agreement
2944 # is true
2945 keys.remove("encipher_only")
2946 keys.remove("decipher_only")
2947 vs = [
2948 key
2949 for key in keys
2950 if getattr(ext.value, key)
2951 ]
2952 # Check whether the values for the extension are
2953 # recognized.
2954 check_values(vs)
2955 # For each use, check to see whether it's
2956 # permitted by the certificate's extension
2957 # values.
2958 if etype not in usages:
2959 continue
2960 for u in usages[etype]:
2961 if u not in vs:
2962 raise api_errors.InappropriateCertificateUse(
2963 cert, ext, u, ", ".join(vs))
2964 # If the extension name is unrecognized and critical,
2965 # then the chain cannot be verified.
2966 elif ext.critical:
2967 raise api_errors.UnsupportedCriticalExtension(
2968 cert, ext)
2970 def verify_chain(self, cert, ca_dict, cur_pathlen, use_crls,
2971 required_names=None, usages=None):
2972 """Validates the certificate against the given trust anchors.
2974 The 'cert' parameter is the certificate to validate.
2976 The 'ca_dict' parameter is a dictionary which maps subject
2977 hashes to certs treated as trust anchors.
2979 The 'cur_pathlen' parameter is an integer indicating how many
2980 certificates have been found between cert and the leaf cert.
2982 The 'use_crls' parameter is a boolean indicating whether
2983 certificates should be checked to see if they've been revoked.
2985 The 'required_names' parameter is a set of strings that must
2986 be seen as a CN in the chain of trust for the certificate."""
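# A minimal usage sketch (hypothetical caller; 'trust_anchors' would
# normally be the image's trust-anchor dictionary, which this method
# merges with get_ca_certs()):
#     pub.verify_chain(chain_cert, trust_anchors, 0, True,
#         required_names=set(["example.com"]))
# On failure it raises one of the api_errors exceptions seen below
# (for example BrokenChain or UntrustedSelfSignedCert) rather than
# returning a value.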
2988 if required_names is None:
2989 required_names = set()
2990 verified = False
2991 continue_loop = True
2992 certs_with_problems = []
2994 ca_dict = copy.copy(ca_dict)
2995 for k, v in self.get_ca_certs().iteritems():
2996 if k in ca_dict:
2997 ca_dict[k].extend(v)
2998 else:
2999 ca_dict[k] = v
3001 def merge_dicts(d1, d2):
3002 """Function for merging usage dictionaries."""
3003 res = copy.deepcopy(d1)
3004 for k in d2:
3005 if k in res:
3006 res[k].extend(d2[k])
3007 else:
3008 res[k] = d2[k]
3009 return res
3011 def discard_names(cert, required_names):
3012 for cert_cn in [
3013 str(c.value)
3014 for c
3015 in cert.subject.get_attributes_for_oid(
3016 x509.oid.NameOID.COMMON_NAME)
3017 ]:
3018 required_names.discard(cert_cn)
3020 if not usages:
3021 usages = {}
3022 for u in POSSIBLE_USES:
3023 usages = merge_dicts(usages, u)
3025 # Check whether we can validate this certificate.
3026 self.__check_extensions(cert, usages, cur_pathlen)
3028 # Check whether this certificate has been revoked.
3029 self.__check_revocation(cert, ca_dict, use_crls)
3031 while continue_loop:
3032 # If this certificate's CN is in the set of required
3033 # names, remove it.
3034 discard_names(cert, required_names)
3036 # Find the certificate that issued this certificate.
3037 issuer = cert.issuer
3038 issuer_hash = hashlib.sha1(misc.force_bytes(
3039 issuer)).hexdigest()
3041 # See whether this certificate was issued by any of the
3042 # given trust anchors.
3043 for c in ca_dict.get(issuer_hash, []):
3044 if self.__verify_x509_signature(cert,
3045 c.public_key()):
3046 verified = True
3047 # Remove any required names found in the
3048 # trust anchor.
3049 discard_names(c, required_names)
3050 # If there are more names to check for
3051 # continue up the chain of trust to look
3052 # for them.
3053 if not required_names:
3054 continue_loop = False
3055 break
3057 # If the subject and issuer for this certificate are
3058 # identical and the certificate hasn't been verified
3059 # then this is an untrusted self-signed cert and should
3060 # be rejected.
3061 if hashlib.sha1(misc.force_bytes(
3062 cert.subject)).hexdigest() == issuer_hash:
3063 if not verified:
3064 raise \
3065 api_errors.UntrustedSelfSignedCert(
3066 cert)
3067 # This break should break the
3068 # while continue_loop loop.
3069 break
3071 # If the certificate hasn't been issued by a trust
3072 # anchor or more names need to be found, continue
3073 # looking up the chain of trust.
3074 if continue_loop:
3075 up_chain = False
3076 # Keep track of certs that would have verified
3077 # this certificate but had critical extensions
3078 # we can't handle yet for error reporting.
3079 certs_with_problems = []
3080 for c in self.__get_certs_by_name(issuer):
3081 # If the certificate is approved to
3082 # sign another certificate, verifies
3083 # the current certificate, and hasn't
3084 # been revoked, consider it as the
3085 # next link in the chain. check_ca
3086 # checks both the basicConstraints
3087 # extension and the keyUsage extension.
3088 if misc.check_ca(c) and \
3089 self.__verify_x509_signature(cert,
3090 c.public_key()):
3091 problem = False
3092 # Check whether this certificate
3093 # has a critical extension we
3094 # don't understand.
3095 try:
3096 self.__check_extensions(
3097 c, CERT_SIGNING_USE,
3098 cur_pathlen)
3099 self.__check_revocation(c,
3100 ca_dict, use_crls)
3101 except (api_errors.UnsupportedCriticalExtension, api_errors.RevokedCertificate) as e:
3102 certs_with_problems.append(e)
3103 problem = True
3104 # If this certificate has no
3105 # problems with it, it's the
3106 # next link in the chain so make
3107 # it the current certificate and
3108 # add one to cur_pathlen since
3109 # there's one more chain cert
3110 # between the code signing cert
3111 # and the root of the chain.
3112 if not problem:
3113 up_chain = True
3114 cert = c
3115 cur_pathlen += 1
3116 break
3117 # If there's not another link in the chain to be
3118 # found, stop the iteration.
3119 if not up_chain:
3120 continue_loop = False
3121 # If the certificate wasn't verified against a trust anchor,
3122 # raise an exception.
3123 if not verified:
3124 raise api_errors.BrokenChain(cert,
3125 certs_with_problems)
3127 alias = property(lambda self: self.__alias, __set_alias,
3128 doc="An alternative name for a publisher.")
3130 client_uuid = property(lambda self: self.__client_uuid,
3131 __set_client_uuid,
3132 doc="A Universally Unique Identifier (UUID) used to identify a "
3133 "client image to a publisher.")
3135 disabled = property(lambda self: self.__disabled, __set_disabled,
3136 doc="A boolean value indicating whether the publisher should be "
3137 "used for packaging operations.")
3139 last_refreshed = property(__get_last_refreshed, __set_last_refreshed,
3140 doc="A datetime object representing the time (in UTC) the "
3141 "publisher's selected repository was last refreshed for new "
3142 "metadata (such as catalog updates). 'None' if the publisher "
3143 "hasn't been refreshed yet or the time is not available.")
3145 meta_root = property(lambda self: self.__meta_root, __set_meta_root,
3146 doc="The absolute pathname of the directory where the publisher's "
3147 "metadata should be written to and read from.")
3149 prefix = property(lambda self: self.__prefix, __set_prefix,
3150 doc="The name of the publisher.")
3152 repository = property(lambda self: self.__repository,
3153 __set_repository,
3154 doc="A reference to the selected repository object.")
3156 sticky = property(lambda self: self.__sticky, __set_stickiness,
3157 doc="Whether or not installed packages from this publisher are"
3158 " always preferred to other publishers.")
3160 def __get_prop(self, name):
3161 """Accessor method for properties dictionary"""
3162 return self.__properties[name]
3164 @staticmethod
3165 def __read_list(list_str):
3166 """Take a list in string representation and convert it back
3167 to a Python list."""
3169 list_str = list_str.encode("utf-8")
3170 # Strip brackets and any whitespace
3171 list_str = list_str.strip("][ ")
3172 # Strip comma and any whitespace
3173 lst = list_str.split(", ")
3174 # Strip empty whitespace, single, and double quotation marks
3175 lst = [ s.strip("' \"") for s in lst ]
3176 # Eliminate any empty strings
3177 lst = [ s for s in lst if s != '' ]
3179 return lst
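# For example, the stored string "['a', 'b', 'c']" converts back to the
# Python list ["a", "b", "c"].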
3181 def __set_prop(self, name, values):
3182 """Accessor method to add a property"""
3183 if self.sys_pub:
3184 raise api_errors.ModifyingSyspubException(_("Cannot "
3185 "set a property for a system publisher. The "
3186 "property was:{0}").format(name))
3188 if name == SIGNATURE_POLICY:
3189 self.__sig_policy = None
3190 if isinstance(values, basestring):
3191 values = [values]
3192 policy_name = values[0]
3193 if policy_name not in sigpolicy.Policy.policies():
3194 raise api_errors.InvalidPropertyValue(_(
3195 "{val} is not a valid value for this "
3196 "property:{prop}").format(val=policy_name,
3197 prop=SIGNATURE_POLICY))
3198 if policy_name == "require-names":
3199 if self.__delay_validation:
3200 # If __delay_validation is set, then
3201 # it's possible that
3202 # signature-required-names was
3203 # set by a previous call to set_prop.
3204 # If so, don't overwrite the
3205 # values that have already been read.
3206 self.__properties.setdefault(
3207 "signature-required-names", [])
3208 self.__properties[
3209 "signature-required-names"].extend(
3210 values[1:])
3211 else:
3212 self.__properties[
3213 "signature-required-names"] = \
3214 values[1:]
3215 self.__validate_properties()
3216 else:
3217 if len(values) > 1:
3218 raise api_errors.InvalidPropertyValue(_(
3219 "The {0} signature-policy takes no "
3220 "argument.").format(policy_name))
3221 self.__properties[SIGNATURE_POLICY] = policy_name
3222 return
3223 if name == "signature-required-names":
3224 if isinstance(values, basestring):
3225 values = self.__read_list(values)
3226 self.__properties[name] = values
3228 def __del_prop(self, name):
3229 """Accessor method for properties"""
3230 if self.sys_pub:
3231 raise api_errors.ModifyingSyspubException(_("Cannot "
3232 "unset a property for a system publisher. The "
3233 "property was:{0}").format(name))
3234 del self.__properties[name]
3236 def __prop_iter(self):
3237 return self.__properties.__iter__()
3239 def __prop_iteritems(self):
3240 """Support iteritems on properties"""
3241 return self.__properties.iteritems()
3243 def __prop_keys(self):
3244 """Support keys() on properties"""
3245 return self.__properties.keys()
3247 def __prop_values(self):
3248 """Support values() on properties"""
3249 return self.__properties.values()
3251 def __prop_getdefault(self, name, value):
3252 """Support getdefault() on properties"""
3253 return self.__properties.get(name, value)
3255 def __prop_setdefault(self, name, value):
3256 """Support setdefault() on properties"""
3257 # Must set it this way so that the logic in __set_prop is used.
3258 try:
3259 return self.__properties[name]
3260 except KeyError:
3261 self.properties[name] = value
3262 return value
3264 def __prop_update(self, d):
3265 """Support update() on properties"""
3267 for k, v in d.iteritems():
3268 # Must iterate through each value and
3269 # set it this way so that the logic
3270 # in __set_prop is used.
3271 self.properties[k] = v
3273 def __prop_pop(self, d, default):
3274 """Support pop() on properties"""
3275 if self.sys_pub:
3276 raise api_errors.ModifyingSyspubException(_("Cannot "
3277 "unset a property for a system publisher."))
3278 return self.__properties.pop(d, default)
3280 properties = DictProperty(__get_prop, __set_prop, __del_prop,
3281 __prop_iteritems, __prop_keys, __prop_values, __prop_iter,
3282 doc="A dict holding the properties for an image.",
3283 fgetdefault=__prop_getdefault, fsetdefault=__prop_setdefault,
3284 update=__prop_update, pop=__prop_pop)
3286 @property
3287 def signature_policy(self):
3288 """Return the signature policy for the publisher."""
3290 if self.__sig_policy is not None:
3291 return self.__sig_policy
3292 txt = self.properties.get(SIGNATURE_POLICY,
3293 sigpolicy.DEFAULT_POLICY)
3294 names = self.properties.get("signature-required-names", [])
3295 self.__sig_policy = sigpolicy.Policy.policy_factory(txt, names)
3296 return self.__sig_policy