Add google appengine to repo
[frozenviper.git] / google_appengine / google / appengine / tools / appcfg.py
blobd80462ecede91e63076474a86456b44cedb94432
1 #!/usr/bin/env python
3 # Copyright 2007 Google Inc.
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
9 # http://www.apache.org/licenses/LICENSE-2.0
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
18 """Tool for deploying apps to an app server.
20 Currently, the application only uploads new appversions. To do this, it first
21 walks the directory tree rooted at the path the user specifies, adding all the
22 files it finds to a list. It then uploads the application configuration
23 (app.yaml) to the server using HTTP, followed by uploading each of the files.
24 It then commits the transaction with another request.
26 The bulk of this work is handled by the AppVersionUpload class, which exposes
27 methods to add to the list of files, fetch a list of modified files, upload
28 files, and commit or rollback the transaction.
29 """
32 import calendar
33 import datetime
34 import getpass
35 import logging
36 import mimetypes
37 import optparse
38 import os
39 import random
40 import re
41 import sha
42 import sys
43 import tempfile
44 import time
45 import urllib
46 import urllib2
48 import google
49 import yaml
50 from google.appengine.cron import groctimespecification
51 from google.appengine.api import appinfo
52 from google.appengine.api import croninfo
53 from google.appengine.api import dosinfo
54 from google.appengine.api import queueinfo
55 from google.appengine.api import validation
56 from google.appengine.api import yaml_errors
57 from google.appengine.api import yaml_object
58 from google.appengine.datastore import datastore_index
59 from google.appengine.tools import appengine_rpc
60 from google.appengine.tools import bulkloader
# Maximum number of files listed per clone request batch; limits for the
# batched upload protocol are presumably enforced server-side -- TODO confirm.
MAX_FILES_TO_CLONE = 100
# Delimiters used to pack file tuples into clone request payloads
# (see BuildClonePostBody).
LIST_DELIMITER = '\n'
TUPLE_DELIMITER = '|'

# Location of the VERSION yaml file, relative to the 'google' package
# directory (see GetVersionObject).
VERSION_FILE = '../VERSION'

# Timeout, in seconds, for the /api/updatecheck request.
UPDATE_CHECK_TIMEOUT = 3

# Name of the per-user nag-preferences file, stored in the home directory.
NAG_FILE = '.appcfg_nag'

# Highest app log severity level (0-4) that can be requested when
# downloading request logs.
MAX_LOG_LEVEL = 4

# Size/count limits for batched uploads; not referenced in this portion of
# the file -- presumably used by the upload machinery below. TODO confirm.
MAX_BATCH_SIZE = 1000000
MAX_BATCH_COUNT = 100
MAX_BATCH_FILE_SIZE = 200000
BATCH_OVERHEAD = 500

# Global verbosity knob read by StatusUpdate(); > 0 enables status output.
verbosity = 1

# Restrict app.yaml validation: runtime must be 'python', and api_version
# must be one of the versions listed in GOOGLE_TEST_API_VERSIONS
# (defaults to '1').
appinfo.AppInfoExternal.ATTRIBUTES[appinfo.RUNTIME] = 'python'
_api_versions = os.environ.get('GOOGLE_TEST_API_VERSIONS', '1')
_options = validation.Options(*_api_versions.split(','))
appinfo.AppInfoExternal.ATTRIBUTES[appinfo.API_VERSION] = _options
del _api_versions, _options
def StatusUpdate(msg):
  """Print a status message to stderr.

  The message is only emitted when the module-level 'verbosity' setting
  is greater than zero.

  Args:
    msg: The string to print.
  """
  if verbosity <= 0:
    return
  sys.stderr.write('%s\n' % msg)
def GetMimeTypeIfStaticFile(config, filename):
  """Looks up the mime type for 'filename'.

  Uses the handlers in 'config' to determine if the file should
  be treated as a static file.

  Args:
    config: The app.yaml object to check the filename against.
    filename: The name of the file.

  Returns:
    The mime type string. For example, 'text/plain' or 'image/gif'.
    None if this is not a static file.
  """
  for handler in config.handlers:
    handler_type = handler.GetHandlerType()
    if handler_type not in ('static_dir', 'static_files'):
      continue
    if handler_type == 'static_dir':
      pattern = os.path.join(re.escape(handler.GetHandler()), '.*')
    else:
      pattern = handler.upload
    if not re.match(pattern, filename):
      continue
    if handler.mime_type is not None:
      return handler.mime_type
    guess = mimetypes.guess_type(filename)[0]
    if guess is not None:
      return guess
    default = 'application/octet-stream'
    sys.stderr.write('Could not guess mimetype for %s. Using %s.\n'
                     % (filename, default))
    return default
  return None
def LookupErrorBlob(config, filename):
  """Looks up the mime type and error_code for 'filename'.

  Uses the error handlers in 'config' to determine if the file should
  be treated as an error blob.

  Args:
    config: The app.yaml object to check the filename against.
    filename: The name of the file.

  Returns:
    A tuple of (mime_type, error_code), or (None, None) if this is not an
    error blob. For example, ('text/plain', default) or ('image/gif',
    timeout) or (None, None).
  """
  if not config.error_handlers:
    return (None, None)
  for error_handler in config.error_handlers:
    if error_handler.file != filename:
      continue
    error_code = error_handler.error_code or 'default'
    if error_handler.mime_type is not None:
      return (error_handler.mime_type, error_code)
    guess = mimetypes.guess_type(filename)[0]
    if guess is not None:
      return (guess, error_code)
    default = 'application/octet-stream'
    sys.stderr.write('Could not guess mimetype for %s. Using %s.\n'
                     % (filename, default))
    return (default, error_code)
  return (None, None)
def BuildClonePostBody(file_tuples):
  """Build the post body for the /api/clone{files,blobs,errorblobs} urls.

  Each input tuple is flattened into a TUPLE_DELIMITER-joined record
  (path first), and the records are joined with LIST_DELIMITER.

  Args:
    file_tuples: A list of tuples. Each tuple should contain the entries
      appropriate for the endpoint in question.

  Returns:
    A string containing the properly delimited tuples.
  """
  records = [TUPLE_DELIMITER.join([entry[0]] + list(entry[1:]))
             for entry in file_tuples]
  return LIST_DELIMITER.join(records)
class NagFile(validation.Validated):
  """A validated YAML class to represent the user's nag preferences.

  Attributes:
    timestamp: The timestamp of the last nag.
    opt_in: True if the user wants to check for updates on dev_appserver
      start. False if not. May be None if we have not asked the user yet.
  """

  # Schema for the YAML document; the closing brace was lost in the
  # truncated copy of this file and is restored here.
  ATTRIBUTES = {
      'timestamp': validation.TYPE_FLOAT,
      'opt_in': validation.Optional(validation.TYPE_BOOL),
  }

  @staticmethod
  def Load(nag_file):
    """Load a single NagFile object where one and only one is expected.

    Args:
      nag_file: A file-like object or string containing the yaml data to parse.

    Returns:
      A NagFile instance.
    """
    return yaml_object.BuildSingleObject(NagFile, nag_file)
def GetVersionObject(isfile=os.path.isfile, open_fn=open):
  """Gets the version of the SDK by parsing the VERSION file.

  The VERSION file lives relative to the 'google' package directory.

  Args:
    isfile: used for testing.
    open_fn: Used for testing.

  Returns:
    A Yaml object or None if the VERSION file does not exist.
  """
  version_path = os.path.join(os.path.dirname(google.__file__), VERSION_FILE)
  if not isfile(version_path):
    logging.error('Could not find version file at %s', version_path)
    return None

  fh = open_fn(version_path, 'r')
  try:
    return yaml.safe_load(fh)
  finally:
    fh.close()
def RetryWithBackoff(initial_delay, backoff_factor, max_delay, max_tries,
                     callable_func):
  """Calls a function multiple times, backing off more and more each time.

  Args:
    initial_delay: Initial delay after first try, in seconds.
    backoff_factor: Delay will be multiplied by this factor after each try.
    max_delay: Max delay factor.
    max_tries: Maximum number of tries.
    callable_func: The method to call, will pass no arguments.

  Returns:
    True if the function succeeded in one of its tries.

  Raises:
    Whatever the function raises--an exception will immediately stop retries.
  """
  if callable_func():
    return True
  delay = initial_delay
  # One initial attempt already happened; retry up to max_tries - 1 times.
  for _ in range(max_tries - 1):
    StatusUpdate('Will check again in %s seconds.' % delay)
    time.sleep(delay)
    delay *= backoff_factor
    if max_delay and delay > max_delay:
      delay = max_delay
    if callable_func():
      return True
  return False
275 def _VersionList(release):
276 """Parse a version string into a list of ints.
278 Args:
279 release: The 'release' version, e.g. '1.2.4'.
280 (Due to YAML parsing this may also be an int or float.)
282 Returns:
283 A list of ints corresponding to the parts of the version string
284 between periods. Example:
285 '1.2.4' -> [1, 2, 4]
286 '1.2.3.4' -> [1, 2, 3, 4]
288 Raises:
289 ValueError if not all the parts are valid integers.
291 return [int(part) for part in str(release).split('.')]
class UpdateCheck(object):
  """Determines if the local SDK is the latest version.

  Nags the user when there are updates to the SDK. As the SDK becomes
  more out of date, the language in the nagging gets stronger. We
  store a little yaml file in the user's home directory so that we nag
  the user only once a week.

  The yaml file has the following field:
    'timestamp': Last time we nagged the user in seconds since the epoch.

  Attributes:
    server: An AbstractRpcServer instance used to check for the latest SDK.
    config: The app's AppInfoExternal. Needed to determine which api_version
      the app is using.
  """

  def __init__(self,
               server,
               config,
               isdir=os.path.isdir,
               isfile=os.path.isfile,
               open_fn=open):
    """Create a new UpdateCheck.

    Args:
      server: The AbstractRpcServer to use.
      config: The yaml object that specifies the configuration of this
        application.
      isdir: Replacement for os.path.isdir (for testing).
      isfile: Replacement for os.path.isfile (for testing).
      open_fn: Replacement for the open builtin (for testing).
    """
    self.server = server
    self.config = config
    self.isdir = isdir
    self.isfile = isfile
    self.open = open_fn

  @staticmethod
  def MakeNagFilename():
    """Returns the filename for the nag file for this user."""
    user_homedir = os.path.expanduser('~/')
    if not os.path.isdir(user_homedir):
      # Windows fallback: if expanduser found no home directory, point
      # HOMEDRIVE at the drive the Python installation lives on so the
      # second expanduser below resolves somewhere writable.
      drive, unused_tail = os.path.splitdrive(os.__file__)
      if drive:
        os.environ['HOMEDRIVE'] = drive

    return os.path.expanduser('~/' + NAG_FILE)

  def _ParseVersionFile(self):
    """Parse the local VERSION file.

    Returns:
      A Yaml object or None if the file does not exist.
    """
    return GetVersionObject(isfile=self.isfile, open_fn=self.open)

  def CheckSupportedVersion(self):
    """Determines if the app's api_version is supported by the SDK.

    Uses the api_version field from the AppInfoExternal to determine if
    the SDK supports that api_version.

    Raises:
      SystemExit if the api_version is not supported.
    """
    version = self._ParseVersionFile()
    if version is None:
      # Can't tell either way without a VERSION file; don't block the user.
      logging.error('Could not determine if the SDK supports the api_version '
                    'requested in app.yaml.')
      return
    if self.config.api_version not in version['api_versions']:
      logging.critical('The api_version specified in app.yaml (%s) is not '
                       'supported by this release of the SDK. The supported '
                       'api_versions are %s.',
                       self.config.api_version, version['api_versions'])
      sys.exit(1)

  def CheckForUpdates(self):
    """Queries the server for updates and nags the user if appropriate.

    Queries the server for the latest SDK version at the same time reporting
    the local SDK version. The server will respond with a yaml document
    containing the fields:
      'release': The name of the release (e.g. 1.2).
      'timestamp': The time the release was created (YYYY-MM-DD HH:MM AM/PM TZ).
      'api_versions': A list of api_version strings (e.g. ['1', 'beta']).

    We will nag the user with increasing severity if:
    - There is a new release.
    - There is a new release with a new api_version.
    - There is a new release that does not support the api_version named in
      self.config.
    """
    version = self._ParseVersionFile()
    if version is None:
      logging.info('Skipping update check')
      return
    logging.info('Checking for updates to the SDK.')

    try:
      response = self.server.Send('/api/updatecheck',
                                  timeout=UPDATE_CHECK_TIMEOUT,
                                  release=version['release'],
                                  timestamp=version['timestamp'],
                                  api_versions=version['api_versions'])
    except urllib2.URLError, e:
      # Update checks are best-effort; never fail the user's command.
      logging.info('Update check failed: %s', e)
      return

    latest = yaml.safe_load(response)
    if version['release'] == latest['release']:
      logging.info('The SDK is up to date.')
      return

    try:
      this_release = _VersionList(version['release'])
    except ValueError:
      logging.warn('Could not parse this release version (%r)',
                   version['release'])
    else:
      try:
        advertised_release = _VersionList(latest['release'])
      except ValueError:
        logging.warn('Could not parse advertised release version (%r)',
                     latest['release'])
      else:
        # A local SDK newer than the advertised one (e.g. a prerelease)
        # should not trigger a nag.
        if this_release > advertised_release:
          logging.info('This SDK release is newer than the advertised release.')
          return

    api_versions = latest['api_versions']
    if self.config.api_version not in api_versions:
      # Strongest nag: the app's api_version is gone from the latest SDK.
      self._Nag(
          'The api version you are using (%s) is obsolete! You should\n'
          'upgrade your SDK and test that your code works with the new\n'
          'api version.' % self.config.api_version,
          latest, version, force=True)
      return

    if self.config.api_version != api_versions[len(api_versions) - 1]:
      # Still supported, but no longer the newest api_version.
      self._Nag(
          'The api version you are using (%s) is deprecated. You should\n'
          'upgrade your SDK to try the new functionality.' %
          self.config.api_version, latest, version)
      return

    self._Nag('There is a new release of the SDK available.',
              latest, version)

  def _ParseNagFile(self):
    """Parses the nag file.

    Returns:
      A NagFile if the file was present else None.
    """
    nag_filename = UpdateCheck.MakeNagFilename()
    if self.isfile(nag_filename):
      fh = self.open(nag_filename, 'r')
      try:
        nag = NagFile.Load(fh)
      finally:
        fh.close()
      return nag
    return None

  def _WriteNagFile(self, nag):
    """Writes the NagFile to the user's nag file.

    If the destination path does not exist, this method will log an error
    and fail silently.

    Args:
      nag: The NagFile to write.
    """
    nagfilename = UpdateCheck.MakeNagFilename()
    try:
      fh = self.open(nagfilename, 'w')
      try:
        fh.write(nag.ToYAML())
      finally:
        fh.close()
    except (OSError, IOError), e:
      logging.error('Could not write nag file to %s. Error: %s', nagfilename, e)

  def _Nag(self, msg, latest, version, force=False):
    """Prints a nag message and updates the nag file's timestamp.

    Because we don't want to nag the user everytime, we store a simple
    yaml document in the user's home directory. If the timestamp in this
    doc is over a week old, we'll nag the user. And when we nag the user,
    we update the timestamp in this doc.

    Args:
      msg: The formatted message to print to the user.
      latest: The yaml document received from the server.
      version: The local yaml version document.
      force: If True, always nag the user, ignoring the nag file.
    """
    nag = self._ParseNagFile()
    if nag and not force:
      last_nag = datetime.datetime.fromtimestamp(nag.timestamp)
      if datetime.datetime.now() - last_nag < datetime.timedelta(weeks=1):
        # Nagged within the last week; stay quiet.
        logging.debug('Skipping nag message')
        return

    if nag is None:
      nag = NagFile()
    nag.timestamp = time.time()
    self._WriteNagFile(nag)

    print '****************************************************************'
    print msg
    print '-----------'
    print 'Latest SDK:'
    print yaml.dump(latest)
    print '-----------'
    print 'Your SDK:'
    print yaml.dump(version)
    print '-----------'
    print 'Please visit http://code.google.com/appengine for the latest SDK'
    print '****************************************************************'

  def AllowedToCheckForUpdates(self, input_fn=raw_input):
    """Determines if the user wants to check for updates.

    On startup, the dev_appserver wants to check for updates to the SDK.
    Because this action reports usage to Google when the user is not
    otherwise communicating with Google (e.g. pushing a new app version),
    the user must opt in.

    If the user does not have a nag file, we will query the user and
    save the response in the nag file. Subsequent calls to this function
    will re-use that response.

    Args:
      input_fn: used to collect user input. This is for testing only.

    Returns:
      True if the user wants to check for updates. False otherwise.
    """
    nag = self._ParseNagFile()
    if nag is None:
      nag = NagFile()
      nag.timestamp = time.time()

    if nag.opt_in is None:
      # First run: ask once and persist the answer so we never ask again.
      answer = input_fn('Allow dev_appserver to check for updates on startup? '
                        '(Y/n): ')
      answer = answer.strip().lower()
      if answer == 'n' or answer == 'no':
        print ('dev_appserver will not check for updates on startup. To '
               'change this setting, edit %s' % UpdateCheck.MakeNagFilename())
        nag.opt_in = False
      else:
        # Any other response (including empty) counts as opting in.
        print ('dev_appserver will check for updates on startup. To change '
               'this setting, edit %s' % UpdateCheck.MakeNagFilename())
        nag.opt_in = True
      self._WriteNagFile(nag)
    return nag.opt_in
class IndexDefinitionUpload(object):
  """Provides facilities to upload index definitions to the hosting service."""

  def __init__(self, server, config, definitions):
    """Creates a new DatastoreIndexUpload.

    Args:
      server: The RPC server to use. Should be an instance of HttpRpcServer
        or TestRpcServer.
      config: The AppInfoExternal object derived from the app.yaml file.
      definitions: An IndexDefinitions object.
    """
    self.server = server
    self.config = config
    self.definitions = definitions

  def DoUpload(self):
    """Uploads the index definitions."""
    StatusUpdate('Uploading index definitions.')
    request_params = {
        'app_id': self.config.application,
        'version': self.config.version,
        'payload': self.definitions.ToYAML(),
    }
    self.server.Send('/api/datastore/index/add', **request_params)
class CronEntryUpload(object):
  """Provides facilities to upload cron entries to the hosting service."""

  def __init__(self, server, config, cron):
    """Creates a new CronEntryUpload.

    Args:
      server: The RPC server to use. Should be an instance of a subclass of
        AbstractRpcServer
      config: The AppInfoExternal object derived from the app.yaml file.
      cron: The CronInfoExternal object loaded from the cron.yaml file.
    """
    self.server = server
    self.config = config
    self.cron = cron

  def DoUpload(self):
    """Uploads the cron entries."""
    StatusUpdate('Uploading cron entries.')
    request_params = {
        'app_id': self.config.application,
        'version': self.config.version,
        'payload': self.cron.ToYAML(),
    }
    self.server.Send('/api/cron/update', **request_params)
class QueueEntryUpload(object):
  """Provides facilities to upload task queue entries to the hosting service."""

  def __init__(self, server, config, queue):
    """Creates a new QueueEntryUpload.

    Args:
      server: The RPC server to use. Should be an instance of a subclass of
        AbstractRpcServer
      config: The AppInfoExternal object derived from the app.yaml file.
      queue: The QueueInfoExternal object loaded from the queue.yaml file.
    """
    self.server = server
    self.config = config
    self.queue = queue

  def DoUpload(self):
    """Uploads the task queue entries."""
    StatusUpdate('Uploading task queue entries.')
    request_params = {
        'app_id': self.config.application,
        'version': self.config.version,
        'payload': self.queue.ToYAML(),
    }
    self.server.Send('/api/queue/update', **request_params)
class DosEntryUpload(object):
  """Provides facilities to upload dos entries to the hosting service."""

  def __init__(self, server, config, dos):
    """Creates a new DosEntryUpload.

    Args:
      server: The RPC server to use. Should be an instance of a subclass of
        AbstractRpcServer.
      config: The AppInfoExternal object derived from the app.yaml file.
      dos: The DosInfoExternal object loaded from the dos.yaml file.
    """
    self.server = server
    self.config = config
    self.dos = dos

  def DoUpload(self):
    """Uploads the dos entries."""
    StatusUpdate('Uploading DOS entries.')
    request_params = {
        'app_id': self.config.application,
        'version': self.config.version,
        'payload': self.dos.ToYAML(),
    }
    self.server.Send('/api/dos/update', **request_params)
class DefaultVersionSet(object):
  """Provides facilities to set the default (serving) version."""

  def __init__(self, server, config):
    """Creates a new DefaultVersionSet.

    Args:
      server: The RPC server to use. Should be an instance of a subclass of
        AbstractRpcServer.
      config: The AppInfoExternal object derived from the app.yaml file.
    """
    self.server = server
    self.config = config

  def SetVersion(self):
    """Sets the default version."""
    StatusUpdate('Setting default version to %s.' % (self.config.version,))
    request_params = {
        'app_id': self.config.application,
        'version': self.config.version,
    }
    self.server.Send('/api/appversion/setdefault', **request_params)
class IndexOperation(object):
  """Provide facilities for writing Index operation commands."""

  def __init__(self, server, config):
    """Creates a new IndexOperation.

    Args:
      server: The RPC server to use. Should be an instance of HttpRpcServer
        or TestRpcServer.
      config: appinfo.AppInfoExternal configuration object.
    """
    self.server = server
    self.config = config

  def DoDiff(self, definitions):
    """Retrieve diff file from the server.

    Args:
      definitions: datastore_index.IndexDefinitions as loaded from users
        index.yaml file.

    Returns:
      A pair of datastore_index.IndexDefinitions objects. The first record
      is the set of indexes that are present in the index.yaml file but missing
      from the server. The second record is the set of indexes that are
      present on the server but missing from the index.yaml file (indicating
      that these indexes should probably be vacuumed).
    """
    StatusUpdate('Fetching index definitions diff.')
    diff_response = self.server.Send('/api/datastore/index/diff',
                                     app_id=self.config.application,
                                     payload=definitions.ToYAML())
    return datastore_index.ParseMultipleIndexDefinitions(diff_response)

  def DoDelete(self, definitions):
    """Delete indexes from the server.

    Args:
      definitions: Index definitions to delete from datastore.

    Returns:
      A single datstore_index.IndexDefinitions containing indexes that were
      not deleted, probably because they were already removed. This may
      be normal behavior as there is a potential race condition between fetching
      the index-diff and sending deletion confirmation through.
    """
    StatusUpdate('Deleting selected index definitions.')
    delete_response = self.server.Send('/api/datastore/index/delete',
                                       app_id=self.config.application,
                                       payload=definitions.ToYAML())
    return datastore_index.ParseIndexDefinitions(delete_response)
class VacuumIndexesOperation(IndexOperation):
  """Provide facilities to request the deletion of datastore indexes."""

  def __init__(self, server, config, force,
               confirmation_fn=raw_input):
    """Creates a new VacuumIndexesOperation.

    Args:
      server: The RPC server to use. Should be an instance of HttpRpcServer
        or TestRpcServer.
      config: appinfo.AppInfoExternal configuration object.
      force: True to force deletion of indexes, else False.
      confirmation_fn: Function used for getting input form user.
    """
    super(VacuumIndexesOperation, self).__init__(server, config)
    self.force = force
    self.confirmation_fn = confirmation_fn

  def GetConfirmation(self, index):
    """Get confirmation from user to delete an index.

    This method will enter an input loop until the user provides a
    response it is expecting. Valid input is one of three responses:

      y: Confirm deletion of index.
      n: Do not delete index.
      a: Delete all indexes without asking for further confirmation.

    If the user enters nothing at all, the default action is to skip
    that index and do not delete.

    If the user selects 'a', as a side effect, the 'force' flag is set.

    Args:
      index: Index to confirm.

    Returns:
      True if user enters 'y' or 'a'. False if user enter 'n'.
    """
    while True:
      print 'This index is no longer defined in your index.yaml file.'
      print
      print index.ToYAML()
      print

      confirmation = self.confirmation_fn(
          'Are you sure you want to delete this index? (N/y/a): ')
      confirmation = confirmation.strip().lower()

      if confirmation == 'y':
        return True
      elif confirmation == 'n' or not confirmation:
        return False
      elif confirmation == 'a':
        # 'a' answers yes for this index and suppresses further prompts
        # by flipping the instance-wide force flag.
        self.force = True
        return True
      else:
        # Unrecognized input; loop and ask again.
        print 'Did not understand your response.'

  def DoVacuum(self, definitions):
    """Vacuum indexes in datastore.

    This method will query the server to determine which indexes are not
    being used according to the user's local index.yaml file. Once it has
    made this determination, it confirms with the user which unused indexes
    should be deleted. Once confirmation for each index is receives, it
    deletes those indexes.

    Because another user may in theory delete the same indexes at the same
    time as the user, there is a potential race condition. In this rare cases,
    some of the indexes previously confirmed for deletion will not be found.
    The user is notified which indexes these were.

    Args:
      definitions: datastore_index.IndexDefinitions as loaded from users
        index.yaml file.
    """
    # Only the second record (indexes on the server but not in index.yaml)
    # matters for vacuuming.
    unused_new_indexes, notused_indexes = self.DoDiff(definitions)

    deletions = datastore_index.IndexDefinitions(indexes=[])
    if notused_indexes.indexes is not None:
      for index in notused_indexes.indexes:
        if self.force or self.GetConfirmation(index):
          deletions.indexes.append(index)

    if deletions.indexes:
      not_deleted = self.DoDelete(deletions)

      # Indexes missing at delete time were probably removed concurrently;
      # warn rather than fail.
      if not_deleted.indexes:
        not_deleted_count = len(not_deleted.indexes)
        if not_deleted_count == 1:
          warning_message = ('An index was not deleted. Most likely this is '
                             'because it no longer exists.\n\n')
        else:
          warning_message = ('%d indexes were not deleted. Most likely this '
                             'is because they no longer exist.\n\n'
                             % not_deleted_count)
        for index in not_deleted.indexes:
          warning_message += index.ToYAML()
        logging.warning(warning_message)
834 class LogsRequester(object):
835 """Provide facilities to export request logs."""
837 def __init__(self, server, config, output_file,
838 num_days, append, severity, end, vhost, include_vhost,
839 include_all=None, time_func=time.time):
840 """Constructor.
842 Args:
843 server: The RPC server to use. Should be an instance of HttpRpcServer
844 or TestRpcServer.
845 config: appinfo.AppInfoExternal configuration object.
846 output_file: Output file name.
847 num_days: Number of days worth of logs to export; 0 for all available.
848 append: True if appending to an existing file.
849 severity: App log severity to request (0-4); None for no app logs.
850 end: date object representing last day of logs to return.
851 vhost: The virtual host of log messages to get. None for all hosts.
852 include_vhost: If true, the virtual host is included in log messages.
853 include_all: If true, we add to the log message everything we know
854 about the request.
855 time_func: Method that return a timestamp representing now (for testing).
857 self.server = server
858 self.config = config
859 self.output_file = output_file
860 self.append = append
861 self.num_days = num_days
862 self.severity = severity
863 self.vhost = vhost
864 self.include_vhost = include_vhost
865 self.include_all = include_all
866 self.version_id = self.config.version + '.1'
867 self.sentinel = None
868 self.write_mode = 'w'
869 if self.append:
870 self.sentinel = FindSentinel(self.output_file)
871 self.write_mode = 'a'
873 self.skip_until = False
874 now = PacificDate(time_func())
875 if end < now:
876 self.skip_until = end
877 else:
878 end = now
880 self.valid_dates = None
881 if self.num_days:
882 start = end - datetime.timedelta(self.num_days - 1)
883 self.valid_dates = (start, end)
885 def DownloadLogs(self):
886 """Download the requested logs.
888 This will write the logs to the file designated by
889 self.output_file, or to stdout if the filename is '-'.
890 Multiple roundtrips to the server may be made.
892 StatusUpdate('Downloading request logs for %s %s.' %
893 (self.config.application, self.version_id))
894 tf = tempfile.TemporaryFile()
895 last_offset = None
896 try:
897 while True:
898 try:
899 new_offset = self.RequestLogLines(tf, last_offset)
900 if not new_offset or new_offset == last_offset:
901 break
902 last_offset = new_offset
903 except KeyboardInterrupt:
904 StatusUpdate('Keyboard interrupt; saving data downloaded so far.')
905 break
906 StatusUpdate('Copying request logs to %r.' % self.output_file)
907 if self.output_file == '-':
908 of = sys.stdout
909 else:
910 try:
911 of = open(self.output_file, self.write_mode)
912 except IOError, err:
913 StatusUpdate('Can\'t write %r: %s.' % (self.output_file, err))
914 sys.exit(1)
915 try:
916 line_count = CopyReversedLines(tf, of)
917 finally:
918 of.flush()
919 if of is not sys.stdout:
920 of.close()
921 finally:
922 tf.close()
923 StatusUpdate('Copied %d records.' % line_count)
925 def RequestLogLines(self, tf, offset):
926 """Make a single roundtrip to the server.
928 Args:
929 tf: Writable binary stream to which the log lines returned by
930 the server are written, stripped of headers, and excluding
931 lines skipped due to self.sentinel or self.valid_dates filtering.
932 offset: Offset string for a continued request; None for the first.
934 Returns:
935 The offset string to be used for the next request, if another
936 request should be issued; or None, if not.
938 logging.info('Request with offset %r.', offset)
939 kwds = {'app_id': self.config.application,
940 'version': self.version_id,
941 'limit': 1000,
943 if offset:
944 kwds['offset'] = offset
945 if self.severity is not None:
946 kwds['severity'] = str(self.severity)
947 if self.vhost is not None:
948 kwds['vhost'] = str(self.vhost)
949 if self.include_vhost is not None:
950 kwds['include_vhost'] = str(self.include_vhost)
951 if self.include_all is not None:
952 kwds['include_all'] = str(self.include_all)
953 response = self.server.Send('/api/request_logs', payload=None, **kwds)
954 response = response.replace('\r', '\0')
955 lines = response.splitlines()
956 logging.info('Received %d bytes, %d records.', len(response), len(lines))
957 offset = None
958 if lines and lines[0].startswith('#'):
959 match = re.match(r'^#\s*next_offset=(\S+)\s*$', lines[0])
960 del lines[0]
961 if match:
962 offset = match.group(1)
963 if lines and lines[-1].startswith('#'):
964 del lines[-1]
965 valid_dates = self.valid_dates
966 sentinel = self.sentinel
967 skip_until = self.skip_until
968 len_sentinel = None
969 if sentinel:
970 len_sentinel = len(sentinel)
971 for line in lines:
972 if (sentinel and
973 line.startswith(sentinel) and
974 line[len_sentinel : len_sentinel+1] in ('', '\0')):
975 return None
977 linedate = DateOfLogLine(line)
978 if not linedate:
979 continue
981 if skip_until:
982 if linedate > skip_until:
983 continue
984 else:
985 self.skip_until = skip_until = False
987 if valid_dates and not valid_dates[0] <= linedate <= valid_dates[1]:
988 return None
989 tf.write(line + '\n')
990 if not lines:
991 return None
992 return offset
def DateOfLogLine(line):
  """Returns a date object representing the log line's timestamp.

  Expects an Apache-style access log line with a '[DD/Mon/YYYY:...' field.

  Args:
    line: a log line string.

  Returns:
    A date object representing the timestamp or None if parsing fails.
  """
  match = re.match(r'[^[]+\[(\d+/[A-Za-z]+/\d+):[^\d]*', line)
  if match is None:
    return None
  try:
    parsed = time.strptime(match.group(1), '%d/%b/%Y')
  except ValueError:
    return None
  return datetime.date(*parsed[:3])
def PacificDate(now):
  """For a UTC timestamp, return the date in the US/Pacific timezone.

  Args:
    now: A posix timestamp giving current UTC time.

  Returns:
    A date object representing what day it is in the US/Pacific timezone.
  """
  year, month, day = time.gmtime(PacificTime(now))[:3]
  return datetime.date(year, month, day)
def PacificTime(now):
  """Shift a UTC posix timestamp to a pseudo-posix Pacific timestamp.

  Request logs are reported in Mountain View local (Pacific) time,
  regardless of where the servers are physically located, so 'today'
  must be computed in that timezone — not UTC and not the client's
  local time (the HTTP Date header uses UTC).

  Pacific time is 8 hours west of UTC, or 7 hours when US daylight
  saving time is in effect.  Post-2006 US DST runs from the second
  Sunday in March to the first Sunday in November (reference:
  http://aa.usno.navy.mil/faq/docs/daylight_time.php).

  Args:
    now: A posix timestamp giving current UTC time.

  Returns:
    A pseudo-posix timestamp giving current Pacific time.  Passing it
    through time.gmtime() will produce a tuple in Pacific local time.
  """
  pacific = now - 8 * 3600
  if IsPacificDST(pacific):
    pacific += 3600
  return pacific
def IsPacificDST(now):
  """Decide whether a pseudo-posix PST timestamp falls within US DST.

  DST begins at 2:00 on the second Sunday in March and ends at 2:00 on
  the first Sunday in November; only years >= 2007 are supported.

  Args:
    now: A pseudo-posix timestamp giving current time in PST.

  Returns:
    True if now falls within the range of DST, False otherwise.
  """
  day_seconds = 24 * 3600
  sunday = 6

  def _NextSunday(timestamp):
    """Advance timestamp one day at a time until it lands on a Sunday."""
    while time.gmtime(timestamp).tm_wday != sunday:
      timestamp += day_seconds
    return timestamp

  year = time.gmtime(now)[0]
  assert year >= 2007
  # Second Sunday in March: first Sunday on or after March 8, 2:00.
  dst_start = _NextSunday(calendar.timegm((year, 3, 8, 2, 0, 0, 0, 0, 0)))
  # First Sunday in November, 2:00.
  dst_end = _NextSunday(calendar.timegm((year, 11, 1, 2, 0, 0, 0, 0, 0)))
  return dst_start <= now < dst_end
def CopyReversedLines(instream, outstream, blocksize=2**16):
  r"""Copy lines from input stream to output stream in reverse order.

  As a special feature, null bytes in the input are turned into
  newlines followed by tabs in the output, but these 'sub-lines'
  separated by null bytes are not reversed.  E.g. if the input is
  'A\0B\nC\0D\n', the output is 'C\n\tD\nA\n\tB\n'.

  Args:
    instream: A seekable stream open for reading in binary mode.
    outstream: A stream open for writing; doesn't have to be seekable or binary.
    blocksize: Optional block size for buffering, for unit testing.

  Returns:
    The number of lines copied.
  """
  line_count = 0
  instream.seek(0, 2)
  # Walk the stream a block at a time, from the end toward the start.
  # (The +1 covers a final partial block; reading past EOF yields ''.)
  block_index = instream.tell() // blocksize + 1
  carry = ''
  while block_index >= 0:
    instream.seek(block_index * blocksize)
    chunk = instream.read(blocksize)
    lines = chunk.splitlines(True)
    # Glue the carried-over fragment from the following block onto the
    # last (possibly partial) line of this block.
    lines[-1:] = ''.join(lines[-1:] + [carry]).splitlines(True)
    if lines and not lines[-1].endswith('\n'):
      lines[-1] += '\n'
    lines.reverse()
    if lines and block_index > 0:
      # The first line of this block may continue into the previous
      # block; hold it back until that block has been read.
      carry = lines.pop()
    if lines:
      line_count += len(lines)
      outstream.write(''.join(lines).replace('\0', '\n\t'))
    block_index -= 1
  return line_count
1115 def FindSentinel(filename, blocksize=2**16):
1116 """Return the sentinel line from the output file.
1118 Args:
1119 filename: The filename of the output file. (We'll read this file.)
1120 blocksize: Optional block size for buffering, for unit testing.
1122 Returns:
1123 The contents of the last line in the file that doesn't start with
1124 a tab, with its trailing newline stripped; or None if the file
1125 couldn't be opened or no such line could be found by inspecting
1126 the last 'blocksize' bytes of the file.
1128 if filename == '-':
1129 StatusUpdate('Can\'t combine --append with output to stdout.')
1130 sys.exit(2)
1131 try:
1132 fp = open(filename, 'rb')
1133 except IOError, err:
1134 StatusUpdate('Append mode disabled: can\'t read %r: %s.' % (filename, err))
1135 return None
1136 try:
1137 fp.seek(0, 2)
1138 fp.seek(max(0, fp.tell() - blocksize))
1139 lines = fp.readlines()
1140 del lines[:1]
1141 sentinel = None
1142 for line in lines:
1143 if not line.startswith('\t'):
1144 sentinel = line
1145 if not sentinel:
1146 StatusUpdate('Append mode disabled: can\'t find sentinel in %r.' %
1147 filename)
1148 return None
1149 return sentinel.rstrip('\n')
1150 finally:
1151 fp.close()
1154 class UploadBatcher(object):
1155 """Helper to batch file uploads."""
1157 def __init__(self, what, app_id, version, server):
1158 """Constructor.
1160 Args:
1161 what: Either 'file' or 'blob' or 'errorblob' indicating what kind of
1162 objects this batcher uploads. Used in messages and URLs.
1163 app_id: The application ID.
1164 version: The application version string.
1165 server: The RPC server.
1167 assert what in ('file', 'blob', 'errorblob'), repr(what)
1168 self.what = what
1169 self.app_id = app_id
1170 self.version = version
1171 self.server = server
1172 self.single_url = '/api/appversion/add' + what
1173 self.batch_url = self.single_url + 's'
1174 self.batching = True
1175 self.batch = []
1176 self.batch_size = 0
1178 def SendBatch(self):
1179 """Send the current batch on its way.
1181 If successful, resets self.batch and self.batch_size.
1183 Raises:
1184 HTTPError with code=404 if the server doesn't support batching.
1186 boundary = 'boundary'
1187 parts = []
1188 for path, payload, mime_type in self.batch:
1189 while boundary in payload:
1190 boundary += '%04x' % random.randint(0, 0xffff)
1191 assert len(boundary) < 80, 'Unexpected error, please try again.'
1192 part = '\n'.join(['',
1193 'X-Appcfg-File: %s' % urllib.quote(path),
1194 'X-Appcfg-Hash: %s' % _Hash(payload),
1195 'Content-Type: %s' % mime_type,
1196 'Content-Length: %d' % len(payload),
1197 'Content-Transfer-Encoding: 8bit',
1199 payload,
1201 parts.append(part)
1202 parts.insert(0,
1203 'MIME-Version: 1.0\n'
1204 'Content-Type: multipart/mixed; boundary="%s"\n'
1205 '\n'
1206 'This is a message with multiple parts in MIME format.' %
1207 boundary)
1208 parts.append('--\n')
1209 delimiter = '\n--%s' % boundary
1210 payload = delimiter.join(parts)
1211 logging.info('Uploading batch of %d %ss to %s with boundary="%s".',
1212 len(self.batch), self.what, self.batch_url, boundary)
1213 self.server.Send(self.batch_url,
1214 payload=payload,
1215 content_type='message/rfc822',
1216 app_id=self.app_id,
1217 version=self.version)
1218 self.batch = []
1219 self.batch_size = 0
1221 def SendSingleFile(self, path, payload, mime_type):
1222 """Send a single file on its way."""
1223 logging.info('Uploading %s %s (%s bytes, type=%s) to %s.',
1224 self.what, path, len(payload), mime_type, self.single_url)
1225 self.server.Send(self.single_url,
1226 payload=payload,
1227 content_type=mime_type,
1228 path=path,
1229 app_id=self.app_id,
1230 version=self.version)
1232 def Flush(self):
1233 """Flush the current batch.
1235 This first attempts to send the batch as a single request; if that
1236 fails because the server doesn't support batching, the files are
1237 sent one by one, and self.batching is reset to False.
1239 At the end, self.batch and self.batch_size are reset.
1241 if not self.batch:
1242 return
1243 try:
1244 self.SendBatch()
1245 except urllib2.HTTPError, err:
1246 if err.code != 404:
1247 raise
1249 logging.info('Old server detected; turning off %s batching.', self.what)
1250 self.batching = False
1252 for path, payload, mime_type in self.batch:
1253 self.SendSingleFile(path, payload, mime_type)
1255 self.batch = []
1256 self.batch_size = 0
1258 def AddToBatch(self, path, payload, mime_type):
1259 """Batch a file, possibly flushing first, or perhaps upload it directly.
1261 Args:
1262 path: The name of the file.
1263 payload: The contents of the file.
1264 mime_type: The MIME Content-type of the file, or None.
1266 If mime_type is None, application/octet-stream is substituted.
1268 if not mime_type:
1269 mime_type = 'application/octet-stream'
1270 size = len(payload)
1271 if size <= MAX_BATCH_FILE_SIZE:
1272 if (len(self.batch) >= MAX_BATCH_COUNT or
1273 self.batch_size + size > MAX_BATCH_SIZE):
1274 self.Flush()
1275 if self.batching:
1276 logging.info('Adding %s %s (%s bytes, type=%s) to batch.',
1277 self.what, path, size, mime_type)
1278 self.batch.append((path, payload, mime_type))
1279 self.batch_size += size + BATCH_OVERHEAD
1280 return
1281 self.SendSingleFile(path, payload, mime_type)
1284 def _Hash(content):
1285 """Compute the hash of the content.
1287 Args:
1288 content: The data to hash as a string.
1290 Returns:
1291 The string representation of the hash.
1293 h = sha.new(content).hexdigest()
1294 return '%s_%s_%s_%s_%s' % (h[0:8], h[8:16], h[16:24], h[24:32], h[32:40])
1297 class AppVersionUpload(object):
1298 """Provides facilities to upload a new appversion to the hosting service.
1300 Attributes:
1301 server: The AbstractRpcServer to use for the upload.
1302 config: The AppInfoExternal object derived from the app.yaml file.
1303 app_id: The application string from 'config'.
1304 version: The version string from 'config'.
1305 files: A dictionary of files to upload to the server, mapping path to
1306 hash of the file contents.
1307 in_transaction: True iff a transaction with the server has started.
1308 An AppVersionUpload can do only one transaction at a time.
1309 deployed: True iff the Deploy method has been called.
1312 def __init__(self, server, config):
1313 """Creates a new AppVersionUpload.
1315 Args:
1316 server: The RPC server to use. Should be an instance of HttpRpcServer or
1317 TestRpcServer.
1318 config: An AppInfoExternal object that specifies the configuration for
1319 this application.
1321 self.server = server
1322 self.config = config
1323 self.app_id = self.config.application
1324 self.version = self.config.version
1326 self.files = {}
1328 self.in_transaction = False
1329 self.deployed = False
1330 self.batching = True
1331 self.file_batcher = UploadBatcher('file', self.app_id, self.version,
1332 self.server)
1333 self.blob_batcher = UploadBatcher('blob', self.app_id, self.version,
1334 self.server)
1335 self.errorblob_batcher = UploadBatcher('errorblob', self.app_id,
1336 self.version, self.server)
1338 def AddFile(self, path, file_handle):
1339 """Adds the provided file to the list to be pushed to the server.
1341 Args:
1342 path: The path the file should be uploaded as.
1343 file_handle: A stream containing data to upload.
1345 assert not self.in_transaction, 'Already in a transaction.'
1346 assert file_handle is not None
1348 reason = appinfo.ValidFilename(path)
1349 if reason:
1350 logging.error(reason)
1351 return
1353 pos = file_handle.tell()
1354 content_hash = _Hash(file_handle.read())
1355 file_handle.seek(pos, 0)
1357 self.files[path] = content_hash
1359 def Begin(self):
1360 """Begins the transaction, returning a list of files that need uploading.
1362 All calls to AddFile must be made before calling Begin().
1364 Returns:
1365 A list of pathnames for files that should be uploaded using UploadFile()
1366 before Commit() can be called.
1368 assert not self.in_transaction, 'Already in a transaction.'
1370 StatusUpdate('Initiating update.')
1371 self.server.Send('/api/appversion/create', app_id=self.app_id,
1372 version=self.version, payload=self.config.ToYAML())
1373 self.in_transaction = True
1375 files_to_clone = []
1376 blobs_to_clone = []
1377 errorblobs = {}
1378 for path, content_hash in self.files.iteritems():
1379 match_found = False
1381 mime_type = GetMimeTypeIfStaticFile(self.config, path)
1382 if mime_type is not None:
1383 blobs_to_clone.append((path, content_hash, mime_type))
1384 match_found = True
1386 (mime_type, error_code) = LookupErrorBlob(self.config, path)
1387 if mime_type is not None:
1388 errorblobs[path] = content_hash
1389 match_found = True
1391 if not match_found:
1392 files_to_clone.append((path, content_hash))
1394 files_to_upload = {}
1396 def CloneFiles(url, files, file_type):
1397 """Sends files to the given url.
1399 Args:
1400 url: the server URL to use.
1401 files: a list of files
1402 file_type: the type of the files
1404 if not files:
1405 return
1407 StatusUpdate('Cloning %d %s file%s.' %
1408 (len(files), file_type, len(files) != 1 and 's' or ''))
1409 for i in xrange(0, len(files), MAX_FILES_TO_CLONE):
1410 if i > 0 and i % MAX_FILES_TO_CLONE == 0:
1411 StatusUpdate('Cloned %d files.' % i)
1413 chunk = files[i:min(len(files), i + MAX_FILES_TO_CLONE)]
1414 result = self.server.Send(url,
1415 app_id=self.app_id, version=self.version,
1416 payload=BuildClonePostBody(chunk))
1417 if result:
1418 files_to_upload.update(dict(
1419 (f, self.files[f]) for f in result.split(LIST_DELIMITER)))
1421 CloneFiles('/api/appversion/cloneblobs', blobs_to_clone, 'static')
1422 CloneFiles('/api/appversion/clonefiles', files_to_clone, 'application')
1424 logging.debug('Files to upload: %s', files_to_upload)
1426 for (path, content_hash) in errorblobs.iteritems():
1427 files_to_upload[path] = content_hash
1428 self.files = files_to_upload
1429 return sorted(files_to_upload.iterkeys())
1431 def UploadFile(self, path, file_handle):
1432 """Uploads a file to the hosting service.
1434 Must only be called after Begin().
1435 The path provided must be one of those that were returned by Begin().
1437 Args:
1438 path: The path the file is being uploaded as.
1439 file_handle: A file-like object containing the data to upload.
1441 Raises:
1442 KeyError: The provided file is not amongst those to be uploaded.
1444 assert self.in_transaction, 'Begin() must be called before UploadFile().'
1445 if path not in self.files:
1446 raise KeyError('File \'%s\' is not in the list of files to be uploaded.'
1447 % path)
1449 del self.files[path]
1451 match_found = False
1452 mime_type = GetMimeTypeIfStaticFile(self.config, path)
1453 payload = file_handle.read()
1454 if mime_type is not None:
1455 self.blob_batcher.AddToBatch(path, payload, mime_type)
1456 match_found = True
1458 (mime_type, error_code) = LookupErrorBlob(self.config, path)
1459 if mime_type is not None:
1460 self.errorblob_batcher.AddToBatch(error_code, payload, mime_type)
1461 match_found = True
1463 if not match_found:
1464 self.file_batcher.AddToBatch(path, payload, None)
1467 def Precompile(self):
1468 """Handle bytecode precompilation."""
1469 StatusUpdate('Precompilation starting.')
1470 files = []
1471 while True:
1472 if files:
1473 StatusUpdate('Precompilation: %d files left.' % len(files))
1474 files = self.PrecompileBatch(files)
1475 if not files:
1476 break
1477 StatusUpdate('Precompilation completed.')
1479 def PrecompileBatch(self, files):
1480 """Precompile a batch of files.
1482 Args:
1483 files: Either an empty list (for the initial request) or a list
1484 of files to be precompiled.
1486 Returns:
1487 Either an empty list (if no more files need to be precompiled)
1488 or a list of files to be precompiled subsequently.
1490 payload = LIST_DELIMITER.join(files)
1491 response = self.server.Send('/api/appversion/precompile',
1492 app_id=self.app_id,
1493 version=self.version,
1494 payload=payload)
1495 if not response:
1496 return []
1497 return response.split(LIST_DELIMITER)
1499 def Commit(self):
1500 """Commits the transaction, making the new app version available.
1502 All the files returned by Begin() must have been uploaded with UploadFile()
1503 before Commit() can be called.
1505 This tries the new 'deploy' method; if that fails it uses the old 'commit'.
1507 Raises:
1508 Exception: Some required files were not uploaded.
1510 assert self.in_transaction, 'Begin() must be called before Commit().'
1511 if self.files:
1512 raise Exception('Not all required files have been uploaded.')
1514 try:
1515 self.Deploy()
1516 if not RetryWithBackoff(1, 2, 60, 20, self.IsReady):
1517 logging.warning('Version still not ready to serve, aborting.')
1518 raise Exception('Version not ready.')
1519 self.StartServing()
1520 except urllib2.HTTPError, e:
1521 if e.code != 404:
1522 raise
1523 StatusUpdate('Closing update.')
1524 self.server.Send('/api/appversion/commit', app_id=self.app_id,
1525 version=self.version)
1526 self.in_transaction = False
1528 def Deploy(self):
1529 """Deploys the new app version but does not make it default.
1531 All the files returned by Begin() must have been uploaded with UploadFile()
1532 before Deploy() can be called.
1534 Raises:
1535 Exception: Some required files were not uploaded.
1537 assert self.in_transaction, 'Begin() must be called before Deploy().'
1538 if self.files:
1539 raise Exception('Not all required files have been uploaded.')
1541 StatusUpdate('Deploying new version.')
1542 self.server.Send('/api/appversion/deploy', app_id=self.app_id,
1543 version=self.version)
1544 self.deployed = True
1546 def IsReady(self):
1547 """Check if the new app version is ready to serve traffic.
1549 Raises:
1550 Exception: Deploy has not yet been called.
1552 Returns:
1553 True if the server returned the app is ready to serve.
1555 assert self.deployed, 'Deploy() must be called before IsReady().'
1557 StatusUpdate('Checking if new version is ready to serve.')
1558 result = self.server.Send('/api/appversion/isready', app_id=self.app_id,
1559 version=self.version)
1560 return result == '1'
1562 def StartServing(self):
1563 """Start serving with the newly created version.
1565 Raises:
1566 Exception: Deploy has not yet been called.
1568 assert self.deployed, 'Deploy() must be called before IsReady().'
1570 StatusUpdate('Closing update: new version is ready to start serving.')
1571 self.server.Send('/api/appversion/startserving',
1572 app_id=self.app_id, version=self.version)
1573 self.in_transaction = False
1575 def Rollback(self):
1576 """Rolls back the transaction if one is in progress."""
1577 if not self.in_transaction:
1578 return
1579 StatusUpdate('Rolling back the update.')
1580 self.server.Send('/api/appversion/rollback', app_id=self.app_id,
1581 version=self.version)
1582 self.in_transaction = False
1583 self.files = {}
1585 def DoUpload(self, paths, max_size, openfunc):
1586 """Uploads a new appversion with the given config and files to the server.
1588 Args:
1589 paths: An iterator that yields the relative paths of the files to upload.
1590 max_size: The maximum size file to upload.
1591 openfunc: A function that takes a path and returns a file-like object.
1593 logging.info('Reading app configuration.')
1595 path = ''
1596 try:
1597 StatusUpdate('Scanning files on local disk.')
1598 num_files = 0
1599 for path in paths:
1600 file_handle = openfunc(path)
1601 try:
1602 file_length = GetFileLength(file_handle)
1603 if file_length > max_size:
1604 logging.error('Ignoring file \'%s\': Too long '
1605 '(max %d bytes, file is %d bytes)',
1606 path, max_size, file_length)
1607 else:
1608 logging.info('Processing file \'%s\'', path)
1609 self.AddFile(path, file_handle)
1610 finally:
1611 file_handle.close()
1612 num_files += 1
1613 if num_files % 500 == 0:
1614 StatusUpdate('Scanned %d files.' % num_files)
1615 except KeyboardInterrupt:
1616 logging.info('User interrupted. Aborting.')
1617 raise
1618 except EnvironmentError, e:
1619 logging.error('An error occurred processing file \'%s\': %s. Aborting.',
1620 path, e)
1621 raise
1623 try:
1624 missing_files = self.Begin()
1625 if missing_files:
1626 StatusUpdate('Uploading %d files and blobs.' % len(missing_files))
1627 num_files = 0
1628 for missing_file in missing_files:
1629 file_handle = openfunc(missing_file)
1630 try:
1631 self.UploadFile(missing_file, file_handle)
1632 finally:
1633 file_handle.close()
1634 num_files += 1
1635 if num_files % 500 == 0:
1636 StatusUpdate('Processed %d out of %s.' %
1637 (num_files, len(missing_files)))
1638 self.file_batcher.Flush()
1639 self.blob_batcher.Flush()
1640 self.errorblob_batcher.Flush()
1641 StatusUpdate('Uploaded %d files and blobs' % num_files)
1643 if (self.config.derived_file_type and
1644 appinfo.PYTHON_PRECOMPILED in self.config.derived_file_type):
1645 self.Precompile()
1647 self.Commit()
1649 except KeyboardInterrupt:
1650 logging.info('User interrupted. Aborting.')
1651 self.Rollback()
1652 raise
1653 except urllib2.HTTPError, err:
1654 logging.info('HTTP Error (%s)', err)
1655 self.Rollback()
1656 raise
1657 except:
1658 logging.exception('An unexpected error occurred. Aborting.')
1659 self.Rollback()
1660 raise
1662 logging.info('Done!')
def FileIterator(base, skip_files, separator=os.path.sep):
  """Walks a directory tree, returning all the files. Follows symlinks.

  Args:
    base: The base path to search for files under.
    skip_files: A regular expression object for files/directories to skip.
    separator: Path separator used by the running system's platform.

  Yields:
    Paths of files found, relative to base.
  """
  pending_dirs = ['']
  while pending_dirs:
    current_dir = pending_dirs.pop()
    for entry in os.listdir(os.path.join(base, current_dir)):
      name = os.path.join(current_dir, entry)
      fullname = os.path.join(base, name)
      if separator == '\\':
        # Report Windows paths with forward slashes.
        name = name.replace('\\', '/')
      if os.path.isfile(fullname):
        if skip_files.match(name):
          logging.info('Ignoring file \'%s\': File matches ignore regex.', name)
          continue
        yield name
      elif os.path.isdir(fullname):
        if skip_files.match(name):
          logging.info(
              'Ignoring directory \'%s\': Directory matches ignore regex.',
              name)
          continue
        pending_dirs.append(name)
def GetFileLength(fh):
  """Returns the length of the file represented by fh.

  This function is capable of finding the length of any seekable stream,
  unlike os.fstat, which only works on file streams.

  Args:
    fh: The stream to get the length of.

  Returns:
    The length of the stream.
  """
  original_position = fh.tell()
  fh.seek(0, 2)
  end_position = fh.tell()
  # Restore the caller's stream position before returning.
  fh.seek(original_position, 0)
  return end_position
def GetUserAgent(get_version=GetVersionObject,
                 get_platform=appengine_rpc.GetPlatformToken):
  """Determines the value of the 'User-agent' header to use for HTTP requests.

  If the 'APPCFG_SDK_NAME' environment variable is present, that will be
  used as the first product token in the user-agent.

  Args:
    get_version: Used for testing.
    get_platform: Used for testing.

  Returns:
    String containing the 'user-agent' header value, which includes the SDK
    version, the platform information, and the version of Python;
    e.g., 'appcfg_py/1.0.1 Darwin/9.2.0 Python/2.5.2'.
  """
  tokens = []

  sdk_name = os.environ.get('APPCFG_SDK_NAME')
  if sdk_name:
    tokens.append(sdk_name)
  else:
    version = get_version()
    release = 'unknown' if version is None else version['release']
    tokens.append('appcfg_py/%s' % release)

  tokens.append(get_platform())

  tokens.append('Python/%s' % '.'.join(str(i) for i in sys.version_info))

  return ' '.join(tokens)
def GetSourceName(get_version=GetVersionObject):
  """Gets the name of this source version."""
  version = get_version()
  release = 'unknown' if version is None else version['release']
  return 'Google-appcfg-%s' % (release,)
1765 class AppCfgApp(object):
1766 """Singleton class to wrap AppCfg tool functionality.
1768 This class is responsible for parsing the command line and executing
1769 the desired action on behalf of the user. Processing files and
1770 communicating with the server is handled by other classes.
1772 Attributes:
1773 actions: A dictionary mapping action names to Action objects.
1774 action: The Action specified on the command line.
1775 parser: An instance of optparse.OptionParser.
1776 options: The command line options parsed by 'parser'.
1777 argv: The original command line as a list.
1778 args: The positional command line args left over after parsing the options.
1779 raw_input_fn: Function used for getting raw user input, like email.
1780 password_input_fn: Function used for getting user password.
1781 error_fh: Unexpected HTTPErrors are printed to this file handle.
1783 Attributes for testing:
1784 parser_class: The class to use for parsing the command line. Because
1785 OptionsParser will exit the program when there is a parse failure, it
1786 is nice to subclass OptionsParser and catch the error before exiting.
  def __init__(self, argv, parser_class=optparse.OptionParser,
               rpc_server_class=appengine_rpc.HttpRpcServer,
               raw_input_fn=raw_input,
               password_input_fn=getpass.getpass,
               error_fh=sys.stderr,
               update_check_class=UpdateCheck):
    """Initializer. Parses the cmdline and selects the Action to use.

    Initializes all of the attributes described in the class docstring.
    Prints help or error messages if there is an error parsing the cmdline.

    Args:
      argv: The list of arguments passed to this program.
      parser_class: Options parser to use for this application.
      rpc_server_class: RPC server class to use for this application.
      raw_input_fn: Function used for getting user email.
      password_input_fn: Function used for getting user password.
      error_fh: Unexpected HTTPErrors are printed to this file handle.
      update_check_class: UpdateCheck class (can be replaced for testing).
    """
    self.parser_class = parser_class
    self.argv = argv
    self.rpc_server_class = rpc_server_class
    self.raw_input_fn = raw_input_fn
    self.password_input_fn = password_input_fn
    self.error_fh = error_fh
    self.update_check_class = update_check_class

    # First pass: a generic parser that knows every action's options, so
    # option errors can be reported even before the action is identified.
    self.parser = self._GetOptionParser()
    for action in self.actions.itervalues():
      action.options(self, self.parser)

    self.options, self.args = self.parser.parse_args(argv[1:])

    if len(self.args) < 1:
      self._PrintHelpAndExit()
    if self.args[0] not in self.actions:
      self.parser.error('Unknown action \'%s\'\n%s' %
                        (self.args[0], self.parser.get_description()))
    action_name = self.args.pop(0)
    self.action = self.actions[action_name]

    # Second pass: re-parse with a parser customized to the chosen action
    # (its usage string, description, and action-specific options).
    self.parser, self.options = self._MakeSpecificParser(self.action)

    if self.options.help:
      self._PrintHelpAndExit()

    if self.options.verbose == 2:
      logging.getLogger().setLevel(logging.INFO)
    elif self.options.verbose == 3:
      logging.getLogger().setLevel(logging.DEBUG)

    # Update the module-level 'verbosity' global from the parsed options.
    global verbosity
    verbosity = self.options.verbose
1844 def Run(self):
1845 """Executes the requested action.
1847 Catches any HTTPErrors raised by the action and prints them to stderr.
1849 Returns:
1850 1 on error, 0 if successful.
1852 try:
1853 self.action(self)
1854 except urllib2.HTTPError, e:
1855 body = e.read()
1856 print >>self.error_fh, ('Error %d: --- begin server output ---\n'
1857 '%s\n--- end server output ---' %
1858 (e.code, body.rstrip('\n')))
1859 return 1
1860 except yaml_errors.EventListenerError, e:
1861 print >>self.error_fh, ('Error parsing yaml file:\n%s' % e)
1862 return 1
1863 return 0
1865 def _GetActionDescriptions(self):
1866 """Returns a formatted string containing the short_descs for all actions."""
1867 action_names = self.actions.keys()
1868 action_names.sort()
1869 desc = ''
1870 for action_name in action_names:
1871 desc += ' %s: %s\n' % (action_name, self.actions[action_name].short_desc)
1872 return desc
  def _GetOptionParser(self):
    """Creates an OptionParser with generic usage and description strings.

    Returns:
      An OptionParser instance.
    """

    class Formatter(optparse.IndentedHelpFormatter):
      """Custom help formatter that does not reformat the description."""

      def format_description(self, description):
        """Very simple formatter."""
        return description + '\n'

    desc = self._GetActionDescriptions()
    desc = ('Action must be one of:\n%s'
            'Use \'help <action>\' for a detailed description.') % desc

    # conflict_handler='resolve' lets the explicit -h/--help option below
    # replace optparse's built-in help option.
    parser = self.parser_class(usage='%prog [options] <action>',
                               description=desc,
                               formatter=Formatter(),
                               conflict_handler='resolve')
    parser.add_option('-h', '--help', action='store_true',
                      dest='help', help='Show the help message and exit.')
    parser.add_option('-q', '--quiet', action='store_const', const=0,
                      dest='verbose', help='Print errors only.')
    parser.add_option('-v', '--verbose', action='store_const', const=2,
                      dest='verbose', default=1,
                      help='Print info level logs.')
    parser.add_option('--noisy', action='store_const', const=3,
                      dest='verbose', help='Print all logs.')
    parser.add_option('-s', '--server', action='store', dest='server',
                      default='appengine.google.com',
                      metavar='SERVER', help='The server to connect to.')
    parser.add_option('--secure', action='store_true', dest='secure',
                      default=True, help=optparse.SUPPRESS_HELP)
    parser.add_option('--insecure', action='store_false', dest='secure',
                      help='Use HTTP when communicating with the server.')
    parser.add_option('-e', '--email', action='store', dest='email',
                      metavar='EMAIL', default=None,
                      help='The username to use. Will prompt if omitted.')
    parser.add_option('-H', '--host', action='store', dest='host',
                      metavar='HOST', default=None,
                      help='Overrides the Host header sent with all RPCs.')
    parser.add_option('--no_cookies', action='store_false',
                      dest='save_cookies', default=True,
                      help='Do not save authentication cookies to local disk.')
    parser.add_option('--passin', action='store_true',
                      dest='passin', default=False,
                      help='Read the login password from stdin.')
    parser.add_option('-A', '--application', action='store', dest='app_id',
                      help='Override application from app.yaml file.')
    parser.add_option('-V', '--version', action='store', dest='version',
                      help='Override (major) version from app.yaml file.')
    return parser
1930 def _MakeSpecificParser(self, action):
1931 """Creates a new parser with documentation specific to 'action'.
1933 Args:
1934 action: An Action instance to be used when initializing the new parser.
1936 Returns:
1937 A tuple containing:
1938 parser: An instance of OptionsParser customized to 'action'.
1939 options: The command line options after re-parsing.
1941 parser = self._GetOptionParser()
1942 parser.set_usage(action.usage)
1943 parser.set_description('%s\n%s' % (action.short_desc, action.long_desc))
1944 action.options(self, parser)
1945 options, unused_args = parser.parse_args(self.argv[1:])
1946 return parser, options
  def _PrintHelpAndExit(self, exit_code=2):
    """Prints the parser's help message and exits the program.

    Args:
      exit_code: The integer code to pass to sys.exit().
    """
    self.parser.print_help()
    sys.exit(exit_code)
def _GetRpcServer(self):
  """Returns an instance of an AbstractRpcServer.

  Returns:
    A new AbstractRpcServer, on which RPC calls can be made.
  """

  def GetUserCredentials():
    """Prompts the user for a username and password."""
    email = self.options.email
    if email is None:
      email = self.raw_input_fn('Email: ')

    password_prompt = 'Password for %s: ' % email
    # With --passin the password arrives on stdin, so use the plain
    # (echoing) reader instead of the hidden password prompt.
    if self.options.passin:
      password = self.raw_input_fn(password_prompt)
    else:
      password = self.password_input_fn(password_prompt)

    return (email, password)

  StatusUpdate('Server: %s.' % self.options.server)

  # Talking to a local dev server: skip real authentication and use a
  # canned debug identity over plain HTTP.
  if self.options.host and self.options.host == 'localhost':
    email = self.options.email
    if email is None:
      email = 'test@example.com'
    logging.info('Using debug user %s. Override with --email', email)
    server = self.rpc_server_class(
        self.options.server,
        lambda: (email, 'password'),
        GetUserAgent(),
        GetSourceName(),
        host_override=self.options.host,
        save_cookies=self.options.save_cookies,
        secure=False)
    # The dev server does not require a real login round-trip.
    server.authenticated = True
    return server

  # With --passin only one password is available on stdin, so do not
  # retry authentication; interactively, allow three attempts.
  if self.options.passin:
    auth_tries = 1
  else:
    auth_tries = 3

  return self.rpc_server_class(self.options.server, GetUserCredentials,
                               GetUserAgent(), GetSourceName(),
                               host_override=self.options.host,
                               save_cookies=self.options.save_cookies,
                               auth_tries=auth_tries,
                               account_type='HOSTED_OR_GOOGLE',
                               secure=self.options.secure)
2010 def _FindYaml(self, basepath, file_name):
2011 """Find yaml files in application directory.
2013 Args:
2014 basepath: Base application directory.
2015 file_name: Filename without extension to search for.
2017 Returns:
2018 Path to located yaml file if one exists, else None.
2020 if not os.path.isdir(basepath):
2021 self.parser.error('Not a directory: %s' % basepath)
2023 for yaml_file in (file_name + '.yaml', file_name + '.yml'):
2024 yaml_path = os.path.join(basepath, yaml_file)
2025 if os.path.isfile(yaml_path):
2026 return yaml_path
2028 return None
def _ParseAppYaml(self, basepath):
  """Parses the app.yaml file, applying any command line overrides.

  Args:
    basepath: the directory of the application.

  Returns:
    An AppInfoExternal object.
  """
  appyaml_filename = self._FindYaml(basepath, 'app')
  if appyaml_filename is None:
    self.parser.error('Directory does not contain an app.yaml '
                      'configuration file.')

  fh = open(appyaml_filename, 'r')
  try:
    appyaml = appinfo.LoadSingleAppInfo(fh)
  finally:
    fh.close()

  # --application / --version take precedence over the file contents.
  original_application = appyaml.application
  original_version = appyaml.version
  if self.options.app_id:
    appyaml.application = self.options.app_id
  if self.options.version:
    appyaml.version = self.options.version

  # Report the effective identity, noting any values that were overridden.
  msg = 'Application: %s' % appyaml.application
  if appyaml.application != original_application:
    msg += ' (was: %s)' % original_application
  msg += '; version: %s' % appyaml.version
  if appyaml.version != original_version:
    msg += ' (was: %s)' % original_version
  msg += '.'
  StatusUpdate(msg)
  return appyaml
2065 def _ParseYamlFile(self, basepath, basename, parser):
2066 """Parses the a yaml file.
2068 Args:
2069 basepath: the directory of the application.
2070 basename: the base name of the file (with the '.yaml' stripped off).
2071 parser: the function or method used to parse the file.
2073 Returns:
2074 A single parsed yaml file or None if the file does not exist.
2076 file_name = self._FindYaml(basepath, basename)
2077 if file_name is not None:
2078 fh = open(file_name, 'r')
2079 try:
2080 defns = parser(fh)
2081 finally:
2082 fh.close()
2083 return defns
2084 return None
def _ParseIndexYaml(self, basepath):
  """Parses the index.yaml file.

  Args:
    basepath: the directory of the application.

  Returns:
    A single parsed yaml file or None if the file does not exist.
  """
  parse_fn = datastore_index.ParseIndexDefinitions
  return self._ParseYamlFile(basepath, 'index', parse_fn)
def _ParseCronYaml(self, basepath):
  """Parses the cron.yaml file.

  Args:
    basepath: the directory of the application.

  Returns:
    A CronInfoExternal object or None if the file does not exist.
  """
  loader = croninfo.LoadSingleCron
  return self._ParseYamlFile(basepath, 'cron', loader)
def _ParseQueueYaml(self, basepath):
  """Parses the queue.yaml file.

  Args:
    basepath: the directory of the application.

  Returns:
    The parsed queue configuration, as returned by
    queueinfo.LoadSingleQueue, or None if the file does not exist.
  """
  return self._ParseYamlFile(basepath, 'queue', queueinfo.LoadSingleQueue)
def _ParseDosYaml(self, basepath):
  """Parses the dos.yaml file.

  Args:
    basepath: the directory of the application.

  Returns:
    A DosInfoExternal object or None if the file does not exist.
  """
  loader = dosinfo.LoadSingleDos
  return self._ParseYamlFile(basepath, 'dos', loader)
def Help(self):
  """Prints help for a specific action.

  Expects self.args[0] to contain the name of the action in question.
  Exits the program after printing the help message.
  """
  have_one_arg = (len(self.args) == 1)
  if not have_one_arg or self.args[0] not in self.actions:
    self.parser.error('Expected a single action argument. Must be one of:\n' +
                      self._GetActionDescriptions())

  chosen_action = self.actions[self.args[0]]
  self.parser, unused_options = self._MakeSpecificParser(chosen_action)
  self._PrintHelpAndExit(exit_code=0)
def Update(self):
  """Updates and deploys a new appversion.

  Uploads all application files, then pushes the optional index, cron,
  task queue and dos configurations if the corresponding yaml files are
  present.  A failed index upload is reported but does not abort the
  rest of the update.
  """
  if len(self.args) != 1:
    self.parser.error('Expected a single <directory> argument.')

  basepath = self.args[0]
  appyaml = self._ParseAppYaml(basepath)
  rpc_server = self._GetRpcServer()

  # Check for a newer SDK before deploying.
  updatecheck = self.update_check_class(rpc_server, appyaml)
  updatecheck.CheckForUpdates()

  appversion = AppVersionUpload(rpc_server, appyaml)
  appversion.DoUpload(FileIterator(basepath, appyaml.skip_files),
                      self.options.max_size,
                      lambda path: open(os.path.join(basepath, path), 'rb'))

  # Index upload errors are non-fatal: the app itself has already been
  # uploaded, so report the failure and tell the user how to retry.
  index_defs = self._ParseIndexYaml(basepath)
  if index_defs:
    index_upload = IndexDefinitionUpload(rpc_server, appyaml, index_defs)
    try:
      index_upload.DoUpload()
    except urllib2.HTTPError, e:
      StatusUpdate('Error %d: --- begin server output ---\n'
                   '%s\n--- end server output ---' %
                   (e.code, e.read().rstrip('\n')))
      print >> self.error_fh, (
          'Your app was updated, but there was an error updating your '
          'indexes. Please retry later with appcfg.py update_indexes.')

  cron_entries = self._ParseCronYaml(basepath)
  if cron_entries:
    cron_upload = CronEntryUpload(rpc_server, appyaml, cron_entries)
    cron_upload.DoUpload()

  queue_entries = self._ParseQueueYaml(basepath)
  if queue_entries:
    queue_upload = QueueEntryUpload(rpc_server, appyaml, queue_entries)
    queue_upload.DoUpload()

  dos_entries = self._ParseDosYaml(basepath)
  if dos_entries:
    dos_upload = DosEntryUpload(rpc_server, appyaml, dos_entries)
    dos_upload.DoUpload()
2190 def _UpdateOptions(self, parser):
2191 """Adds update-specific options to 'parser'.
2193 Args:
2194 parser: An instance of OptionsParser.
2196 parser.add_option('-S', '--max_size', type='int', dest='max_size',
2197 default=10485760, metavar='SIZE',
2198 help='Maximum size of a file to upload.')
def VacuumIndexes(self):
  """Deletes unused indexes."""
  if len(self.args) != 1:
    self.parser.error('Expected a single <directory> argument.')

  basepath = self.args[0]
  config = self._ParseAppYaml(basepath)

  # With no index.yaml, start from an empty definition set so every
  # server-side index is a candidate for deletion.
  index_defs = self._ParseIndexYaml(basepath)
  if index_defs is None:
    index_defs = datastore_index.IndexDefinitions()

  server = self._GetRpcServer()
  operation = VacuumIndexesOperation(server,
                                     config,
                                     self.options.force_delete)
  operation.DoVacuum(index_defs)
2218 def _VacuumIndexesOptions(self, parser):
2219 """Adds vacuum_indexes-specific options to 'parser'.
2221 Args:
2222 parser: An instance of OptionsParser.
2224 parser.add_option('-f', '--force', action='store_true', dest='force_delete',
2225 default=False,
2226 help='Force deletion without being prompted.')
def UpdateCron(self):
  """Updates any new or changed cron definitions."""
  if len(self.args) != 1:
    self.parser.error('Expected a single <directory> argument.')

  basepath = self.args[0]
  appyaml = self._ParseAppYaml(basepath)
  rpc_server = self._GetRpcServer()

  cron_entries = self._ParseCronYaml(basepath)
  if cron_entries:
    CronEntryUpload(rpc_server, appyaml, cron_entries).DoUpload()
def UpdateIndexes(self):
  """Updates indexes."""
  if len(self.args) != 1:
    self.parser.error('Expected a single <directory> argument.')

  basepath = self.args[0]
  appyaml = self._ParseAppYaml(basepath)
  rpc_server = self._GetRpcServer()

  index_defs = self._ParseIndexYaml(basepath)
  if index_defs:
    IndexDefinitionUpload(rpc_server, appyaml, index_defs).DoUpload()
def UpdateQueues(self):
  """Updates any new or changed task queue definitions."""
  if len(self.args) != 1:
    self.parser.error('Expected a single <directory> argument.')

  basepath = self.args[0]
  appyaml = self._ParseAppYaml(basepath)
  rpc_server = self._GetRpcServer()

  queue_entries = self._ParseQueueYaml(basepath)
  if queue_entries:
    QueueEntryUpload(rpc_server, appyaml, queue_entries).DoUpload()
def UpdateDos(self):
  """Updates any new or changed dos definitions."""
  if len(self.args) != 1:
    self.parser.error('Expected a single <directory> argument.')

  basepath = self.args[0]
  appyaml = self._ParseAppYaml(basepath)
  rpc_server = self._GetRpcServer()

  dos_entries = self._ParseDosYaml(basepath)
  if dos_entries:
    DosEntryUpload(rpc_server, appyaml, dos_entries).DoUpload()
def Rollback(self):
  """Does a rollback of any existing transaction for this app version."""
  if len(self.args) != 1:
    self.parser.error('Expected a single <directory> argument.')

  appyaml = self._ParseAppYaml(self.args[0])
  appversion = AppVersionUpload(self._GetRpcServer(), appyaml)
  # Flag an in-progress transaction so Rollback() will act on it.
  appversion.in_transaction = True
  appversion.Rollback()
def SetDefaultVersion(self):
  """Sets the default version."""
  if len(self.args) != 1:
    self.parser.error('Expected a single <directory> argument.')

  appyaml = self._ParseAppYaml(self.args[0])
  DefaultVersionSet(self._GetRpcServer(), appyaml).SetVersion()
def RequestLogs(self):
  """Write request logs to a file."""
  if len(self.args) != 2:
    self.parser.error(
        'Expected a <directory> argument and an <output_file> argument.')
  severity = self.options.severity
  if severity is not None and not 0 <= severity <= MAX_LOG_LEVEL:
    self.parser.error(
        'Severity range is 0 (DEBUG) through %s (CRITICAL).' % MAX_LOG_LEVEL)

  # Default to one day of logs, or all available logs (0) when appending.
  if self.options.num_days is None:
    self.options.num_days = 0 if self.options.append else 1

  try:
    end_date = self._ParseEndDate(self.options.end_date)
  except (TypeError, ValueError):
    self.parser.error('End date must be in the format YYYY-MM-DD.')

  basepath = self.args[0]
  appyaml = self._ParseAppYaml(basepath)
  rpc_server = self._GetRpcServer()
  downloader = LogsRequester(rpc_server, appyaml, self.args[1],
                             self.options.num_days,
                             self.options.append,
                             self.options.severity,
                             end_date,
                             self.options.vhost,
                             self.options.include_vhost,
                             self.options.include_all)
  downloader.DownloadLogs()
2338 def _ParseEndDate(self, date, time_func=time.time):
2339 """Translates an ISO 8601 date to a date object.
2341 Args:
2342 date: A date string as YYYY-MM-DD.
2343 time_func: time.time() function for testing.
2345 Returns:
2346 A date object representing the last day of logs to get.
2347 If no date is given, returns today in the US/Pacific timezone.
2349 if not date:
2350 return PacificDate(time_func())
2351 return datetime.date(*[int(i) for i in date.split('-')])
2353 def _RequestLogsOptions(self, parser):
2354 """Adds request_logs-specific options to 'parser'.
2356 Args:
2357 parser: An instance of OptionsParser.
2359 parser.add_option('-n', '--num_days', type='int', dest='num_days',
2360 action='store', default=None,
2361 help='Number of days worth of log data to get. '
2362 'The cut-off point is midnight US/Pacific. '
2363 'Use 0 to get all available logs. '
2364 'Default is 1, unless --append is also given; '
2365 'then the default is 0.')
2366 parser.add_option('-a', '--append', dest='append',
2367 action='store_true', default=False,
2368 help='Append to existing file.')
2369 parser.add_option('--severity', type='int', dest='severity',
2370 action='store', default=None,
2371 help='Severity of app-level log messages to get. '
2372 'The range is 0 (DEBUG) through 4 (CRITICAL). '
2373 'If omitted, only request logs are returned.')
2374 parser.add_option('--vhost', type='string', dest='vhost',
2375 action='store', default=None,
2376 help='The virtual host of log messages to get. '
2377 'If omitted, all log messages are returned.')
2378 parser.add_option('--include_vhost', dest='include_vhost',
2379 action='store_true', default=False,
2380 help='Include virtual host in log messages.')
2381 parser.add_option('--include_all', dest='include_all',
2382 action='store_true', default=None,
2383 help='Include everything in log messages.')
2384 parser.add_option('--end_date', dest='end_date',
2385 action='store', default='',
2386 help='End date (as YYYY-MM-DD) of period for log data. '
2387 'Defaults to today.')
def CronInfo(self, now=None, output=sys.stdout):
  """Displays information about cron definitions.

  For each job in cron.yaml, prints its description, URL and schedule,
  followed by the next self.options.num_runs scheduled run times.

  Args:
    now: used for testing.
    output: Used for testing.
  """
  if len(self.args) != 1:
    self.parser.error('Expected a single <directory> argument.')
  if now is None:
    now = datetime.datetime.now()

  basepath = self.args[0]
  cron_entries = self._ParseCronYaml(basepath)
  if cron_entries and cron_entries.cron:
    for entry in cron_entries.cron:
      description = entry.description
      if not description:
        description = '<no description>'
      print >>output, '\n%s:\nURL: %s\nSchedule: %s' % (description,
                                                        entry.url,
                                                        entry.schedule)
      # Expand the groc schedule expression into concrete run times
      # relative to 'now'.
      schedule = groctimespecification.GrocTimeSpecification(entry.schedule)
      matches = schedule.GetMatches(now, self.options.num_runs)
      for match in matches:
        print >>output, '%s, %s from now' % (
            match.strftime('%Y-%m-%d %H:%M:%S'), match - now)
2417 def _CronInfoOptions(self, parser):
2418 """Adds cron_info-specific options to 'parser'.
2420 Args:
2421 parser: An instance of OptionsParser.
2423 parser.add_option('-n', '--num_runs', type='int', dest='num_runs',
2424 action='store', default=5,
2425 help='Number of runs of each cron job to display'
2426 'Default is 5')
2428 def _CheckRequiredLoadOptions(self):
2429 """Checks that upload/download options are present."""
2430 for option in ['filename',]:
2431 if getattr(self.options, option) is None:
2432 self.parser.error('Option \'%s\' is required.' % option)
2433 if not self.options.url:
2434 self.parser.error('You must have google.appengine.ext.remote_api.handler '
2435 'assigned to an endpoint in app.yaml, or provide '
2436 'the url of the handler via the \'url\' option.')
def InferRemoteApiUrl(self, appyaml):
  """Uses app.yaml to determine the remote_api endpoint.

  Args:
    appyaml: A parsed app.yaml file.

  Returns:
    The url of the remote_api endpoint as a string, or None if no
    handler maps to the remote_api script.
  """
  handler_suffix = 'remote_api/handler.py'
  app_id = appyaml.application
  for handler in appyaml.handlers:
    script = getattr(handler, 'script', None)
    if script and script.endswith(handler_suffix):
      server = self.options.server
      # Production apps are reached via their appspot.com domain.
      if server == 'appengine.google.com':
        return 'http://%s.appspot.com%s' % (app_id, handler.url)
      return 'http://%s%s' % (server, handler.url)
  return None
def RunBulkloader(self, arg_dict):
  """Invokes the bulkloader with the given keyword arguments.

  Exits the process with the bulkloader's return code, or with status 1
  if sqlite3 is unavailable.

  Args:
    arg_dict: Dictionary of arguments to pass to bulkloader.Run().
  """
  try:
    # Capability probe only: the bulkloader needs sqlite3 for its
    # progress database, so fail fast with a clear message if the
    # module is missing.
    import sqlite3
  except ImportError:
    logging.error('upload_data action requires SQLite3 and the python '
                  'sqlite3 module (included in python since 2.5).')
    sys.exit(1)

  sys.exit(bulkloader.Run(arg_dict))
def _SetupLoad(self):
  """Performs common verification and set up for upload and download."""
  if len(self.args) != 1 and not self.options.url:
    self.parser.error('Expected either --url or a single <directory> '
                      'argument.')

  if len(self.args) == 1:
    basepath = self.args[0]
    appyaml = self._ParseAppYaml(basepath)

    # The bulkloader needs the application id; take it from app.yaml
    # (already adjusted for any --application override).
    self.options.app_id = appyaml.application

    if not self.options.url:
      url = self.InferRemoteApiUrl(appyaml)
      if url is not None:
        self.options.url = url

  self._CheckRequiredLoadOptions()

  if self.options.batch_size < 1:
    self.parser.error('batch_size must be 1 or larger.')

  # NOTE(review): 'verbosity' is not defined locally, so it must be a
  # module-level global set elsewhere in this file — confirm it is
  # initialized before any load action runs.
  if verbosity == 1:
    logging.getLogger().setLevel(logging.INFO)
    self.options.debug = False
  else:
    logging.getLogger().setLevel(logging.DEBUG)
    self.options.debug = True
2504 def _MakeLoaderArgs(self):
2505 args = dict([(arg_name, getattr(self.options, arg_name, None)) for
2506 arg_name in (
2507 'url',
2508 'filename',
2509 'batch_size',
2510 'kind',
2511 'num_threads',
2512 'bandwidth_limit',
2513 'rps_limit',
2514 'http_limit',
2515 'db_filename',
2516 'config_file',
2517 'auth_domain',
2518 'has_header',
2519 'loader_opts',
2520 'log_file',
2521 'passin',
2522 'email',
2523 'debug',
2524 'exporter_opts',
2525 'mapper_opts',
2526 'result_db_filename',
2527 'mapper_opts',
2528 'dry_run',
2529 'dump',
2530 'restore',
2531 'namespace',
2532 'create_config',
2534 args['application'] = self.options.app_id
2535 return args
def PerformDownload(self, run_fn=None):
  """Performs a datastore download via the bulkloader.

  Args:
    run_fn: Function to invoke the bulkloader, used for testing.
  """
  if run_fn is None:
    run_fn = self.RunBulkloader
  self._SetupLoad()

  StatusUpdate('Downloading data records.')

  args = self._MakeLoaderArgs()
  # With a config file this is a configured 'download'; without one it
  # falls back to a raw datastore 'dump'.
  args.update(download=bool(args['config_file']),
              has_header=False,
              map=False,
              dump=not args['config_file'],
              restore=False,
              create_config=False)

  run_fn(args)
def PerformUpload(self, run_fn=None):
  """Performs a datastore upload via the bulkloader.

  Args:
    run_fn: Function to invoke the bulkloader, used for testing.
  """
  if run_fn is None:
    run_fn = self.RunBulkloader
  self._SetupLoad()

  StatusUpdate('Uploading data records.')

  args = self._MakeLoaderArgs()
  # Without a config file, treat the input as a dump to be restored.
  args.update(download=False,
              map=False,
              dump=False,
              restore=not args['config_file'],
              create_config=False)

  run_fn(args)
def CreateBulkloadConfig(self, run_fn=None):
  """Create a bulkloader config via the bulkloader wizard.

  Args:
    run_fn: Function to invoke the bulkloader, used for testing.
  """
  if run_fn is None:
    run_fn = self.RunBulkloader
  self._SetupLoad()

  StatusUpdate('Creating bulkloader configuration.')

  args = self._MakeLoaderArgs()
  args.update(download=False,
              has_header=False,
              map=False,
              dump=False,
              restore=False,
              create_config=True)

  run_fn(args)
2602 def _PerformLoadOptions(self, parser):
2603 """Adds options common to 'upload_data' and 'download_data'.
2605 Args:
2606 parser: An instance of OptionsParser.
2608 parser.add_option('--filename', type='string', dest='filename',
2609 action='store',
2610 help='The name of the file containing the input data.'
2611 ' (Required)')
2612 parser.add_option('--kind', type='string', dest='kind',
2613 action='store',
2614 help='The kind of the entities to store.')
2615 parser.add_option('--url', type='string', dest='url',
2616 action='store',
2617 help='The location of the remote_api endpoint.')
2618 parser.add_option('--num_threads', type='int', dest='num_threads',
2619 action='store', default=10,
2620 help='Number of threads to upload records with.')
2621 parser.add_option('--batch_size', type='int', dest='batch_size',
2622 action='store', default=10,
2623 help='Number of records to post in each request.')
2624 parser.add_option('--bandwidth_limit', type='int', dest='bandwidth_limit',
2625 action='store', default=250000,
2626 help='The maximum bytes/second bandwidth for transfers.')
2627 parser.add_option('--rps_limit', type='int', dest='rps_limit',
2628 action='store', default=20,
2629 help='The maximum records/second for transfers.')
2630 parser.add_option('--http_limit', type='int', dest='http_limit',
2631 action='store', default=8,
2632 help='The maximum requests/second for transfers.')
2633 parser.add_option('--db_filename', type='string', dest='db_filename',
2634 action='store',
2635 help='Name of the progress database file.')
2636 parser.add_option('--auth_domain', type='string', dest='auth_domain',
2637 action='store', default='gmail.com',
2638 help='The name of the authorization domain to use.')
2639 parser.add_option('--log_file', type='string', dest='log_file',
2640 help='File to write bulkloader logs. If not supplied '
2641 'then a new log file will be created, named: '
2642 'bulkloader-log-TIMESTAMP.')
2643 parser.add_option('--dry_run', action='store_true',
2644 dest='dry_run', default=False,
2645 help='Do not execute any remote_api calls')
2646 parser.add_option('--namespace', type='string', dest='namespace',
2647 action='store', default='',
2648 help='Namespace to use when accessing datastore.')
2650 def _PerformUploadOptions(self, parser):
2651 """Adds 'upload_data' specific options to the 'parser' passed in.
2653 Args:
2654 parser: An instance of OptionsParser.
2656 self._PerformLoadOptions(parser)
2657 parser.add_option('--has_header', dest='has_header',
2658 action='store_true', default=False,
2659 help='Whether the first line of the input file should be'
2660 ' skipped')
2661 parser.add_option('--loader_opts', type='string', dest='loader_opts',
2662 help='A string to pass to the Loader.initialize method.')
2663 parser.add_option('--config_file', type='string', dest='config_file',
2664 action='store',
2665 help='Name of the configuration file.')
2667 def _PerformDownloadOptions(self, parser):
2668 """Adds 'download_data' specific options to the 'parser' passed in.
2670 Args:
2671 parser: An instance of OptionsParser.
2673 self._PerformLoadOptions(parser)
2674 parser.add_option('--exporter_opts', type='string', dest='exporter_opts',
2675 help='A string to pass to the Exporter.initialize method.'
2677 parser.add_option('--result_db_filename', type='string',
2678 dest='result_db_filename',
2679 action='store',
2680 help='Database to write entities to for download.')
2681 parser.add_option('--config_file', type='string', dest='config_file',
2682 action='store',
2683 help='Name of the configuration file.')
def _CreateBulkloadConfigOptions(self, parser):
  """Adds 'create_bulkloader_config' specific options to the 'parser' passed in.

  (Docstring previously said 'download_data'; this method is registered
  for the create_bulkloader_config action.)

  Args:
    parser: An instance of OptionsParser.
  """
  self._PerformLoadOptions(parser)
class Action(object):
  """Contains information about a command line action.

  Attributes:
    function: The name of a function defined on AppCfg or its subclasses
      that will perform the appropriate action.
    usage: A command line usage string.
    short_desc: A one-line description of the action.
    long_desc: A detailed description of the action.  Whitespace and
      formatting will be preserved.
    options: A function that will add extra options to a given OptionParser
      object.
  """

  def __init__(self, function, usage, short_desc, long_desc='',
               options=lambda obj, parser: None):
    """Initializer for the class attributes."""
    self.function = function
    self.usage = usage
    self.short_desc = short_desc
    self.long_desc = long_desc
    self.options = options

  def __call__(self, appcfg):
    """Invoke this Action on the specified AppCfg.

    This calls the function of the appropriate name on AppCfg, and
    respects polymorphic overrides.

    Args:
      appcfg: The appcfg to use.
    Returns:
      The result of the function call.
    """
    return getattr(appcfg, self.function)()
# Maps command line action names to the Action objects that implement
# them.  Each Action names the AppCfg method to run, its usage string,
# and an optional option-registration callback.
actions = {

    'help': Action(
        function='Help',
        usage='%prog help <action>',
        short_desc='Print help for a specific action.'),

    'update': Action(
        function='Update',
        usage='%prog [options] update <directory>',
        options=_UpdateOptions,
        short_desc='Create or update an app version.',
        long_desc="""
Specify a directory that contains all of the files required by
the app, and appcfg.py will create/update the app version referenced
in the app.yaml file at the top level of that directory. appcfg.py
will follow symlinks and recursively upload all files to the server.
Temporary or source control files (e.g. foo~, .svn/*) will be skipped."""),

    'update_cron': Action(
        function='UpdateCron',
        usage='%prog [options] update_cron <directory>',
        short_desc='Update application cron definitions.',
        long_desc="""
The 'update_cron' command will update any new, removed or changed cron
definitions from the optional cron.yaml file."""),

    'update_indexes': Action(
        function='UpdateIndexes',
        usage='%prog [options] update_indexes <directory>',
        short_desc='Update application indexes.',
        long_desc="""
The 'update_indexes' command will add additional indexes which are not currently
in production as well as restart any indexes that were not completed."""),

    'update_queues': Action(
        function='UpdateQueues',
        usage='%prog [options] update_queues <directory>',
        short_desc='Update application task queue definitions.',
        long_desc="""
The 'update_queue' command will update any new, removed or changed task queue
definitions from the optional queue.yaml file."""),

    'update_dos': Action(
        function='UpdateDos',
        usage='%prog [options] update_dos <directory>',
        short_desc='Update application dos definitions.',
        long_desc="""
The 'update_dos' command will update any new, removed or changed dos
definitions from the optional dos.yaml file."""),

    'vacuum_indexes': Action(
        function='VacuumIndexes',
        usage='%prog [options] vacuum_indexes <directory>',
        options=_VacuumIndexesOptions,
        short_desc='Delete unused indexes from application.',
        long_desc="""
The 'vacuum_indexes' command will help clean up indexes which are no longer
in use. It does this by comparing the local index configuration with
indexes that are actually defined on the server. If any indexes on the
server do not exist in the index configuration file, the user is given the
option to delete them."""),

    'rollback': Action(
        function='Rollback',
        usage='%prog [options] rollback <directory>',
        short_desc='Rollback an in-progress update.',
        long_desc="""
The 'update' command requires a server-side transaction. Use 'rollback'
if you get an error message about another transaction being in progress
and you are sure that there is no such transaction."""),

    'request_logs': Action(
        function='RequestLogs',
        usage='%prog [options] request_logs <directory> <output_file>',
        options=_RequestLogsOptions,
        short_desc='Write request logs in Apache common log format.',
        long_desc="""
The 'request_logs' command exports the request logs from your application
to a file.  It will write Apache common log format records ordered
chronologically.  If output file is '-' stdout will be written."""),

    'cron_info': Action(
        function='CronInfo',
        usage='%prog [options] cron_info <directory>',
        options=_CronInfoOptions,
        short_desc='Display information about cron jobs.',
        long_desc="""
The 'cron_info' command will display the next 'number' runs (default 5) for
each cron job defined in the cron.yaml file."""),

    'upload_data': Action(
        function='PerformUpload',
        usage='%prog [options] upload_data <directory>',
        options=_PerformUploadOptions,
        short_desc='Upload data records to datastore.',
        long_desc="""
The 'upload_data' command translates input records into datastore entities and
uploads them into your application's datastore."""),

    'download_data': Action(
        function='PerformDownload',
        usage='%prog [options] download_data <directory>',
        options=_PerformDownloadOptions,
        short_desc='Download entities from datastore.',
        long_desc="""
The 'download_data' command downloads datastore entities and writes them to
file as CSV or developer defined format."""),

    'create_bulkloader_config': Action(
        function='CreateBulkloadConfig',
        usage='%prog [options] create_bulkload_config <directory>',
        options=_CreateBulkloadConfigOptions,
        short_desc='Create a bulkloader.yaml from a running application.',
        long_desc="""
The 'create_bulkloader_config' command creates a bulkloader.yaml configuration
template for use with upload_data or download_data."""),

    'set_default_version': Action(
        function='SetDefaultVersion',
        usage='%prog [options] set_default_version <directory>',
        short_desc='Set the default (serving) version.',
        long_desc="""
The 'set_default_version' command sets the default (serving) version of the app.
Defaults to using the version specified in app.yaml; use the --version flag to
override this."""),

}
def main(argv):
  """Command line entry point: runs AppCfgApp and exits with its status."""
  logging.basicConfig(format=('%(asctime)s %(levelname)s %(filename)s:'
                              '%(lineno)s %(message)s '))
  try:
    exit_status = AppCfgApp(argv).Run()
    if exit_status:
      sys.exit(exit_status)
  except KeyboardInterrupt:
    # Ctrl-C: report the interruption and exit non-zero.
    StatusUpdate('Interrupted.')
    sys.exit(1)
# Script entry point: run main() only when executed directly.
if __name__ == '__main__':
  main(sys.argv)