From b05ce89ead637734410b839c3067275f9df1c6a1 Mon Sep 17 00:00:00 2001 From: "richmond@google.com" Date: Wed, 12 Jun 2013 23:19:38 +0000 Subject: [PATCH] App Engine Python SDK version 1.8.1 git-svn-id: http://googleappengine.googlecode.com/svn/trunk@356 80f5ef21-4148-0410-bacc-cfb02402ada8 --- python/RELEASE_NOTES | 99 +- python/VERSION | 4 +- python/demos/php/minishell/app.yaml | 24 + python/demos/php/minishell/frontpage.php | 90 + python/demos/php/minishell/php.ini | 2 + .../php/minishell/reset.php} | 14 +- python/demos/php/minishell/shell.php | 190 + python/demos/php/minishell/static/shell.js | 209 + python/demos/php/minishell/static/style.css | 70 + python/demos/{ => python}/guestbook/app.yaml | 0 python/demos/{ => python}/guestbook/guestbook.py | 0 python/google/appengine/api/apiproxy_stub.py | 5 + python/google/appengine/api/apiproxy_stub_map.py | 8 + .../appengine/api/app_identity/app_identity.py | 10 +- .../api/app_identity/app_identity_service_pb.py | 42 +- .../api/app_identity/app_identity_stub.py | 4 + python/google/appengine/api/appinfo.py | 35 +- python/google/appengine/api/blobstore/blobstore.py | 5 +- .../appengine/api/blobstore/blobstore_stub.py | 63 +- .../appengine/api/capabilities/capability_stub.py | 2 + .../appengine/api/channel/channel_service_pb.py | 396 +- .../appengine/api/channel/channel_service_stub.py | 2 + .../appengine/api/files/file_service_stub.py | 63 +- python/google/appengine/api/images/images_stub.py | 34 +- .../appengine/api/logservice/logservice_stub.py | 2 + python/google/appengine/api/mail.py | 1 + python/google/appengine/api/mail_stub.py | 2 + .../google/appengine/api/memcache/memcache_stub.py | 2 + .../appengine/api/remote_socket/_remote_socket.py | 39 +- .../api/remote_socket/_remote_socket_stub.py | 107 +- python/google/appengine/api/search/search.py | 11 +- .../appengine/api/search/search_service_pb.py | 4 +- .../appengine/api/search/simple_search_stub.py | 33 +- .../api/search/stub/expression_evaluator.py | 2 +- .../google/appengine/api/servers/servers_stub.py | 1 + python/google/appengine/api/urlfetch_stub.py | 2 + python/google/appengine/api/user_service_stub.py | 2 + .../google/appengine/api/xmpp/xmpp_service_stub.py | 2 + python/google/appengine/datastore/datastore_pb.py | 51 +- .../google/appengine/datastore/datastore_query.py | 5 +- python/google/appengine/datastore/datastore_rpc.py | 188 +- .../google/appengine/datastore/datastore_v4a_pb.py | 6797 ++++++++++++++++++++ python/google/appengine/datastore/entity_v4_pb.py | 1529 +++++ .../appengine/ext/analytics/static/analytics_js.js | 49 +- .../google/appengine/ext/appstats/datamodel_pb.py | 4 +- python/google/appengine/ext/appstats/recording.py | 8 - .../appengine/ext/appstats/static/appstats_js.js | 165 +- python/google/appengine/ext/blobstore/blobstore.py | 2 + .../ext/cloudstorage/cloudstorage_stub.py | 169 +- python/google/appengine/ext/cloudstorage/common.py | 40 +- .../appengine/ext/cloudstorage/stub_dispatcher.py | 99 +- .../ext/datastore_admin/backup_handler.py | 2 +- .../appengine/ext/datastore_admin/copy_handler.py | 2 +- .../ext/datastore_admin/static/css/compiled.css | 2 +- .../ext/datastore_admin/static/js/compiled.js | 37 +- .../google/appengine/ext/datastore_admin/utils.py | 80 +- python/google/appengine/ext/db/__init__.py | 6 +- .../appengine/ext/endpoints/api_backend_service.py | 34 +- .../google/appengine/ext/endpoints/api_config.py | 275 +- .../appengine/ext/endpoints/message_parser.py | 2 +- .../appengine/ext/endpoints/users_id_token.py | 139 +- 
.../google/appengine/ext/mapreduce/base_handler.py | 51 +- python/google/appengine/ext/mapreduce/handlers.py | 316 +- .../appengine/ext/mapreduce/input_readers.py | 12 +- .../google/appengine/ext/mapreduce/key_ranges.py | 5 +- python/google/appengine/ext/mapreduce/model.py | 171 +- .../appengine/ext/mapreduce/property_range.py | 25 +- .../google/appengine/ext/mapreduce/test_support.py | 62 +- python/google/appengine/ext/mapreduce/util.py | 139 +- python/google/appengine/ext/ndb/__init__.py | 2 +- python/google/appengine/ext/ndb/google_imports.py | 7 +- python/google/appengine/ext/ndb/model.py | 105 +- python/google/appengine/ext/ndb/query.py | 170 +- .../ext/remote_api/remote_api_services.py | 11 +- python/google/appengine/tools/appcfg.py | 64 +- python/google/appengine/tools/dev-channel-js.js | 390 +- .../appengine/tools/devappserver2/api_server.py | 31 +- .../appengine/tools/devappserver2/blob_download.py | 60 +- .../tools/devappserver2/blob_download_test.py | 20 +- .../appengine/tools/devappserver2/blob_upload.py | 111 +- .../tools/devappserver2/blob_upload_test.py | 33 +- .../appengine/tools/devappserver2/devappserver2.py | 16 +- .../devappserver2/endpoints/endpoints_server.py | 80 +- .../endpoints/endpoints_server_regtest.py | 28 + .../endpoints/endpoints_server_test.py | 107 +- .../tools/devappserver2/endpoints/errors.py | 217 +- .../endpoints/generated_error_info.py | 78 + .../endpoints/testdata/test_service.py | 18 +- .../tools/devappserver2/go_application.py | 1 + .../appengine/tools/devappserver2/http_runtime.py | 99 +- .../tools/devappserver2/http_runtime_test.py | 157 +- .../tools/devappserver2/php/check_environment.php | 8 + .../appengine/tools/devappserver2/php/runtime.py | 23 +- .../appengine/tools/devappserver2/php_runtime.py | 2 +- .../tools/devappserver2/python/sandbox.py | 9 +- .../google/appengine/tools/devappserver2/server.py | 42 +- python/google/appengine/tools/devappserver2/tee.py | 48 + .../appengine/tools/devappserver2/tee_test.py | 53 + .../appengine/tools/devappserver2/wsgi_server.py | 44 +- .../tools/devappserver2/wsgi_server_test.py | 21 + python/google/net/proto2/proto/descriptor_pb2.py | 119 +- .../google/net/proto2/python/public/descriptor.py | 17 +- .../net/proto2/python/public/text_encoding.py | 72 + .../google/net/proto2/python/public/text_format.py | 102 +- python/lib/cacerts/urlfetch_cacerts.txt | 1480 +++-- python/lib/enum/enum/enum.blueprint | 15 + python/lib/grizzled/grizzled/grizzled.blueprint | 15 + python/lib/prettytable/prettytable/v0_7_2/BUILD | 11 + python/lib/prettytable/prettytable/v0_7_2/LICENSE | 30 + python/lib/prettytable/prettytable/v0_7_2/README | 498 ++ .../prettytable/prettytable/v0_7_2/README.google | 16 + .../lib/prettytable/prettytable/v0_7_2/__init__.py | 1475 +++++ python/lib/protorpc/protorpc/definition.py | 7 + python/lib/protorpc/protorpc/descriptor.py | 9 + python/lib/protorpc/protorpc/google_imports.py | 18 + python/lib/protorpc/protorpc/messages.py | 10 +- python/lib/protorpc/protorpc/protobuf.py | 5 +- python/lib/protorpc/protorpc/protojson.py | 8 +- python/lib/protorpc/protorpc/protourlencode.py | 5 +- python/lib/protorpc/protorpc/registry.py | 5 +- python/lib/protorpc/protorpc/webapp/forms.py | 6 +- .../protorpc/protorpc/webapp/service_handlers.py | 15 +- python/lib/protorpc/protorpc/wsgi/service.py | 5 - python/lib/protorpc/protorpc/wsgi/util.py | 1 - python/lib/sqlcmd/sqlcmd/sqlcmd.blueprint | 15 + .../api/app_identity/AppIdentityException.php | 2 +- .../api/app_identity/AppIdentityService.php | 94 +- 
.../api/app_identity/AppIdentityServiceTest.php | 171 +- .../api/app_identity/PublicCertificate.php | 2 +- .../api/app_identity/app_identity_service_pb.php | 39 + .../appengine/api/capabilities/Capability.php | 90 - .../appengine/api/capabilities/CapabilityTest.php | 233 - .../api/capabilities/capability_service_pb.php | 384 -- .../CloudStorageException.php} | 4 +- .../CloudStorageTools.php} | 287 +- .../CloudStorageToolsTest.php} | 374 +- .../appengine/api/images/images_service_pb.php | 2880 +++++++++ .../sdk/google/appengine/api/mail/AdminMessage.php | 24 +- .../sdk/google/appengine/api/mail/BaseMessage.php | 92 +- .../php/sdk/google/appengine/api/mail/Message.php | 45 +- .../sdk/google/appengine/api/mail/MessageTest.php | 8 + .../google/appengine/api/taskqueue/PushTask.php | 52 +- .../google/appengine/api/urlfetch_service_pb.php | 1103 ++++ .../google/appengine/api/users/NotAllowedError.php | 25 - .../appengine/api/users/RedirectTooLongError.php | 25 - python/php/sdk/google/appengine/api/users/User.php | 91 +- .../sdk/google/appengine/api/users/UserService.php | 74 +- .../google/appengine/api/users/UserServiceTest.php | 45 +- .../sdk/google/appengine/api/users/UserTest.php | 7 +- .../api/users/{Error.php => UsersException.php} | 4 +- .../sdk/google/appengine/base/capabilities_pb.php | 489 -- .../google/appengine/datastore/datastore_v3_pb.php | 43 + .../cloud_storage_streams/CloudStorageClient.php | 340 + .../CloudStorageDeleteClient.php | 55 + .../CloudStorageReadClient.php | 281 + .../CloudStorageStreamWrapper.php | 275 + .../CloudStorageStreamWrapperTest.php | 710 ++ .../CloudStorageUrlStatClient.php | 87 + .../CloudStorageWriteClient.php | 239 + .../ext/cloud_storage_streams/HttpResponse.php | 67 + .../ext/session/CloudSqlSessionHandler.php | 267 - .../ext/session/CloudSqlSessionHandlerTest.php | 112 - .../ext/session/MemcacheSessionHandler.php | 6 +- .../php/sdk/google/appengine/runtime/Memcache.php | 16 +- .../php/sdk/google/appengine/runtime/Memcached.php | 74 +- python/php/sdk/google/appengine/runtime/Setup.php | 13 + .../sdk/google/appengine/testing/ApiProxyMock.php | 3 + .../UserNotFoundError.php => util/string_util.php} | 20 +- 168 files changed, 22608 insertions(+), 4705 deletions(-) create mode 100644 python/demos/php/minishell/app.yaml create mode 100644 python/demos/php/minishell/frontpage.php create mode 100644 python/demos/php/minishell/php.ini rename python/{php/sdk/google/appengine/api/capabilities/UnknownCapabilityError.php => demos/php/minishell/reset.php} (76%) create mode 100644 python/demos/php/minishell/shell.php create mode 100644 python/demos/php/minishell/static/shell.js create mode 100644 python/demos/php/minishell/static/style.css rename python/demos/{ => python}/guestbook/app.yaml (100%) rename python/demos/{ => python}/guestbook/guestbook.py (100%) create mode 100644 python/google/appengine/datastore/datastore_v4a_pb.py create mode 100644 python/google/appengine/datastore/entity_v4_pb.py rewrite python/google/appengine/ext/analytics/static/analytics_js.js (83%) rewrite python/google/appengine/ext/appstats/static/appstats_js.js (73%) rewrite python/google/appengine/ext/datastore_admin/static/js/compiled.js (95%) create mode 100644 python/google/appengine/tools/devappserver2/endpoints/generated_error_info.py create mode 100644 python/google/appengine/tools/devappserver2/tee.py create mode 100644 python/google/appengine/tools/devappserver2/tee_test.py create mode 100644 python/google/net/proto2/python/public/text_encoding.py create mode 100644 
python/lib/enum/enum/enum.blueprint create mode 100644 python/lib/grizzled/grizzled/grizzled.blueprint create mode 100644 python/lib/prettytable/prettytable/v0_7_2/BUILD create mode 100644 python/lib/prettytable/prettytable/v0_7_2/LICENSE create mode 100644 python/lib/prettytable/prettytable/v0_7_2/README create mode 100644 python/lib/prettytable/prettytable/v0_7_2/README.google create mode 100644 python/lib/prettytable/prettytable/v0_7_2/__init__.py create mode 100644 python/lib/protorpc/protorpc/google_imports.py create mode 100644 python/lib/sqlcmd/sqlcmd/sqlcmd.blueprint delete mode 100644 python/php/sdk/google/appengine/api/capabilities/Capability.php delete mode 100644 python/php/sdk/google/appengine/api/capabilities/CapabilityTest.php delete mode 100644 python/php/sdk/google/appengine/api/capabilities/capability_service_pb.php rename python/php/sdk/google/appengine/api/{blobstore/BlobstoreException.php => cloud_storage/CloudStorageException.php} (87%) rename python/php/sdk/google/appengine/api/{blobstore/BlobstoreService.php => cloud_storage/CloudStorageTools.php} (51%) rename python/php/sdk/google/appengine/api/{blobstore/BlobstoreServiceTest.php => cloud_storage/CloudStorageToolsTest.php} (50%) create mode 100644 python/php/sdk/google/appengine/api/images/images_service_pb.php create mode 100644 python/php/sdk/google/appengine/api/urlfetch_service_pb.php delete mode 100644 python/php/sdk/google/appengine/api/users/NotAllowedError.php delete mode 100644 python/php/sdk/google/appengine/api/users/RedirectTooLongError.php rename python/php/sdk/google/appengine/api/users/{Error.php => UsersException.php} (90%) delete mode 100644 python/php/sdk/google/appengine/base/capabilities_pb.php create mode 100644 python/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageClient.php create mode 100644 python/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageDeleteClient.php create mode 100644 python/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageReadClient.php create mode 100644 python/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapper.php create mode 100644 python/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapperTest.php create mode 100644 python/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageUrlStatClient.php create mode 100644 python/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageWriteClient.php create mode 100644 python/php/sdk/google/appengine/ext/cloud_storage_streams/HttpResponse.php delete mode 100644 python/php/sdk/google/appengine/ext/session/CloudSqlSessionHandler.php delete mode 100644 python/php/sdk/google/appengine/ext/session/CloudSqlSessionHandlerTest.php rename python/php/sdk/google/appengine/{api/users/UserNotFoundError.php => util/string_util.php} (56%) diff --git a/python/RELEASE_NOTES b/python/RELEASE_NOTES index b3f3ca97..f6bdeb55 100644 --- a/python/RELEASE_NOTES +++ b/python/RELEASE_NOTES @@ -1,7 +1,104 @@ Copyright 2008 Google Inc. All rights reserved. -App Engine Python SDK - Release Notes +App Engine SDK - Release Notes + +Version 1.8.1 + +Python +=============================== +- The Task Queue async API is now a GA feature. The asynchronous methods + improve utilization by allowing your app to add, lease and delete multiple + tasks in parallel. +- Cloud Console projects are now created by default whenever a new App Engine + app is created. This is a Preview feature. 
+- In an upcoming release the Experimental Google Cloud Storage API Functions
+  will be decommissioned. This API and its Experimental status are documented
+  at the following link:
+  https://developers.google.com/appengine/docs/python/googlestorage/functions
+- The Google Cloud Storage library will replace the Google Cloud Storage API
+  and is now available as a Preview feature. More information can be found at
+  https://code.google.com/p/appengine-gcs-client/
+- Bandwidth between App Engine and Google Cloud Storage is currently free of
+  charge (this may change in the future for certain levels of service).
+- The Search API has graduated from Experimental to Preview. Apps that have
+  billing enabled can exceed the free quota levels and will be charged for
+  usage above these levels.
+- The estimated number of search results is only accurate if it is less than
+  or equal to the number of results requested. This default can be overridden
+  by setting the number_found_accuracy QueryOption in the Search API.
+- Dates, atoms, and number fields can now be found by searching without a
+  field restriction in the Search API.
+- A quoted empty string now returns atom fields with empty values for the
+  Search API.
+- Snippet and count functions are no longer allowed in sort expressions for
+  the Search API.
+- The Search API now has improved error messages for user errors and internal
+  errors.
+- App Engine now supports deployment of applications via the Git tool. Once
+  you complete the initial setup steps, you will be ready to deploy apps with
+  the same ease with which you push code to a git repository using
+  "% git push appengine master". This is a Limited Preview feature. You may
+  request access via the following link:
+  https://docs.google.com/a/google.com/forms/d/
+  1aLaAOZb_cXFiVqzLAMvPV9kh0FmvlLRUbwD-LBbLuUI/viewform
+- The Datastore now assigns scattered auto ids by default. Legacy auto ids
+  are still available via the 'auto_id_policy' option in app.yaml.
+- The Sockets API now allows client code to call get/set options against
+  sockets. Previously, such calls raised "Not Implemented" exceptions. For
+  supported options, calls to getsockopt will return a mock value and calls
+  to setsockopt will be silently ignored. Errors will continue to be raised
+  for unsupported options. The currently supported options are: SO_KEEPALIVE,
+  SO_DEBUG, TCP_NODELAY, SO_LINGER, SO_OOBINLINE, SO_SNDBUF, SO_RCVBUF, and
+  SO_REUSEADDR.
+- Updated the skip_files documentation to reflect the new default.
+  https://developers.google.com/appengine/docs/python/config/
+  appconfig#Skipping_Files
+- The ndb library now supports distinct queries. This is a Preview feature.
+  https://code.google.com/p/appengine-ndb-experiment/issues/detail?id=229
+- Fixed an issue with Google Cloud Storage objects not being compatible across
+  various APIs on the dev_appserver.
+- Fixed an issue with the namespace not being displayed when a user attempts
+  to select a namespace in the Admin Console.
+  https://code.google.com/p/googleappengine/issues/detail?id=8164
+- Fixed an issue in the Admin Console Logs page to correctly display 'Until'
+  instead of 'Since' for logs search criteria.
+  https://code.google.com/p/googleappengine/issues/detail?id=8659
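A few of the Python items above are easiest to grasp as code. The sketch below
is an editorial illustration, not part of the patch: the pull queue name
'work' and the Greeting model are made-up, and the socket comments simply
restate the documented behavior (setsockopt accepted but silently ignored,
getsockopt returning a mock value). The ndb query shows the new Preview
distinct option.

# Hedged sketch, not from the SDK: Task Queue async calls, socket options,
# and an ndb distinct query as described in the release notes above.
import socket

from google.appengine.api import taskqueue
from google.appengine.ext import ndb


class Greeting(ndb.Model):          # illustrative model, not in the SDK
  author = ndb.StringProperty()
  content = ndb.TextProperty()


def example():
  # Task Queue async API (now GA): overlap several adds, then a lease.
  queue = taskqueue.Queue('work')   # 'work' is an assumed pull queue name
  rpcs = [queue.add_async(taskqueue.Task(payload=p, method='PULL'))
          for p in ('a', 'b', 'c')]
  added = [rpc.get_result() for rpc in rpcs]
  leased = queue.lease_tasks_async(lease_seconds=60, max_tasks=10).get_result()

  # Sockets API: supported options no longer raise "Not Implemented".
  s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
  s.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)   # silently ignored
  keepalive = s.getsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE)  # mock value

  # ndb distinct queries (Preview): one result per unique author value.
  authors = Greeting.query(projection=[Greeting.author], distinct=True).fetch(20)
  return added, leased, keepalive, authors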
+
+PHP
+===============================
+- Enabled the Mcrypt extension.
+  https://code.google.com/p/googleappengine/issues/detail?id=9332
+- Enabled the iconv extension.
+  https://code.google.com/p/googleappengine/issues/detail?id=9340
+- Enabled the mbstring extension.
+  https://code.google.com/p/googleappengine/issues/detail?id=9342
+- It is now possible to include/require files from Google Cloud Storage using
+  the google_app_engine.allow_include_gs_buckets ini setting.
+- Basic support for url_stat() with Cloud Storage Streams is now provided.
+  This means that many standard filesystem functions such as is_file(),
+  is_readable(), is_writeable(), filesize(), etc. now work with Cloud Storage
+  objects.
+- Honor the default_stream_context for Google Cloud Storage streams when
+  writing new objects.
+- Added a deleteImageServingUrl() method to the CloudStorageTools class.
+- The createGsKey() method of CloudStorageTools has been made private.
+- Fixed a bug where $_SERVER['PHP_SELF'] was including the query string.
+- App Engine now supports deployment of applications via the Git tool. Once
+  you complete the initial setup steps, you will be ready to deploy apps with
+  the same ease with which you push code to a git repository using
+  "% git push appengine master". This is a Limited Preview feature. You may
+  request access via the following link:
+  https://docs.google.com/a/google.com/forms/d/
+  1aLaAOZb_cXFiVqzLAMvPV9kh0FmvlLRUbwD-LBbLuUI/viewform
+- Updated the skip_files documentation to reflect the new default.
+  https://developers.google.com/appengine/docs/php/config/
+  appconfig#Skipping_Files
+- Removed the NotAllowedError, RedirectTooLongError, and UserNotFoundError
+  exception classes from the Users API. Their use is replaced by throwing a
+  UsersException or InvalidArgumentException as appropriate.
+- Removed the Capability API.
+- Fixed an issue with Google Cloud Storage objects not being compatible across
+  various APIs on the dev_appserver.

 Version 1.8.0
 ===============================
diff --git a/python/VERSION b/python/VERSION
index 0e33fa52..ae63f808 100644
--- a/python/VERSION
+++ b/python/VERSION
@@ -1,5 +1,5 @@
-release: "1.8.0"
-timestamp: 1367368689
+release: "1.8.1"
+timestamp: 1368601370
 api_versions: ['1']
 supported_api_versions:
   python:
diff --git a/python/demos/php/minishell/app.yaml b/python/demos/php/minishell/app.yaml
new file mode 100644
index 00000000..dfe57b84
--- /dev/null
+++ b/python/demos/php/minishell/app.yaml
@@ -0,0 +1,24 @@
+application: shell-php
+version: 1
+runtime: php
+api_version: 1
+threadsafe: false
+
+handlers:
+- url: /static
+  static_dir: static
+  expiration: 30d
+
+- url: /favicon.ico
+  static_files: static/favicon.ico
+  upload: static/favicon.ico
+  expiration: 30d
+
+- url: /shell.do
+  script: shell.php
+
+- url: /reset.do
+  script: reset.php
+
+- url: .*
+  script: frontpage.php
diff --git a/python/demos/php/minishell/frontpage.php b/python/demos/php/minishell/frontpage.php
new file mode 100644
index 00000000..82a18e58
--- /dev/null
+++ b/python/demos/php/minishell/frontpage.php
@@ -0,0 +1,90 @@
+[frontpage.php markup did not survive extraction of this copy of the patch;
+ only the page title "Interactive Shell" and the text fragments kept below
+ are recoverable.]

+Interactive server-side PHP shell for Google App Engine.
+(signed-in user's nickname via getNickname(), with "log out" / "log in" links)
+| Reset Session | Shift-Enter for newline | Ctrl-Up/Down for history |
+ + + + + diff --git a/python/demos/php/minishell/php.ini b/python/demos/php/minishell/php.ini new file mode 100644 index 00000000..cb429f2c --- /dev/null +++ b/python/demos/php/minishell/php.ini @@ -0,0 +1,2 @@ +; Re-enable these soft disabled functions for minishell users. +google_app_engine.enable_functions = "phpinfo, php_uname, php_sapi_name" diff --git a/python/php/sdk/google/appengine/api/capabilities/UnknownCapabilityError.php b/python/demos/php/minishell/reset.php similarity index 76% rename from python/php/sdk/google/appengine/api/capabilities/UnknownCapabilityError.php rename to python/demos/php/minishell/reset.php index ac46e130..6d5656e2 100644 --- a/python/php/sdk/google/appengine/api/capabilities/UnknownCapabilityError.php +++ b/python/demos/php/minishell/reset.php @@ -15,14 +15,10 @@ * limitations under the License. */ /** + * Handles the /reset.do url. + * It deletes all the session variables, hence resetting the environment. */ +session_start(); +session_unset(); -namespace google\appengine\api\capabilities; - -/** - * Thrown when the user requests the status of a capability that does not exist. - */ - -class UnknownCapabilityError extends \Exception { -} - +header("Location: /"); diff --git a/python/demos/php/minishell/shell.php b/python/demos/php/minishell/shell.php new file mode 100644 index 00000000..4e3fedb6 --- /dev/null +++ b/python/demos/php/minishell/shell.php @@ -0,0 +1,190 @@ +locals = serialize(array()); + $this->storeGlobals(); + $this->functions = get_defined_functions(); + $this->classes = get_declared_classes(); + $this->storeConstants(); + } + + /** Stores a serialized version of the globals. */ + function storeGlobals() { + $this->globals = serialize($GLOBALS); + } + + /** Replace the globals with the serialized stored ones. */ + function loadGlobals() { + $GLOBALS = unserialize($this->globals); + } + + /** Stores a serialized version of the passed locals. */ + function storeLocals($locals) { + foreach (array("_shell_statement", + "_shell_session") as $nonLocal) { + unset($locals[$nonLocal]); + } + $this->locals = serialize($locals); + } + + /** Returns an array with the locals. */ + function getLocals() { + return unserialize($this->locals); + } + + /** + * Stores a statement if it declares a function, a class or if it is a + * use statment or a require/include statement. + */ + function storeStatementIfNeeded($statement) { + $nonSerializableTokens = array(T_CLASS, T_FUNCTION, + T_REQUIRE, T_REQUIRE_ONCE, + T_INCLUDE, T_INCLUDE_ONCE); + foreach(token_get_all("") as $token) { + if (in_array($token[0], $nonSerializableTokens)) { + array_push($this->statements, $statement); + } else if ($token[0] == T_USE) { + array_push($this->use_statements, $statement); + } + } + } + + /** Stores a serialized version of the constants. */ + function storeConstants() { + $this->constants = serialize(get_defined_constants()); + } + + /** Replace the constants with the serialized stored ones. */ + function loadConstants() { + $constants = unserialize($this->constants); + foreach(array_diff($constants, get_defined_constants()) as $constant=>$value) { + define($constant, $value); + } + } + + static function scrubOutput($output) { + return htmlentities($output); + } + + /** Evaluate all saved statements.*/ + function loadStatements() { + foreach ($this->statements as $statement) { + ob_start(['Session', 'scrubOutput']); + eval($statement); + ob_clean(); + } + } + + /** Prepend all the use statements to the given statement. 
*/ + function prependUseStatements($statement) { + return implode("\n", $this->use_statements) . "\n$statement"; + } + + /** Method to initialize user scope.*/ + function start() { + // Must goes first, or otherwise the unserialized objects will be incomplete. + $this->loadStatements(); + $this->loadGlobals(); + $this->functions = get_defined_functions(); + $this->classes = get_declared_classes(); + $this->loadConstants(); + } + + /** Method to save user scope.*/ + function end($statement, $locals) { + $this->storeGlobals(); + $this->storeLocals($locals); + $this->storeStatementIfNeeded($statement); + $this->storeConstants(); + } +} + +/** + * Handler to catch exceptions raised when evaluation the code. + * We just return the error and not the line, as they are not meaningful in this + * context. + */ +function error_handler($errno, $errstr, $errfile, $errline) { + echo $errstr, "\n"; +} + +/** + * Handler to catch fatal errors (like function not defined) and print them + * nicely. + */ +function shutdown_handler() { + $error = error_get_last(); + if($error !== NULL){ + echo $error["message"], "\n"; + } +} + +/** + * Executes a statement for the given session. + * All locals must be prefixed with _shell_, to avoid messing up with the user's + * local. + */ +function shell($_shell_statement, $_shell_session) { + $_shell_session->start(); + header("Content-Type: text/html; charset=utf-8"); + extract($_shell_session->getLocals(), EXTR_SKIP); + // Disable all error reporting, otherwise it mess with the output. + error_reporting(0); + // Errors are handled with an error handler and a fatal error handler, because + // exceptions are not catchable when evaluating code. + register_shutdown_function('shutdown_handler'); + set_error_handler('error_handler'); + ob_start(['Session', 'scrubOutput']); + eval($_shell_session->prependUseStatements($_shell_statement)); + ob_end_flush(); + $_shell_session->end($_shell_statement, get_defined_vars()); +} + +session_start(); +if (!isset($_SESSION["session"])) { + $_SESSION["session"] = new Session(); +} + +if (isset($_SESSION['token']) && ($_GET['token'] === $_SESSION['token'])) { + // Append a semi-colon just in case the statement doen't have one. An extra + // semi-colon makes no harm. + shell($_GET["statement"] . ";", $_SESSION["session"]); +} else if (!isset($_SESSION['token'])) { + syslog(LOG_ERR, 'Missing session token'); + echo "Session token missing - Please reset your session."; +} else { + syslog(LOG_ERR, 'Mismatch session token.'); + echo "Invalid session token - Please reset your session."; +} diff --git a/python/demos/php/minishell/static/shell.js b/python/demos/php/minishell/static/shell.js new file mode 100644 index 00000000..691c65d7 --- /dev/null +++ b/python/demos/php/minishell/static/shell.js @@ -0,0 +1,209 @@ +// Copyright 2007 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * @fileoverview + * Javascript code for the interactive AJAX shell. + * + TODO(skreft): add codesite url. + * Part of .... 
+ * + * Includes a function (shell.runStatement) that sends the current php + * statement in the shell prompt text box to the server, and a callback + * (shell.done) that displays the results when the XmlHttpRequest returns. + * + * Also includes cross-browser code (shell.getXmlHttpRequest) to get an + * XmlHttpRequest. + */ + +/** + * Shell namespace. + * @type {Object} + */ +var shell = {}; + +/** + * The shell history. history is an array of strings, ordered oldest to + * newest. historyCursor is the current history element that the user is on. + * + * The last history element is the statement that the user is currently + * typing. When a statement is run, it's frozen in the history, a new history + * element is added to the end of the array for the new statement, and + * historyCursor is updated to point to the new element. + * + * @type {Array} + */ +shell.history = ['']; + +/** + * See {shell.history} + * @type {number} + */ +shell.historyCursor = 0; + +/** + * A constant for the XmlHttpRequest 'done' state. + * @type Number + */ +shell.DONE_STATE = 4; + +/** + * A cross-browser function to get an XmlHttpRequest object. + * + * @this {shell} + * @return {XmlHttpRequest?} a new XmlHttpRequest. + */ +shell.getXmlHttpRequest = function() { + if (window.XMLHttpRequest) { + return new XMLHttpRequest(); + } else if (window.ActiveXObject) { + try { + return new ActiveXObject('Msxml2.XMLHTTP'); + } catch (e) { + return new ActiveXObject('Microsoft.XMLHTTP'); + } + } + + return null; +}; + +/** + * This is the prompt textarea's onkeydown handler. Depending on the key that + * was pressed, it will run the statement, navigate the history, or update the + * current statement in the history. + * + * @this {shell} + * @param {Event} event the keypress event. + * @return {Boolean} false to tell the browser not to submit the form. + */ +shell.onPromptKeyDown = function(event) { + var statement = document.getElementById('statement'); + + if (this.historyCursor == this.history.length - 1) { + // we're on the current statement. update it in the history before doing + // anything. + this.history[this.historyCursor] = statement.value; + } + + // should we pull something from the history? + if (event.ctrlKey && event.keyCode == 38 /* up arrow */) { + if (this.historyCursor > 0) { + statement.value = this.history[--this.historyCursor]; + } + return false; + } else if (event.ctrlKey && event.keyCode == 40 /* down arrow */) { + if (this.historyCursor < this.history.length - 1) { + statement.value = this.history[++this.historyCursor]; + } + return false; + } else if (!event.altKey) { + // probably changing the statement. update it in the history. + this.historyCursor = this.history.length - 1; + this.history[this.historyCursor] = statement.value; + } + + // should we submit? + if (event.keyCode == 13 /* enter */ && !event.altKey && !event.shiftKey) { + return this.runStatement(); + } +}; + +/** + * The XmlHttpRequest callback. If the request succeeds, it adds the command + * and its resulting output to the shell history div. + * + * @this {shell} + * @param {XmlHttpRequest} req the XmlHttpRequest we used to send the current + * statement to the server. 
+ */ +shell.done = function(req) { + if (req.readyState == this.DONE_STATE) { + var statement = document.getElementById('statement'); + statement.className = 'prompt'; + + // add the command to the shell output + var output = document.getElementById('output'); + + var value = statement.value; + var last_char = value[value.length - 1]; + if (last_char != ';' && last_char != '}') { + value += ';'; + } + output.value += '\n>>> ' + value; + statement.value = ''; + + // add a new history element + this.history.push(''); + this.historyCursor = this.history.length - 1; + + // add the command's result + var result = req.responseText.replace(/^\s*|\s*$/g, ''); // trim whitespace + + // unescape the result. + var e = document.createElement('div'); + e.innerHTML = result; + var unescaped = e.childNodes.length === 0 ? "" : e.childNodes[0].nodeValue; + + if (result != '') + output.value += '\n' + unescaped; + + // scroll to the bottom + output.scrollTop = output.scrollHeight; + if (output.createTextRange) { + var range = output.createTextRange(); + range.collapse(false); + range.select(); + } + } +}; + +/** + * This is the form's onsubmit handler. It sends the php statement to the + * server, and registers shell.done() as the callback to run when it returns. + * + * @this {shell} + * @return {Boolean} false to tell the browser not to submit the form. + */ +shell.runStatement = function() { + var form = document.getElementById('form'); + + // build a XmlHttpRequest + var req = this.getXmlHttpRequest(); + if (!req) { + document.getElementById('ajax-status').innerHTML = + "Your browser doesn't support AJAX. :("; + return false; + } + + req.onreadystatechange = function() { shell.done(req); }; + + // build the query parameter string + var params = ''; + for (i = 0; i < form.elements.length; i++) { + var elem = form.elements[i]; + if (elem.type != 'submit' && elem.type != 'button' && elem.id != 'caret') { + var value = escape(elem.value).replace(/\+/g, '%2B'); // escape ignores + + params += '&' + elem.name + '=' + value; + } + } + + // send the request and tell the user. + document.getElementById('statement').className = 'prompt processing'; + req.open(form.method, form.action + '?' 
+ params, true); + req.setRequestHeader('Content-type', + 'application/x-www-form-urlencoded;charset=UTF-8'); + req.send(null); + + return false; +}; diff --git a/python/demos/php/minishell/static/style.css b/python/demos/php/minishell/static/style.css new file mode 100644 index 00000000..a09ce991 --- /dev/null +++ b/python/demos/php/minishell/static/style.css @@ -0,0 +1,70 @@ +body { + font-family: monospace; + font-size: 10pt; +} + +p { + margin: 0.5em; +} + +a img { + border: none; +} + +.prompt, #output { + width: 45em; + border: 1px solid silver; + background-color: #f5f5f5; + font-size: 10pt; + margin: 0.5em; + padding: 0.5em; + padding-right: 0em; + overflow-x: hidden; +} + +#toolbar { + margin-left: 0.5em; + padding-left: 0.5em; +} + +#caret { + width: 2.5em; + margin-right: 0px; + padding-right: 0px; + border-right: 0px; +} + +#statement { + width: 43em; + margin-left: -1em; + padding-left: 0px; + border-left: 0px; + background-position: top right; + background-repeat: no-repeat; +} + +.processing { + background-image: url("/static/spinner.gif"); +} + +#ajax-status { + font-weight: bold; +} + +.message { + color: #8AD; + font-weight: bold; + font-style: italic; +} + +.error { + color: #F44; +} + +.username { + font-weight: bold; +} + +#logo { + vertical-align: middle; +} diff --git a/python/demos/guestbook/app.yaml b/python/demos/python/guestbook/app.yaml similarity index 100% rename from python/demos/guestbook/app.yaml rename to python/demos/python/guestbook/app.yaml diff --git a/python/demos/guestbook/guestbook.py b/python/demos/python/guestbook/guestbook.py similarity index 100% rename from python/demos/guestbook/guestbook.py rename to python/demos/python/guestbook/guestbook.py diff --git a/python/google/appengine/api/apiproxy_stub.py b/python/google/appengine/api/apiproxy_stub.py index c261e510..f27b6096 100644 --- a/python/google/appengine/api/apiproxy_stub.py +++ b/python/google/appengine/api/apiproxy_stub.py @@ -54,6 +54,11 @@ class APIProxyStub(object): _ACCEPTS_REQUEST_ID = False + + + + THREADSAFE = False + def __init__(self, service_name, max_request_size=MAX_REQUEST_SIZE, request_data=None): """Constructor. diff --git a/python/google/appengine/api/apiproxy_stub_map.py b/python/google/appengine/api/apiproxy_stub_map.py index 633eb5c0..614008f5 100644 --- a/python/google/appengine/api/apiproxy_stub_map.py +++ b/python/google/appengine/api/apiproxy_stub_map.py @@ -289,6 +289,14 @@ class APIProxyStubMap(object): """ return self.__stub_map.get(service, self.__default_stub) + def _CopyStubMap(self): + """Get a copy of the stub map. For testing only. + + Returns: + Get a shallow copy of the stub map. + """ + return dict(self.__stub_map) + def MakeSyncCall(self, service, call, request, response): """The APIProxy entry point. 
diff --git a/python/google/appengine/api/app_identity/app_identity.py b/python/google/appengine/api/app_identity/app_identity.py index 6b5daa20..535f64cc 100644 --- a/python/google/appengine/api/app_identity/app_identity.py +++ b/python/google/appengine/api/app_identity/app_identity.py @@ -409,7 +409,13 @@ def make_get_access_token_call(rpc, scopes, service_account_id=None): for scope in scopes: request.add_scope(scope) if service_account_id: - request.set_service_account_id(service_account_id) + if isinstance(service_account_id, (int, long)): + request.set_service_account_id(service_account_id) + elif isinstance(service_account_id, basestring): + request.set_service_account_name(service_account_id) + else: + raise TypeError() + response = app_identity_service_pb.GetAccessTokenResponse() def get_access_token_result(rpc): @@ -480,7 +486,7 @@ def get_access_token(scopes, service_account_id=None): memcache_key = _MEMCACHE_KEY_PREFIX + str(scopes) if service_account_id: - memcache_key += ',%d' % service_account_id + memcache_key += ',%s' % service_account_id memcache_value = memcache.get(memcache_key, namespace=_MEMCACHE_NAMESPACE) if memcache_value: access_token, expires_at = memcache_value diff --git a/python/google/appengine/api/app_identity/app_identity_service_pb.py b/python/google/appengine/api/app_identity/app_identity_service_pb.py index d4a5924f..f6124206 100644 --- a/python/google/appengine/api/app_identity/app_identity_service_pb.py +++ b/python/google/appengine/api/app_identity/app_identity_service_pb.py @@ -171,7 +171,7 @@ class AppIdentityServiceError(ProtocolBuffer.ProtocolMessage): _STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting.AppIdentityServiceError' _SERIALIZED_DESCRIPTOR = array.array('B') - _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8KImFwcGhvc3RpbmcuQXBwSWRlbnRpdHlTZXJ2aWNlRXJyb3JzeglFcnJvckNvZGWLAZIBB1NVQ0NFU1OYAQCMAYsBkgENVU5LTk9XTl9TQ09QRZgBCYwBiwGSAQ5CTE9CX1RPT19MQVJHRZgB6AeMAYsBkgERREVBRExJTkVfRVhDRUVERUSYAekHjAGLAZIBD05PVF9BX1ZBTElEX0FQUJgB6geMAYsBkgENVU5LTk9XTl9FUlJPUpgB6weMAYsBkgEZR0FJQU1JTlRfTk9UX0lOSVRJQUlMSVpFRJgB7AeMAYsBkgELTk9UX0FMTE9XRUSYAe0HjAGLAZIBD05PVF9JTVBMRU1FTlRFRJgB7geMAXS6Ae8KCjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8SCmFwcGhvc3Rpbmci5gEKF0FwcElkZW50aXR5U2VydmljZUVycm9yIsoBCglFcnJvckNvZGUSCwoHU1VDQ0VTUxAAEhEKDVVOS05PV05fU0NPUEUQCRITCg5CTE9CX1RPT19MQVJHRRDoBxIWChFERUFETElORV9FWENFRURFRBDpBxIUCg9OT1RfQV9WQUxJRF9BUFAQ6gcSEgoNVU5LTk9XTl9FUlJPUhDrBxIeChlHQUlBTUlOVF9OT1RfSU5JVElBSUxJWkVEEOwHEhAKC05PVF9BTExPV0VEEO0HEhQKD05PVF9JTVBMRU1FTlRFRBDuByIqChFTaWduRm9yQXBwUmVxdWVzdBIVCg1ieXRlc190b19zaWduGAEgASgMIj8KElNpZ25Gb3JBcHBSZXNwb25zZRIQCghrZXlfbmFtZRgBIAEoCRIXCg9zaWduYXR1cmVfYnl0ZXMYAiABKAwiIwohR2V0UHVibGljQ2VydGlmaWNhdGVGb3JBcHBSZXF1ZXN0IkMKEVB1YmxpY0NlcnRpZmljYXRlEhAKCGtleV9uYW1lGAEgASgJEhwKFHg1MDlfY2VydGlmaWNhdGVfcGVtGAIgASgJIo0BCiJHZXRQdWJsaWNDZXJ0aWZpY2F0ZUZvckFwcFJlc3BvbnNlEj4KF3B1YmxpY19jZXJ0aWZpY2F0ZV9saXN0GAEgAygLMh0uYXBwaG9zdGluZy5QdWJsaWNDZXJ0aWZpY2F0ZRInCh9tYXhfY2xpZW50X2NhY2hlX3RpbWVfaW5fc2Vjb25kGAIgASgDIh4KHEdldFNlcnZpY2VBY2NvdW50TmFtZVJlcXVlc3QiPQodR2V0U2VydmljZUFjY291bnROYW1lUmVzcG9uc2USHAoUc2VydmljZV9hY2NvdW50X25hbWUYASABKAkiQgoVR2V0QWNjZXNzVG9rZW5SZXF1ZXN0Eg0KBXNjb3BlGAEgAygJEhoKEnNlcnZpY2VfYWNjb3VudF9pZBgCIAEoAyJHChZHZXRBY2Nlc3NUb2tlblJlc3BvbnNlEhQKDGFjY2Vzc190b2tlbhgBIAEoCRIXCg9leHBpcmF0aW9uX3RpbWUYAiABKAMyqgMKDlNpZ25pbmdTZXJ2aWNlEk0KClNpZ25Gb3JBcHASHS5hcHBob3N0aW5nLlNpZ25Gb3JBcHBSZXF1ZXN0Gh4uYXBwaG9zdGlu
Zy5TaWduRm9yQXBwUmVzcG9uc2UiABJ+ChtHZXRQdWJsaWNDZXJ0aWZpY2F0ZXNGb3JBcHASLS5hcHBob3N0aW5nLkdldFB1YmxpY0NlcnRpZmljYXRlRm9yQXBwUmVxdWVzdBouLmFwcGhvc3RpbmcuR2V0UHVibGljQ2VydGlmaWNhdGVGb3JBcHBSZXNwb25zZSIAEm4KFUdldFNlcnZpY2VBY2NvdW50TmFtZRIoLmFwcGhvc3RpbmcuR2V0U2VydmljZUFjY291bnROYW1lUmVxdWVzdBopLmFwcGhvc3RpbmcuR2V0U2VydmljZUFjY291bnROYW1lUmVzcG9uc2UiABJZCg5HZXRBY2Nlc3NUb2tlbhIhLmFwcGhvc3RpbmcuR2V0QWNjZXNzVG9rZW5SZXF1ZXN0GiIuYXBwaG9zdGluZy5HZXRBY2Nlc3NUb2tlblJlc3BvbnNlIgBCQAokY29tLmdvb2dsZS5hcHBlbmdpbmUuYXBpLmFwcGlkZW50aXR5IAEoAkIUQXBwSWRlbnRpdHlTZXJ2aWNlUGI=")) + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8KImFwcGhvc3RpbmcuQXBwSWRlbnRpdHlTZXJ2aWNlRXJyb3JzeglFcnJvckNvZGWLAZIBB1NVQ0NFU1OYAQCMAYsBkgENVU5LTk9XTl9TQ09QRZgBCYwBiwGSAQ5CTE9CX1RPT19MQVJHRZgB6AeMAYsBkgERREVBRExJTkVfRVhDRUVERUSYAekHjAGLAZIBD05PVF9BX1ZBTElEX0FQUJgB6geMAYsBkgENVU5LTk9XTl9FUlJPUpgB6weMAYsBkgEZR0FJQU1JTlRfTk9UX0lOSVRJQUlMSVpFRJgB7AeMAYsBkgELTk9UX0FMTE9XRUSYAe0HjAGLAZIBD05PVF9JTVBMRU1FTlRFRJgB7geMAXS6AY0LCjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8SCmFwcGhvc3Rpbmci5gEKF0FwcElkZW50aXR5U2VydmljZUVycm9yIsoBCglFcnJvckNvZGUSCwoHU1VDQ0VTUxAAEhEKDVVOS05PV05fU0NPUEUQCRITCg5CTE9CX1RPT19MQVJHRRDoBxIWChFERUFETElORV9FWENFRURFRBDpBxIUCg9OT1RfQV9WQUxJRF9BUFAQ6gcSEgoNVU5LTk9XTl9FUlJPUhDrBxIeChlHQUlBTUlOVF9OT1RfSU5JVElBSUxJWkVEEOwHEhAKC05PVF9BTExPV0VEEO0HEhQKD05PVF9JTVBMRU1FTlRFRBDuByIqChFTaWduRm9yQXBwUmVxdWVzdBIVCg1ieXRlc190b19zaWduGAEgASgMIj8KElNpZ25Gb3JBcHBSZXNwb25zZRIQCghrZXlfbmFtZRgBIAEoCRIXCg9zaWduYXR1cmVfYnl0ZXMYAiABKAwiIwohR2V0UHVibGljQ2VydGlmaWNhdGVGb3JBcHBSZXF1ZXN0IkMKEVB1YmxpY0NlcnRpZmljYXRlEhAKCGtleV9uYW1lGAEgASgJEhwKFHg1MDlfY2VydGlmaWNhdGVfcGVtGAIgASgJIo0BCiJHZXRQdWJsaWNDZXJ0aWZpY2F0ZUZvckFwcFJlc3BvbnNlEj4KF3B1YmxpY19jZXJ0aWZpY2F0ZV9saXN0GAEgAygLMh0uYXBwaG9zdGluZy5QdWJsaWNDZXJ0aWZpY2F0ZRInCh9tYXhfY2xpZW50X2NhY2hlX3RpbWVfaW5fc2Vjb25kGAIgASgDIh4KHEdldFNlcnZpY2VBY2NvdW50TmFtZVJlcXVlc3QiPQodR2V0U2VydmljZUFjY291bnROYW1lUmVzcG9uc2USHAoUc2VydmljZV9hY2NvdW50X25hbWUYASABKAkiYAoVR2V0QWNjZXNzVG9rZW5SZXF1ZXN0Eg0KBXNjb3BlGAEgAygJEhoKEnNlcnZpY2VfYWNjb3VudF9pZBgCIAEoAxIcChRzZXJ2aWNlX2FjY291bnRfbmFtZRgDIAEoCSJHChZHZXRBY2Nlc3NUb2tlblJlc3BvbnNlEhQKDGFjY2Vzc190b2tlbhgBIAEoCRIXCg9leHBpcmF0aW9uX3RpbWUYAiABKAMyqgMKDlNpZ25pbmdTZXJ2aWNlEk0KClNpZ25Gb3JBcHASHS5hcHBob3N0aW5nLlNpZ25Gb3JBcHBSZXF1ZXN0Gh4uYXBwaG9zdGluZy5TaWduRm9yQXBwUmVzcG9uc2UiABJ+ChtHZXRQdWJsaWNDZXJ0aWZpY2F0ZXNGb3JBcHASLS5hcHBob3N0aW5nLkdldFB1YmxpY0NlcnRpZmljYXRlRm9yQXBwUmVxdWVzdBouLmFwcGhvc3RpbmcuR2V0UHVibGljQ2VydGlmaWNhdGVGb3JBcHBSZXNwb25zZSIAEm4KFUdldFNlcnZpY2VBY2NvdW50TmFtZRIoLmFwcGhvc3RpbmcuR2V0U2VydmljZUFjY291bnROYW1lUmVxdWVzdBopLmFwcGhvc3RpbmcuR2V0U2VydmljZUFjY291bnROYW1lUmVzcG9uc2UiABJZCg5HZXRBY2Nlc3NUb2tlbhIhLmFwcGhvc3RpbmcuR2V0QWNjZXNzVG9rZW5SZXF1ZXN0GiIuYXBwaG9zdGluZy5HZXRBY2Nlc3NUb2tlblJlc3BvbnNlIgBCQAokY29tLmdvb2dsZS5hcHBlbmdpbmUuYXBpLmFwcGlkZW50aXR5IAEoAkIUQXBwSWRlbnRpdHlTZXJ2aWNlUGI=")) if _net_proto___parse__python is not None: _net_proto___parse__python.RegisterType( _SERIALIZED_DESCRIPTOR.tostring()) @@ -1136,6 +1136,8 @@ class GetServiceAccountNameResponse(ProtocolBuffer.ProtocolMessage): class GetAccessTokenRequest(ProtocolBuffer.ProtocolMessage): has_service_account_id_ = 0 service_account_id_ = 0 + has_service_account_name_ = 0 + service_account_name_ = "" def __init__(self, contents=None): self.scope_ = [] @@ -1169,11 +1171,25 @@ class GetAccessTokenRequest(ProtocolBuffer.ProtocolMessage): def has_service_account_id(self): return 
self.has_service_account_id_ + def service_account_name(self): return self.service_account_name_ + + def set_service_account_name(self, x): + self.has_service_account_name_ = 1 + self.service_account_name_ = x + + def clear_service_account_name(self): + if self.has_service_account_name_: + self.has_service_account_name_ = 0 + self.service_account_name_ = "" + + def has_service_account_name(self): return self.has_service_account_name_ + def MergeFrom(self, x): assert x is not self for i in xrange(x.scope_size()): self.add_scope(x.scope(i)) if (x.has_service_account_id()): self.set_service_account_id(x.service_account_id()) + if (x.has_service_account_name()): self.set_service_account_name(x.service_account_name()) if _net_proto___parse__python is not None: def _CMergeFromString(self, s): @@ -1209,6 +1225,8 @@ class GetAccessTokenRequest(ProtocolBuffer.ProtocolMessage): if e1 != e2: return 0 if self.has_service_account_id_ != x.has_service_account_id_: return 0 if self.has_service_account_id_ and self.service_account_id_ != x.service_account_id_: return 0 + if self.has_service_account_name_ != x.has_service_account_name_: return 0 + if self.has_service_account_name_ and self.service_account_name_ != x.service_account_name_: return 0 return 1 def IsInitialized(self, debug_strs=None): @@ -1220,6 +1238,7 @@ class GetAccessTokenRequest(ProtocolBuffer.ProtocolMessage): n += 1 * len(self.scope_) for i in xrange(len(self.scope_)): n += self.lengthString(len(self.scope_[i])) if (self.has_service_account_id_): n += 1 + self.lengthVarInt64(self.service_account_id_) + if (self.has_service_account_name_): n += 1 + self.lengthString(len(self.service_account_name_)) return n def ByteSizePartial(self): @@ -1227,11 +1246,13 @@ class GetAccessTokenRequest(ProtocolBuffer.ProtocolMessage): n += 1 * len(self.scope_) for i in xrange(len(self.scope_)): n += self.lengthString(len(self.scope_[i])) if (self.has_service_account_id_): n += 1 + self.lengthVarInt64(self.service_account_id_) + if (self.has_service_account_name_): n += 1 + self.lengthString(len(self.service_account_name_)) return n def Clear(self): self.clear_scope() self.clear_service_account_id() + self.clear_service_account_name() def OutputUnchecked(self, out): for i in xrange(len(self.scope_)): @@ -1240,6 +1261,9 @@ class GetAccessTokenRequest(ProtocolBuffer.ProtocolMessage): if (self.has_service_account_id_): out.putVarInt32(16) out.putVarInt64(self.service_account_id_) + if (self.has_service_account_name_): + out.putVarInt32(26) + out.putPrefixedString(self.service_account_name_) def OutputPartial(self, out): for i in xrange(len(self.scope_)): @@ -1248,6 +1272,9 @@ class GetAccessTokenRequest(ProtocolBuffer.ProtocolMessage): if (self.has_service_account_id_): out.putVarInt32(16) out.putVarInt64(self.service_account_id_) + if (self.has_service_account_name_): + out.putVarInt32(26) + out.putPrefixedString(self.service_account_name_) def TryMerge(self, d): while d.avail() > 0: @@ -1258,6 +1285,9 @@ class GetAccessTokenRequest(ProtocolBuffer.ProtocolMessage): if tt == 16: self.set_service_account_id(d.getVarInt64()) continue + if tt == 26: + self.set_service_account_name(d.getPrefixedString()) + continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError @@ -1273,6 +1303,7 @@ class GetAccessTokenRequest(ProtocolBuffer.ProtocolMessage): res+=prefix+("scope%s: %s\n" % (elm, self.DebugFormatString(e))) cnt+=1 if self.has_service_account_id_: res+=prefix+("service_account_id: %s\n" % self.DebugFormatInt64(self.service_account_id_)) + if 
self.has_service_account_name_: res+=prefix+("service_account_name: %s\n" % self.DebugFormatString(self.service_account_name_)) return res @@ -1281,25 +1312,28 @@ class GetAccessTokenRequest(ProtocolBuffer.ProtocolMessage): kscope = 1 kservice_account_id = 2 + kservice_account_name = 3 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", 1: "scope", 2: "service_account_id", - }, 2) + 3: "service_account_name", + }, 3) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, 1: ProtocolBuffer.Encoder.STRING, 2: ProtocolBuffer.Encoder.NUMERIC, - }, 2, ProtocolBuffer.Encoder.MAX_TYPE) + 3: ProtocolBuffer.Encoder.STRING, + }, 3, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" _STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting.GetAccessTokenRequest' _SERIALIZED_DESCRIPTOR = array.array('B') - _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8KIGFwcGhvc3RpbmcuR2V0QWNjZXNzVG9rZW5SZXF1ZXN0ExoFc2NvcGUgASgCMAk4AxQTGhJzZXJ2aWNlX2FjY291bnRfaWQgAigAMAM4ARTCASJhcHBob3N0aW5nLkFwcElkZW50aXR5U2VydmljZUVycm9y")) + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8KIGFwcGhvc3RpbmcuR2V0QWNjZXNzVG9rZW5SZXF1ZXN0ExoFc2NvcGUgASgCMAk4AxQTGhJzZXJ2aWNlX2FjY291bnRfaWQgAigAMAM4ARQTGhRzZXJ2aWNlX2FjY291bnRfbmFtZSADKAIwCTgBFMIBImFwcGhvc3RpbmcuQXBwSWRlbnRpdHlTZXJ2aWNlRXJyb3I=")) if _net_proto___parse__python is not None: _net_proto___parse__python.RegisterType( _SERIALIZED_DESCRIPTOR.tostring()) diff --git a/python/google/appengine/api/app_identity/app_identity_stub.py b/python/google/appengine/api/app_identity/app_identity_stub.py index ce720ae1..17f67e3a 100644 --- a/python/google/appengine/api/app_identity/app_identity_stub.py +++ b/python/google/appengine/api/app_identity/app_identity_stub.py @@ -97,6 +97,8 @@ class AppIdentityServiceStub(apiproxy_stub.APIProxyStub): Provides stub functions which allow a developer to test integration before deployment. 
""" + THREADSAFE = True + def __init__(self, service_name='app_identity_service'): """Constructor.""" super(AppIdentityServiceStub, self).__init__(service_name) @@ -136,6 +138,8 @@ class AppIdentityServiceStub(apiproxy_stub.APIProxyStub): service_account_id = request.service_account_id() if service_account_id: token += '.%d' % service_account_id + if request.has_service_account_name(): + token += '.%s' % request.service_account_name() response.set_access_token('InvalidToken:%s:%s' % (token, time.time() % 100)) response.set_expiration_time(int(time.time()) + 1800) diff --git a/python/google/appengine/api/appinfo.py b/python/google/appengine/api/appinfo.py index e2e3e4b1..6173947d 100644 --- a/python/google/appengine/api/appinfo.py +++ b/python/google/appengine/api/appinfo.py @@ -134,8 +134,9 @@ SERVER_VERSION_ID_RE_STRING = (r'^(?!-)[a-z\d\-]{0,%d}[a-z\d]$' % (SERVER_VERSION_ID_MAX_LEN - 1)) _IDLE_INSTANCES_REGEX = r'^([\d]+|automatic)$' -_INSTANCES_REGEX = r'^[\d]+$' -_INSTANCE_CLASS_REGEX = r'^([sS](1|2|4|8|4_1G))$' + +_INSTANCES_REGEX = r'^[1-9][\d]*$' +_INSTANCE_CLASS_REGEX = r'^([fF](1|2|4|4_1G)|[bB](1|2|4|8|4_1G))$' @@ -167,6 +168,9 @@ LOGIN_ADMIN = 'admin' AUTH_FAIL_ACTION_REDIRECT = 'redirect' AUTH_FAIL_ACTION_UNAUTHORIZED = 'unauthorized' +DATASTORE_ID_POLICY_LEGACY = 'legacy' +DATASTORE_ID_POLICY_DEFAULT = 'default' + SECURE_HTTP = 'never' SECURE_HTTPS = 'always' SECURE_HTTP_OR_HTTPS = 'optional' @@ -232,6 +236,7 @@ ADMIN_CONSOLE = 'admin_console' ERROR_HANDLERS = 'error_handlers' BACKENDS = 'backends' THREADSAFE = 'threadsafe' +DATASTORE_AUTO_ID_POLICY = 'auto_id_policy' API_CONFIG = 'api_config' CODE_LOCK = 'code_lock' ENV_VARIABLES = 'env_variables' @@ -1473,6 +1478,10 @@ class AppInfoExternal(validation.Validated): BACKENDS: validation.Optional(validation.Repeated( backendinfo.BackendEntry)), THREADSAFE: validation.Optional(bool), + DATASTORE_AUTO_ID_POLICY: validation.Optional( + validation.Options(DATASTORE_ID_POLICY_LEGACY, + DATASTORE_ID_POLICY_DEFAULT), + default=DATASTORE_ID_POLICY_DEFAULT), API_CONFIG: validation.Optional(ApiConfigHandler), CODE_LOCK: validation.Optional(bool), ENV_VARIABLES: validation.Optional(EnvironmentVariables), @@ -1523,6 +1532,21 @@ class AppInfoExternal(validation.Validated): raise appinfo_errors.MissingThreadsafe( 'threadsafe must be present and set to either "yes" or "no"') + + if self.auto_id_policy == DATASTORE_ID_POLICY_LEGACY: + datastore_auto_ids_url = ('http://developers.google.com/' + 'appengine/docs/python/datastore/' + 'entities#Kinds_and_Identifiers') + appcfg_auto_ids_url = ('http://developers.google.com/appengine/docs/' + 'python/config/appconfig') + logging.warning( + "You have set the datastore auto_id_policy to 'legacy'. It is " + "recommended that you select 'default' instead.\n" + "Legacy auto ids are deprecated. 
You can continue to allocate\n" + "legacy ids manually using the allocate_ids() API functions.\n" + "For more information see:\n" + + datastore_auto_ids_url + '\n' + appcfg_auto_ids_url + '\n') + if self.libraries: if self.runtime != 'python27' and not self._skip_runtime_checks: raise appinfo_errors.RuntimeDoesNotSupportLibraries( @@ -1797,10 +1821,11 @@ def ParseExpiration(expiration): -_file_path_positive_re = re.compile(r'^[ 0-9a-zA-Z\._\+/\$-]{1,256}$') + +_file_path_positive_re = re.compile(r'^[ 0-9a-zA-Z\._\+/@\$-]{1,256}$') -_file_path_negative_1_re = re.compile(r'\.\.|^\./|\.$|/\./|^-|^_ah/') +_file_path_negative_1_re = re.compile(r'\.\.|^\./|\.$|/\./|^-|^_ah/|^/') _file_path_negative_2_re = re.compile(r'//|/$') @@ -1814,7 +1839,7 @@ def ValidFilename(filename): """Determines if filename is valid. filename must be a valid pathname. - - It must contain only letters, numbers, _, +, /, $, ., and -. + - It must contain only letters, numbers, @, _, +, /, $, ., and -. - It must be less than 256 chars. - It must not contain "/./", "/../", or "//". - It must not end in "/". diff --git a/python/google/appengine/api/blobstore/blobstore.py b/python/google/appengine/api/blobstore/blobstore.py index 36aaaeec..9887b0d5 100644 --- a/python/google/appengine/api/blobstore/blobstore.py +++ b/python/google/appengine/api/blobstore/blobstore.py @@ -50,6 +50,7 @@ __all__ = ['BLOB_INFO_KIND', 'MAX_BLOB_FETCH_SIZE', 'UPLOAD_INFO_CREATION_HEADER', 'CLOUD_STORAGE_OBJECT_HEADER', + 'GS_PREFIX', 'BlobFetchSizeTooLargeError', 'BlobKey', 'BlobNotFoundError', @@ -83,6 +84,8 @@ BLOB_RANGE_HEADER = 'X-AppEngine-BlobRange' MAX_BLOB_FETCH_SIZE = (1 << 20) - (1 << 15) +GS_PREFIX = '/gs/' + UPLOAD_INFO_CREATION_HEADER = 'X-AppEngine-Upload-Creation' @@ -500,7 +503,7 @@ def create_gs_key_async(filename, rpc=None): if not isinstance(filename, basestring): raise TypeError('filename must be str: %s' % filename) - if not filename.startswith('/gs/'): + if not filename.startswith(GS_PREFIX): raise ValueError('filename must start with "/gs/": %s' % filename) if not '/' in filename[4:]: raise ValueError('filename must have the format ' diff --git a/python/google/appengine/api/blobstore/blobstore_stub.py b/python/google/appengine/api/blobstore/blobstore_stub.py index 4094ea71..5c0ef1f2 100644 --- a/python/google/appengine/api/blobstore/blobstore_stub.py +++ b/python/google/appengine/api/blobstore/blobstore_stub.py @@ -65,6 +65,7 @@ class ConfigurationError(Error): _UPLOAD_SESSION_KIND = '__BlobUploadSession__' + _GS_INFO_KIND = '__GsFileInfo__' @@ -194,6 +195,15 @@ class BlobstoreServiceStub(apiproxy_stub.APIProxyStub): self.__next_session_id = 1 self.__uploader_path = uploader_path + @classmethod + def ToDatastoreBlobKey(cls, blobkey): + """Given a string blobkey, return its db.Key.""" + kind = blobstore.BLOB_INFO_KIND + if blobkey.startswith(cls.GS_BLOBKEY_PREFIX): + kind = _GS_INFO_KIND + return datastore_types.Key.from_path(kind, + blobkey, + namespace='') @property def storage(self): """Access BlobStorage used by service stub. @@ -284,6 +294,22 @@ class BlobstoreServiceStub(apiproxy_stub.APIProxyStub): response.set_url('%s://%s/%s%s' % (protocol, host, self.__uploader_path, session)) + @classmethod + def DeleteBlob(cls, blobkey, storage): + """Delete a blob. + + Args: + blobkey: blobkey in str. + storage: blobstore storage stub. 
+ """ + datastore.Delete(cls.ToDatastoreBlobKey(blobkey)) + + blobinfo = datastore_types.Key.from_path(blobstore.BLOB_INFO_KIND, + blobkey, + namespace='') + datastore.Delete(blobinfo) + storage.DeleteBlob(blobkey) + def _Dynamic_DeleteBlob(self, request, response, unused_request_id): """Delete a blob by its blob-key. @@ -294,18 +320,8 @@ class BlobstoreServiceStub(apiproxy_stub.APIProxyStub): request: A fully initialized DeleteBlobRequest instance. response: Not used but should be a VoidProto. """ - for blob_key in request.blob_key_list(): - if blob_key.startswith(self.GS_BLOBKEY_PREFIX): - key = datastore_types.Key.from_path(_GS_INFO_KIND, - str(blob_key), - namespace='') - else: - key = datastore_types.Key.from_path(blobstore.BLOB_INFO_KIND, - str(blob_key), - namespace='') - - datastore.Delete(key) - self.__storage.DeleteBlob(blob_key) + for blobkey in request.blob_key_list(): + self.DeleteBlob(blobkey, self.__storage) def _Dynamic_FetchData(self, request, response, unused_request_id): """Fetch a blob fragment from a blob by its blob-key. @@ -345,18 +361,16 @@ class BlobstoreServiceStub(apiproxy_stub.APIProxyStub): blobstore_service_pb.BlobstoreServiceError.BLOB_FETCH_SIZE_TOO_LARGE) - blob_key = request.blob_key() - blob_info_key = datastore.Key.from_path(blobstore.BLOB_INFO_KIND, - blob_key, - namespace='') + blobkey = request.blob_key() + info_key = self.ToDatastoreBlobKey(blobkey) try: - datastore.Get(blob_info_key) - except datastore_errors.EntityNotFoundError, err: + datastore.Get(info_key) + except datastore_errors.EntityNotFoundError: raise apiproxy_errors.ApplicationError( blobstore_service_pb.BlobstoreServiceError.BLOB_NOT_FOUND) - blob_file = self.__storage.OpenBlob(blob_key) + blob_file = self.__storage.OpenBlob(blobkey) blob_file.seek(start_index) response.set_data(blob_file.read(fetch_size)) @@ -378,10 +392,16 @@ class BlobstoreServiceStub(apiproxy_stub.APIProxyStub): accept encoded blob keys will need to be able to support Google Storage files or blobstore files based on decoding this key. + Any stub that creates GS files should use this function to convert + a gs filename to a blobkey. The created blobkey should be used both + as its _GS_FILE_INFO entity's key name and as the storage key to + store its content in blobstore. This ensures the GS files created + can be operated by other APIs. + Note this encoding is easily reversible and is not encryption. Args: - filename: gs filename of form '/gs/bucket/filename' + filename: gs filename of form 'bucket/filename' Returns: blobkey string of encoded filename. @@ -401,8 +421,9 @@ class BlobstoreServiceStub(apiproxy_stub.APIProxyStub): instance. response: A CreateEncodedGoogleStorageKeyResponse instance. """ + filename = request.filename()[len(blobstore.GS_PREFIX):] response.set_blob_key( - self.CreateEncodedGoogleStorageKey(request.filename())) + self.CreateEncodedGoogleStorageKey(filename)) def CreateBlob(self, blob_key, content): """Create new blob and put in storage and Datastore. diff --git a/python/google/appengine/api/capabilities/capability_stub.py b/python/google/appengine/api/capabilities/capability_stub.py index e2989c08..bfd5da1f 100644 --- a/python/google/appengine/api/capabilities/capability_stub.py +++ b/python/google/appengine/api/capabilities/capability_stub.py @@ -51,6 +51,8 @@ SUPPORTED_CAPABILITIES = ( class CapabilityServiceStub(apiproxy_stub.APIProxyStub): """Python only capability service stub.""" + THREADSAFE = True + def __init__(self, service_name='capability_service'): """Constructor. 
diff --git a/python/google/appengine/api/channel/channel_service_pb.py b/python/google/appengine/api/channel/channel_service_pb.py index 35ed70f6..da61a464 100644 --- a/python/google/appengine/api/channel/channel_service_pb.py +++ b/python/google/appengine/api/channel/channel_service_pb.py @@ -519,401 +519,7 @@ class SendMessageRequest(ProtocolBuffer.ProtocolMessage): _STYLE = """""" _STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting.SendMessageRequest' -class ChannelPresenceRequest(ProtocolBuffer.ProtocolMessage): - - def __init__(self, contents=None): - self.application_key_ = [] - if contents is not None: self.MergeFromString(contents) - - def application_key_size(self): return len(self.application_key_) - def application_key_list(self): return self.application_key_ - - def application_key(self, i): - return self.application_key_[i] - - def set_application_key(self, i, x): - self.application_key_[i] = x - - def add_application_key(self, x): - self.application_key_.append(x) - - def clear_application_key(self): - self.application_key_ = [] - - - def MergeFrom(self, x): - assert x is not self - for i in xrange(x.application_key_size()): self.add_application_key(x.application_key(i)) - - def Equals(self, x): - if x is self: return 1 - if len(self.application_key_) != len(x.application_key_): return 0 - for e1, e2 in zip(self.application_key_, x.application_key_): - if e1 != e2: return 0 - return 1 - - def IsInitialized(self, debug_strs=None): - initialized = 1 - return initialized - - def ByteSize(self): - n = 0 - n += 1 * len(self.application_key_) - for i in xrange(len(self.application_key_)): n += self.lengthString(len(self.application_key_[i])) - return n - - def ByteSizePartial(self): - n = 0 - n += 1 * len(self.application_key_) - for i in xrange(len(self.application_key_)): n += self.lengthString(len(self.application_key_[i])) - return n - - def Clear(self): - self.clear_application_key() - - def OutputUnchecked(self, out): - for i in xrange(len(self.application_key_)): - out.putVarInt32(10) - out.putPrefixedString(self.application_key_[i]) - - def OutputPartial(self, out): - for i in xrange(len(self.application_key_)): - out.putVarInt32(10) - out.putPrefixedString(self.application_key_[i]) - - def TryMerge(self, d): - while d.avail() > 0: - tt = d.getVarInt32() - if tt == 10: - self.add_application_key(d.getPrefixedString()) - continue - - - if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError - d.skipData(tt) - - - def __str__(self, prefix="", printElemNumber=0): - res="" - cnt=0 - for e in self.application_key_: - elm="" - if printElemNumber: elm="(%d)" % cnt - res+=prefix+("application_key%s: %s\n" % (elm, self.DebugFormatString(e))) - cnt+=1 - return res - - - def _BuildTagLookupTable(sparse, maxtag, default=None): - return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) - - kapplication_key = 1 - - _TEXT = _BuildTagLookupTable({ - 0: "ErrorCode", - 1: "application_key", - }, 1) - - _TYPES = _BuildTagLookupTable({ - 0: ProtocolBuffer.Encoder.NUMERIC, - 1: ProtocolBuffer.Encoder.STRING, - }, 1, ProtocolBuffer.Encoder.MAX_TYPE) - - - _STYLE = """""" - _STYLE_CONTENT_TYPE = """""" - _PROTO_DESCRIPTOR_NAME = 'apphosting.ChannelPresenceRequest' -class ChannelPresenceResponse_QueryResult(ProtocolBuffer.ProtocolMessage): - has_application_key_ = 0 - application_key_ = "" - has_is_available_ = 0 - is_available_ = 0 - has_error_code_ = 0 - error_code_ = 0 - - def __init__(self, contents=None): - if contents is not None: self.MergeFromString(contents) 
- - def application_key(self): return self.application_key_ - - def set_application_key(self, x): - self.has_application_key_ = 1 - self.application_key_ = x - - def clear_application_key(self): - if self.has_application_key_: - self.has_application_key_ = 0 - self.application_key_ = "" - - def has_application_key(self): return self.has_application_key_ - - def is_available(self): return self.is_available_ - - def set_is_available(self, x): - self.has_is_available_ = 1 - self.is_available_ = x - - def clear_is_available(self): - if self.has_is_available_: - self.has_is_available_ = 0 - self.is_available_ = 0 - - def has_is_available(self): return self.has_is_available_ - - def error_code(self): return self.error_code_ - - def set_error_code(self, x): - self.has_error_code_ = 1 - self.error_code_ = x - - def clear_error_code(self): - if self.has_error_code_: - self.has_error_code_ = 0 - self.error_code_ = 0 - - def has_error_code(self): return self.has_error_code_ - - - def MergeFrom(self, x): - assert x is not self - if (x.has_application_key()): self.set_application_key(x.application_key()) - if (x.has_is_available()): self.set_is_available(x.is_available()) - if (x.has_error_code()): self.set_error_code(x.error_code()) - - def Equals(self, x): - if x is self: return 1 - if self.has_application_key_ != x.has_application_key_: return 0 - if self.has_application_key_ and self.application_key_ != x.application_key_: return 0 - if self.has_is_available_ != x.has_is_available_: return 0 - if self.has_is_available_ and self.is_available_ != x.is_available_: return 0 - if self.has_error_code_ != x.has_error_code_: return 0 - if self.has_error_code_ and self.error_code_ != x.error_code_: return 0 - return 1 - - def IsInitialized(self, debug_strs=None): - initialized = 1 - if (not self.has_application_key_): - initialized = 0 - if debug_strs is not None: - debug_strs.append('Required field: application_key not set.') - if (not self.has_is_available_): - initialized = 0 - if debug_strs is not None: - debug_strs.append('Required field: is_available not set.') - if (not self.has_error_code_): - initialized = 0 - if debug_strs is not None: - debug_strs.append('Required field: error_code not set.') - return initialized - - def ByteSize(self): - n = 0 - n += self.lengthString(len(self.application_key_)) - n += self.lengthVarInt64(self.error_code_) - return n + 4 - - def ByteSizePartial(self): - n = 0 - if (self.has_application_key_): - n += 1 - n += self.lengthString(len(self.application_key_)) - if (self.has_is_available_): - n += 2 - if (self.has_error_code_): - n += 1 - n += self.lengthVarInt64(self.error_code_) - return n - - def Clear(self): - self.clear_application_key() - self.clear_is_available() - self.clear_error_code() - - def OutputUnchecked(self, out): - out.putVarInt32(10) - out.putPrefixedString(self.application_key_) - out.putVarInt32(16) - out.putBoolean(self.is_available_) - out.putVarInt32(24) - out.putVarInt32(self.error_code_) - - def OutputPartial(self, out): - if (self.has_application_key_): - out.putVarInt32(10) - out.putPrefixedString(self.application_key_) - if (self.has_is_available_): - out.putVarInt32(16) - out.putBoolean(self.is_available_) - if (self.has_error_code_): - out.putVarInt32(24) - out.putVarInt32(self.error_code_) - - def TryMerge(self, d): - while d.avail() > 0: - tt = d.getVarInt32() - if tt == 10: - self.set_application_key(d.getPrefixedString()) - continue - if tt == 16: - self.set_is_available(d.getBoolean()) - continue - if tt == 24: - 
self.set_error_code(d.getVarInt32()) - continue - - - if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError - d.skipData(tt) - - - def __str__(self, prefix="", printElemNumber=0): - res="" - if self.has_application_key_: res+=prefix+("application_key: %s\n" % self.DebugFormatString(self.application_key_)) - if self.has_is_available_: res+=prefix+("is_available: %s\n" % self.DebugFormatBool(self.is_available_)) - if self.has_error_code_: res+=prefix+("error_code: %s\n" % self.DebugFormatInt32(self.error_code_)) - return res - - - def _BuildTagLookupTable(sparse, maxtag, default=None): - return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) - - kapplication_key = 1 - kis_available = 2 - kerror_code = 3 - - _TEXT = _BuildTagLookupTable({ - 0: "ErrorCode", - 1: "application_key", - 2: "is_available", - 3: "error_code", - }, 3) - - _TYPES = _BuildTagLookupTable({ - 0: ProtocolBuffer.Encoder.NUMERIC, - 1: ProtocolBuffer.Encoder.STRING, - 2: ProtocolBuffer.Encoder.NUMERIC, - 3: ProtocolBuffer.Encoder.NUMERIC, - }, 3, ProtocolBuffer.Encoder.MAX_TYPE) - - - _STYLE = """""" - _STYLE_CONTENT_TYPE = """""" - _PROTO_DESCRIPTOR_NAME = 'apphosting.ChannelPresenceResponse_QueryResult' -class ChannelPresenceResponse(ProtocolBuffer.ProtocolMessage): - - def __init__(self, contents=None): - self.query_result_ = [] - if contents is not None: self.MergeFromString(contents) - - def query_result_size(self): return len(self.query_result_) - def query_result_list(self): return self.query_result_ - - def query_result(self, i): - return self.query_result_[i] - - def mutable_query_result(self, i): - return self.query_result_[i] - - def add_query_result(self): - x = ChannelPresenceResponse_QueryResult() - self.query_result_.append(x) - return x - - def clear_query_result(self): - self.query_result_ = [] - - def MergeFrom(self, x): - assert x is not self - for i in xrange(x.query_result_size()): self.add_query_result().CopyFrom(x.query_result(i)) - - def Equals(self, x): - if x is self: return 1 - if len(self.query_result_) != len(x.query_result_): return 0 - for e1, e2 in zip(self.query_result_, x.query_result_): - if e1 != e2: return 0 - return 1 - - def IsInitialized(self, debug_strs=None): - initialized = 1 - for p in self.query_result_: - if not p.IsInitialized(debug_strs): initialized=0 - return initialized - - def ByteSize(self): - n = 0 - n += 1 * len(self.query_result_) - for i in xrange(len(self.query_result_)): n += self.lengthString(self.query_result_[i].ByteSize()) - return n - - def ByteSizePartial(self): - n = 0 - n += 1 * len(self.query_result_) - for i in xrange(len(self.query_result_)): n += self.lengthString(self.query_result_[i].ByteSizePartial()) - return n - - def Clear(self): - self.clear_query_result() - - def OutputUnchecked(self, out): - for i in xrange(len(self.query_result_)): - out.putVarInt32(10) - out.putVarInt32(self.query_result_[i].ByteSize()) - self.query_result_[i].OutputUnchecked(out) - - def OutputPartial(self, out): - for i in xrange(len(self.query_result_)): - out.putVarInt32(10) - out.putVarInt32(self.query_result_[i].ByteSizePartial()) - self.query_result_[i].OutputPartial(out) - - def TryMerge(self, d): - while d.avail() > 0: - tt = d.getVarInt32() - if tt == 10: - length = d.getVarInt32() - tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) - d.skip(length) - self.add_query_result().TryMerge(tmp) - continue - - - if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError - d.skipData(tt) - - - def __str__(self, prefix="", 
printElemNumber=0): - res="" - cnt=0 - for e in self.query_result_: - elm="" - if printElemNumber: elm="(%d)" % cnt - res+=prefix+("query_result%s <\n" % elm) - res+=e.__str__(prefix + " ", printElemNumber) - res+=prefix+">\n" - cnt+=1 - return res - - - def _BuildTagLookupTable(sparse, maxtag, default=None): - return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) - - kquery_result = 1 - - _TEXT = _BuildTagLookupTable({ - 0: "ErrorCode", - 1: "query_result", - }, 1) - - _TYPES = _BuildTagLookupTable({ - 0: ProtocolBuffer.Encoder.NUMERIC, - 1: ProtocolBuffer.Encoder.STRING, - }, 1, ProtocolBuffer.Encoder.MAX_TYPE) - - - _STYLE = """""" - _STYLE_CONTENT_TYPE = """""" - _PROTO_DESCRIPTOR_NAME = 'apphosting.ChannelPresenceResponse' if _extension_runtime: pass -__all__ = ['ChannelServiceError','CreateChannelRequest','CreateChannelResponse','SendMessageRequest','ChannelPresenceRequest','ChannelPresenceResponse_QueryResult','ChannelPresenceResponse'] +__all__ = ['ChannelServiceError','CreateChannelRequest','CreateChannelResponse','SendMessageRequest'] diff --git a/python/google/appengine/api/channel/channel_service_stub.py b/python/google/appengine/api/channel/channel_service_stub.py index 894ff8f0..2e774035 100644 --- a/python/google/appengine/api/channel/channel_service_stub.py +++ b/python/google/appengine/api/channel/channel_service_stub.py @@ -57,6 +57,8 @@ class ChannelServiceStub(apiproxy_stub.APIProxyStub): Instead it queues messages internally. """ + THREADSAFE = True + diff --git a/python/google/appengine/api/files/file_service_stub.py b/python/google/appengine/api/files/file_service_stub.py index 42dc554c..d0e48260 100644 --- a/python/google/appengine/api/files/file_service_stub.py +++ b/python/google/appengine/api/files/file_service_stub.py @@ -41,6 +41,7 @@ from google.appengine.api.files import file as files from google.appengine.api.files import file_service_pb from google.appengine.api.files import gs from google.appengine.ext import blobstore +from google.appengine.ext.cloudstorage import cloudstorage_stub from google.appengine.runtime import apiproxy_errors @@ -76,7 +77,7 @@ class _GoogleStorageUpload(tuple): """Stores information about a writable Google Storage file.""" buf = property(lambda self: self[0]) content_type = property(lambda self: self[1]) - key = property(lambda self: self[2]) + gs_filename = property(lambda self: self[2]) class GoogleStorage(object): @@ -84,8 +85,8 @@ class GoogleStorage(object): - def _Upload(self, buf, content_type, key): - return _GoogleStorageUpload([buf, content_type, key]) + def _Upload(self, buf, content_type, gs_filename): + return _GoogleStorageUpload([buf, content_type, gs_filename]) def __init__(self, blob_storage): """Constructor. @@ -95,10 +96,20 @@ class GoogleStorage(object): apphosting.api.blobstore.blobstore_stub.BlobStorage instance. 
""" self.blob_storage = blob_storage + self.gs_stub = cloudstorage_stub.CloudStorageStub(self.blob_storage) self.uploads = {} self.finalized = set() self.sequence_keys = {} + def remove_gs_prefix(self, gs_filename): + return gs_filename[len('/gs'):] + + def add_gs_prefix(self, gs_filename): + return '/gs' + gs_filename + + def get_blobkey(self, gs_filename): + return blobstore.create_gs_key(gs_filename) + def has_upload(self, filename): """Checks if there is an upload at this filename.""" return filename in self.uploads @@ -108,25 +119,15 @@ class GoogleStorage(object): upload = self.uploads[filename] self.finalized.add(filename) upload.buf.seek(0) - self.blob_storage.StoreBlob(self.get_blob_key(upload.key), upload.buf) - del self.sequence_keys[filename] - + content = upload.buf.read() + blobkey = self.gs_stub.post_start_creation( + self.remove_gs_prefix(upload.gs_filename), + {'content-type': upload.content_type}) + assert blobkey == self.get_blobkey(upload.gs_filename) + self.gs_stub.put_continue_creation( + blobkey, content, (0, len(content) - 1), True) - encoded_key = blobstore.create_gs_key(upload.key) - file_info = datastore.Entity(GS_INFO_KIND, - name=encoded_key, - namespace='') - file_info['creation'] = _now_function() - file_info['filename'] = upload.key - file_info['size'] = upload.buf.len - file_info['content_type'] = upload.content_type - file_info['storage_key'] = self.get_blob_key(upload.key) - datastore.Put(file_info) - - @staticmethod - def get_blob_key(key): - """Converts a Google Storage key into a base64 encoded blob key/filename.""" - return base64.urlsafe_b64encode(key) + del self.sequence_keys[filename] def is_finalized(self, filename): """Checks if file is already finalized.""" @@ -172,7 +173,7 @@ class GoogleStorage(object): datastore.Delete( datastore.Key.from_path(GS_INFO_KIND, - blobstore.create_gs_key(gs_filename), + self.get_blobkey(gs_filename), namespace='')) return writable_name @@ -187,22 +188,24 @@ class GoogleStorage(object): self.sequence_keys[filename] = sequence_key self.uploads[filename].buf.write(data) - def stat(self, filename): + def stat(self, gs_filename): """ Returns: file info for a finalized file with given filename """ - blob_key = blobstore.create_gs_key(filename) + blob_key = self.get_blobkey(gs_filename) try: - return datastore.Get( + fileinfo = datastore.Get( datastore.Key.from_path(GS_INFO_KIND, blob_key, namespace='')) + fileinfo['filename'] = self.add_gs_prefix(fileinfo['filename']) + return fileinfo except datastore_errors.EntityNotFoundError: raise raise_error(file_service_pb.FileServiceErrors.EXISTENCE_ERROR, - filename) + gs_filename) - def get_reader(self, filename): + def get_reader(self, gs_filename): try: - return self.blob_storage.OpenBlob(self.get_blob_key(filename)) + return self.blob_storage.OpenBlob(self.get_blobkey(gs_filename)) except IOError: return None @@ -217,7 +220,7 @@ class GoogleStorage(object): A list of fully qualified filenames under a certain path sorted by in char order. 
""" - path = request.path() + path = self.remove_gs_prefix(request.path()) prefix = request.prefix() if request.has_prefix() else '' q = datastore.Query(GS_INFO_KIND, namespace='') @@ -234,7 +237,7 @@ class GoogleStorage(object): for gs_file_info in q.Get(max_keys): filename = gs_file_info['filename'] if filename.startswith(fully_qualified_name): - response.add_filenames(filename) + response.add_filenames(self.add_gs_prefix(filename)) else: break diff --git a/python/google/appengine/api/images/images_stub.py b/python/google/appengine/api/images/images_stub.py index 8e52fe68..b899a832 100644 --- a/python/google/appengine/api/images/images_stub.py +++ b/python/google/appengine/api/images/images_stub.py @@ -53,11 +53,11 @@ except ImportError: from google.appengine.api import apiproxy_stub from google.appengine.api import apiproxy_stub_map -from google.appengine.api import blobstore from google.appengine.api import datastore from google.appengine.api import datastore_errors from google.appengine.api import datastore_types from google.appengine.api import images +from google.appengine.api.blobstore import blobstore_stub from google.appengine.api.images import images_blob_stub from google.appengine.api.images import images_service_pb from google.appengine.runtime import apiproxy_errors @@ -335,7 +335,6 @@ class ImagesServiceStub(apiproxy_stub.APIProxyStub): image = image.convert("RGB") image.save(image_string, image_encoding) - return image_string.getvalue() def _OpenImageData(self, image_data): @@ -397,36 +396,21 @@ class ImagesServiceStub(apiproxy_stub.APIProxyStub): def _OpenBlob(self, blob_key): """Create an Image from the blob data read from blob_key.""" - storage_key = None - try: - gs_info = datastore.Get( - datastore.Key.from_path(GS_INFO_KIND, - blob_key, - namespace='')) - storage_key = gs_info['storage_key'] - except datastore_errors.EntityNotFoundError: - pass - - if not storage_key: - try: - key = datastore_types.Key.from_path(blobstore.BLOB_INFO_KIND, - blob_key, - namespace='') - datastore.Get(key) - storage_key = blob_key - except datastore_errors.Error: + _ = datastore.Get( + blobstore_stub.BlobstoreServiceStub.ToDatastoreBlobKey(blob_key)) + except datastore_errors.Error: - logging.exception('Blob with key %r does not exist', blob_key) - raise apiproxy_errors.ApplicationError( - images_service_pb.ImagesServiceError.UNSPECIFIED_ERROR) + logging.exception("Blob with key %r does not exist", blob_key) + raise apiproxy_errors.ApplicationError( + images_service_pb.ImagesServiceError.UNSPECIFIED_ERROR) - blobstore_stub = apiproxy_stub_map.apiproxy.GetStub("blobstore") + blobstore_storage = apiproxy_stub_map.apiproxy.GetStub("blobstore") try: - blob_file = blobstore_stub.storage.OpenBlob(storage_key) + blob_file = blobstore_storage.storage.OpenBlob(blob_key) except IOError: logging.exception("Could not get file for blob_key %r", blob_key) diff --git a/python/google/appengine/api/logservice/logservice_stub.py b/python/google/appengine/api/logservice/logservice_stub.py index 5676f576..7b88889d 100644 --- a/python/google/appengine/api/logservice/logservice_stub.py +++ b/python/google/appengine/api/logservice/logservice_stub.py @@ -68,6 +68,8 @@ CREATE TABLE IF NOT EXISTS AppLogs ( class LogServiceStub(apiproxy_stub.APIProxyStub): """Python stub for Log Service service.""" + THREADSAFE = True + _ACCEPTS_REQUEST_ID = True diff --git a/python/google/appengine/api/mail.py b/python/google/appengine/api/mail.py index 7b5d76e3..f046215c 100644 --- a/python/google/appengine/api/mail.py +++ 
b/python/google/appengine/api/mail.py @@ -178,6 +178,7 @@ EXTENSION_BLACKLIST = [ HEADER_WHITELIST = frozenset([ + 'Auto-Submitted', 'In-Reply-To', 'List-Id', 'List-Unsubscribe', diff --git a/python/google/appengine/api/mail_stub.py b/python/google/appengine/api/mail_stub.py index 81c38101..e8df17d7 100644 --- a/python/google/appengine/api/mail_stub.py +++ b/python/google/appengine/api/mail_stub.py @@ -58,6 +58,8 @@ class MailServiceStub(apiproxy_stub.APIProxyStub): password: Password for SMTP server user. """ + THREADSAFE = True + def __init__(self, host=None, port=25, diff --git a/python/google/appengine/api/memcache/memcache_stub.py b/python/google/appengine/api/memcache/memcache_stub.py index af2ddf8f..e07cf6df 100644 --- a/python/google/appengine/api/memcache/memcache_stub.py +++ b/python/google/appengine/api/memcache/memcache_stub.py @@ -114,6 +114,8 @@ class MemcacheServiceStub(apiproxy_stub.APIProxyStub): external servers. """ + THREADSAFE = True + def __init__(self, gettime=time.time, service_name='memcache'): """Initializer. diff --git a/python/google/appengine/api/remote_socket/_remote_socket.py b/python/google/appengine/api/remote_socket/_remote_socket.py index aec65ab7..95c28bc0 100644 --- a/python/google/appengine/api/remote_socket/_remote_socket.py +++ b/python/google/appengine/api/remote_socket/_remote_socket.py @@ -155,6 +155,10 @@ _ADDRESS_FAMILY_LENGTH_MAP = { } +class SocketApiNotImplementedError(NotImplementedError, error): + pass + + def _SystemExceptionFromAppError(e): app_error = e.application_error if app_error in (RemoteSocketServiceError.SYSTEM_ERROR, @@ -267,7 +271,7 @@ def gethostbyname_ex(host): def gethostbyaddr(addr): - raise NotImplementedError() + raise SocketApiNotImplementedError() def gethostname(): @@ -279,7 +283,7 @@ def gethostname(): def getprotobyname(protocolname): - raise NotImplementedError() + raise SocketApiNotImplementedError() def getservbyname(servicename, protocolname=None): @@ -293,7 +297,7 @@ def getservbyname(servicename, protocolname=None): def getservbyport(portnumber, protocolname=0): - raise NotImplementedError() + raise SocketApiNotImplementedError() @@ -366,7 +370,7 @@ def getaddrinfo(host, service, family=AF_UNSPEC, socktype=0, proto=0, flags=0): def getnameinfo(): - raise NotImplementedError() + raise SocketApiNotImplementedError() def getdefaulttimeout(): @@ -556,6 +560,7 @@ class socket(object): self._bound = False self._listen = False self._connected = False + self._connect_in_progress = False self._shutdown_read = False self._shutdown_write = False self._setsockopt = [] @@ -772,6 +777,8 @@ class socket(object): if translated_e.errno == errno.EISCONN: self._bound = True self._connected = True + elif translated_e.errno == errno.EINPROGRESS: + self._connect_in_progress = True raise translated_e self._bound = True @@ -799,7 +806,7 @@ class socket(object): self._CreateSocket() if not self._socket_descriptor: raise error(errno.EBADF, os.strerror(errno.EBADF)) - if not self._connected: + if not (self._connected or self._connect_in_progress): raise error(errno.ENOTCONN, os.strerror(errno.ENOTCONN)) request = remote_socket_service_pb.GetPeerNameRequest() @@ -813,6 +820,10 @@ class socket(object): except apiproxy_errors.ApplicationError, e: raise _SystemExceptionFromAppError(e) + if self._connect_in_progress: + self._connect_in_progress = False + self._connected = True + return ( inet_ntop(self.family, reply.peer_ip().packed_address()), reply.peer_ip().port()) @@ -863,7 +874,7 @@ class socket(object): See recv() for documentation 
about the flags. """ - raise NotImplementedError() + raise SocketApiNotImplementedError() def recvfrom(self, buffersize, flags=0): """recvfrom(buffersize[, flags]) -> (data, address info) @@ -880,7 +891,7 @@ class socket(object): request.set_data_size(buffersize) request.set_flags(flags) if self.type == SOCK_STREAM: - if not self._connected: + if not (self._connected or self._connect_in_progress): raise error(errno.ENOTCONN, os.strerror(errno.ENOTCONN)) if self._shutdown_read: request.set_timeout_seconds(0.0) @@ -896,6 +907,10 @@ class socket(object): if not self._shutdown_read or e.errno != errno.EAGAIN: raise e + if self._connect_in_progress: + self._connect_in_progress = False + self._connected = True + address = None if reply.has_received_from(): address = ( @@ -913,7 +928,7 @@ class socket(object): sender's address info. """ - raise NotImplementedError() + raise SocketApiNotImplementedError() def send(self, data, flags=0): """send(data[, flags]) -> count @@ -971,7 +986,7 @@ class socket(object): raise error(errno.ENOTCONN, os.strerror(errno.ENOTCONN)) self._SetProtoFromAddr(request.mutable_send_to(), address) else: - if not self._connected: + if not (self._connected or self._connect_in_progress): raise error(errno.ENOTCONN, os.strerror(errno.ENOTCONN)) if self.gettimeout() is not None: @@ -984,6 +999,10 @@ class socket(object): except apiproxy_errors.ApplicationError, e: raise _SystemExceptionFromAppError(e) + if self._connect_in_progress: + self._connect_in_progress = False + self._connected = True + nbytes = reply.data_sent() assert nbytes >= 0 if self.type == SOCK_STREAM: @@ -1062,7 +1081,7 @@ class socket(object): try: apiproxy_stub_map.MakeSyncCall( - 'remote_socket', 'SetSocketOption', request, reply) + 'remote_socket', 'SetSocketOptions', request, reply) except apiproxy_errors.ApplicationError, e: raise _SystemExceptionFromAppError(e) diff --git a/python/google/appengine/api/remote_socket/_remote_socket_stub.py b/python/google/appengine/api/remote_socket/_remote_socket_stub.py index 9c070fb7..5a8e53bf 100644 --- a/python/google/appengine/api/remote_socket/_remote_socket_stub.py +++ b/python/google/appengine/api/remote_socket/_remote_socket_stub.py @@ -22,8 +22,10 @@ A stub version of the Remote Socket API for the dev_appserver. 
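The _connect_in_progress changes above let application code use the standard non-blocking connect idiom against the remote socket API: a connect() that fails with EINPROGRESS no longer leaves later getpeername/recv/send calls raising ENOTCONN, and the first operation that succeeds flips the socket to connected. A standard-library sketch of the pattern being supported (host, port and timeout are arbitrary example values):

    import errno
    import os
    import select
    import socket

    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.setblocking(False)
    err = s.connect_ex(('example.com', 80))   # typically returns errno.EINPROGRESS
    if err not in (0, errno.EINPROGRESS, errno.EWOULDBLOCK):
        raise socket.error(err, os.strerror(err))
    # The connect has either completed or failed once the socket selects writable.
    _, writable, _ = select.select([], [s], [], 5.0)
    if writable:
        s.send(b'HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n')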
from __future__ import with_statement +import binascii import errno import os +import re import select import socket import threading @@ -33,6 +35,7 @@ import uuid from google.appengine.api import apiproxy_stub from google.appengine.api.remote_socket import _remote_socket_addr from google.appengine.api.remote_socket import remote_socket_service_pb +from google.appengine.api.remote_socket.remote_socket_service_pb import RemoteSocketServiceError from google.appengine.runtime import apiproxy_errors @@ -52,16 +55,16 @@ def TranslateSystemErrors(method): return method(self, *args, **kwargs) except socket.gaierror, e: raise apiproxy_errors.ApplicationError( - remote_socket_service_pb.RemoteSocketServiceError.GAI_ERROR, + RemoteSocketServiceError.GAI_ERROR, 'system_error:%u error_detail:"%s"' % (e.errno, e.strerror)) except socket.timeout, e: raise apiproxy_errors.ApplicationError( - remote_socket_service_pb.RemoteSocketServiceError.SYSTEM_ERROR, + RemoteSocketServiceError.SYSTEM_ERROR, 'system_error:%u error_detail:"%s"' % (errno.EAGAIN, os.strerror(errno.EAGAIN))) except socket.error, e: raise apiproxy_errors.ApplicationError( - remote_socket_service_pb.RemoteSocketServiceError.SYSTEM_ERROR, + RemoteSocketServiceError.SYSTEM_ERROR, 'system_error:%u error_detail:"%s"' % (e.errno, e.strerror)) return WrappedMethod @@ -89,9 +92,23 @@ class SocketState(object): self.timeout = timeout + +_MOCK_SOCKET_OPTIONS = ( + 'SOL_SOCKET:SO_KEEPALIVE=00000000,' + 'SOL_SOCKET:SO_DEBUG=80000000,' + 'SOL_TCP:TCP_NODELAY=00000000,' + 'SOL_SOCKET:SO_LINGER=0000000000000000,' + 'SOL_SOCKET:SO_OOBINLINE=00000000,' + 'SOL_SOCKET:SO_SNDBUF=00002000,' + 'SOL_SOCKET:SO_RCVBUF=00002000,' + 'SOL_SOCKET:SO_REUSEADDR=01000000') + + class RemoteSocketServiceStub(apiproxy_stub.APIProxyStub): """Stub implementation of the Remote Socket API.""" + THREADSAFE = True + _AF_MAP = { socket.AF_INET: remote_socket_service_pb.CreateSocketRequest.IPv4, socket.AF_INET6: remote_socket_service_pb.CreateSocketRequest.IPv6, @@ -111,7 +128,10 @@ class RemoteSocketServiceStub(apiproxy_stub.APIProxyStub): socket.SHUT_RDWR), } - def __init__(self, service_name='remote_socket', get_time=time.time): + def __init__(self, + service_name='remote_socket', + get_time=time.time, + mock_options_spec=_MOCK_SOCKET_OPTIONS): """Initializer. 
Args: @@ -122,19 +142,23 @@ class RemoteSocketServiceStub(apiproxy_stub.APIProxyStub): super(RemoteSocketServiceStub, self).__init__(service_name) self._descriptor_to_socket_state = {} self._time = get_time + self._mock_options = _MockSocketOptions(mock_options_spec) + + def ResetMockOptions(self, mock_options_spec): + self._mock_options = _MockSocketOptions(mock_options_spec) def _LookupSocket(self, descriptor): with self._mutex: val = self._descriptor_to_socket_state.get(descriptor) if not val: raise apiproxy_errors.ApplicationError( - remote_socket_service_pb.RemoteSocketServiceError.SOCKET_CLOSED) + RemoteSocketServiceError.SOCKET_CLOSED) now = self._time() if val.last_accessed_time < now - 120: del self._descriptor_to_socket_state[descriptor] raise apiproxy_errors.ApplicationError( - remote_socket_service_pb.RemoteSocketServiceError.SOCKET_CLOSED) + RemoteSocketServiceError.SOCKET_CLOSED) val.last_accessed_time = now return val @@ -146,7 +170,7 @@ class RemoteSocketServiceStub(apiproxy_stub.APIProxyStub): self._TRANSLATED_AF_MAP[family], ap_proto.packed_address()) except ValueError: raise apiproxy_errors.ApplicationError( - remote_socket_service_pb.RemoteSocketServiceError.INVALID_REQUEST, + RemoteSocketServiceError.INVALID_REQUEST, 'Invalid Address.') return (addr, ap_proto.port()) @@ -176,7 +200,7 @@ class RemoteSocketServiceStub(apiproxy_stub.APIProxyStub): family, request.proxy_external_ip()) if not self._BindAllowed(addr, port): raise apiproxy_errors.ApplicationError( - remote_socket_service_pb.RemoteSocketServiceError.PERMISSION_DENIED, + RemoteSocketServiceError.PERMISSION_DENIED, 'Attempt to bind port without permission.') sock.bind((addr, port)) if request.has_remote_ip(): @@ -199,7 +223,7 @@ class RemoteSocketServiceStub(apiproxy_stub.APIProxyStub): state.family, request.proxy_external_ip()) if not self._BindAllowed(addr, port): raise apiproxy_errors.ApplicationError( - remote_socket_service_pb.RemoteSocketServiceError.PERMISSION_DENIED, + RemoteSocketServiceError.PERMISSION_DENIED, 'Attempt to bind port without permission.') state.sock.bind((addr, port)) self._AddressPortTupleToProto(state.family, state.sock.getsockname(), @@ -233,12 +257,27 @@ class RemoteSocketServiceStub(apiproxy_stub.APIProxyStub): ret.set_value( state.sock.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR, 1024)) else: - raise apiproxy_errors.ApplicationError( - remote_socket_service_pb.RemoteSocketServiceError.INVALID_REQUEST, - 'Invalid GetSocketOption level/option.') + value = self._mock_options.GetMockValue(opt.level(), opt.option()) + if value is None: + raise apiproxy_errors.ApplicationError( + RemoteSocketServiceError.PERMISSION_DENIED, + 'Attempt to get blocked socket option.') + + ret = response.add_options() + ret.set_level(opt.level()) + ret.set_option(opt.option()) + ret.set_value(value) + + def _Dynamic_SetSocketOptions(self, request, response): - raise NotImplementedError() + self._LookupSocket(request.socket_descriptor()) + for opt in request.options_list(): + value = self._mock_options.GetMockValue(opt.level(), opt.option()) + if value is None: + raise apiproxy_errors.ApplicationError( + RemoteSocketServiceError.PERMISSION_DENIED, + 'Attempt to set blocked socket option.') @TranslateSystemErrors def _Dynamic_GetSocketName(self, request, response): @@ -262,12 +301,12 @@ class RemoteSocketServiceStub(apiproxy_stub.APIProxyStub): if state.protocol == socket.SOCK_STREAM: if request.stream_offset() != state.stream_offset: raise apiproxy_errors.ApplicationError( - 
remote_socket_service_pb.RemoteSocketServiceError.INVALID_REQUEST, + RemoteSocketServiceError.INVALID_REQUEST, 'Invalid stream_offset.') flags = request.flags() if flags != 0: raise apiproxy_errors.ApplicationError( - remote_socket_service_pb.RemoteSocketServiceError.INVALID_REQUEST, + RemoteSocketServiceError.INVALID_REQUEST, 'Invalid flags.') if request.has_send_to(): data_sent = state.sock.sendto( @@ -345,7 +384,7 @@ class RemoteSocketServiceStub(apiproxy_stub.APIProxyStub): if events & ~(remote_socket_service_pb.PollEvent.SOCKET_POLLIN| remote_socket_service_pb.PollEvent.SOCKET_POLLOUT): raise apiproxy_errors.ApplicationError( - remote_socket_service_pb.RemoteSocketServiceError.INVALID_REQUEST, + RemoteSocketServiceError.INVALID_REQUEST, 'Invalid requested_events.') if events & remote_socket_service_pb.PollEvent.SOCKET_POLLIN: rfds.append(state.sock) @@ -367,3 +406,39 @@ class RemoteSocketServiceStub(apiproxy_stub.APIProxyStub): o.set_observed_events( o.observed_events()| remote_socket_service_pb.PollEvent.SOCKET_POLLOUT) + + + + + + + + +class _MockSocketOptions(object): + + def __init__(self, mock_options_spec): + self._mock_options = {} + + option_spec_re = re.compile(r'^(\w+):(\w+)=(\w+)$') + for mock_option_spec in mock_options_spec.split(','): + if not mock_option_spec: + continue + + m = option_spec_re.match(mock_option_spec) + if m is None: + raise Exception('option specification malformed. ' + 'expected :=. Saw "%s"' + % mock_option_spec) + + level, name, value = m.groups() + + numeric_level = getattr(remote_socket_service_pb.SocketOption, + 'SOCKET_' + level) + numeric_name = getattr(remote_socket_service_pb.SocketOption, + 'SOCKET_' + name) + raw_value = binascii.a2b_hex(value) + + self._mock_options[(numeric_level, numeric_name)] = raw_value + + def GetMockValue(self, level, name): + return self._mock_options.get((level, name), None) diff --git a/python/google/appengine/api/search/search.py b/python/google/appengine/api/search/search.py index 8d36a2aa..cff794c0 100644 --- a/python/google/appengine/api/search/search.py +++ b/python/google/appengine/api/search/search.py @@ -1996,7 +1996,7 @@ class QueryOptions(object): QueryOptions(limit=page_size, offset=next_page)) """ - def __init__(self, limit=20, number_found_accuracy=100, cursor=None, + def __init__(self, limit=20, number_found_accuracy=None, cursor=None, offset=None, sort_options=None, returned_fields=None, ids_only=False, snippeted_fields=None, returned_expressions=None): @@ -2143,7 +2143,7 @@ def _CopyQueryOptionsObjectToProtocolBuffer(query, options, params): """Copies a QueryOptions object to a SearchParams proto buff.""" offset = 0 web_safe_string = None - cursor_type = search_service_pb.SearchParams.NONE + cursor_type = None offset = options.offset if options.cursor: cursor = options.cursor @@ -2168,11 +2168,12 @@ def _CopyQueryOptionsToProtocolBuffer( if offset: params.set_offset(offset) params.set_limit(limit) - params.set_matched_count_accuracy(number_found_accuracy) + if number_found_accuracy is not None: + params.set_matched_count_accuracy(number_found_accuracy) if cursor: params.set_cursor(cursor.encode('utf-8')) - - params.set_cursor_type(cursor_type) + if cursor_type is not None: + params.set_cursor_type(cursor_type) if ids_only: params.set_keys_only(ids_only) if returned_fields or snippeted_fields or returned_expressions: diff --git a/python/google/appengine/api/search/search_service_pb.py b/python/google/appengine/api/search/search_service_pb.py index 6f2aa7ca..2c22b587 100644 --- 
a/python/google/appengine/api/search/search_service_pb.py +++ b/python/google/appengine/api/search/search_service_pb.py @@ -3876,7 +3876,7 @@ class SearchParams(ProtocolBuffer.ProtocolMessage): has_limit_ = 0 limit_ = 20 has_matched_count_accuracy_ = 0 - matched_count_accuracy_ = 100 + matched_count_accuracy_ = 0 has_scorer_spec_ = 0 scorer_spec_ = None has_field_spec_ = 0 @@ -3974,7 +3974,7 @@ class SearchParams(ProtocolBuffer.ProtocolMessage): def clear_matched_count_accuracy(self): if self.has_matched_count_accuracy_: self.has_matched_count_accuracy_ = 0 - self.matched_count_accuracy_ = 100 + self.matched_count_accuracy_ = 0 def has_matched_count_accuracy(self): return self.has_matched_count_accuracy_ diff --git a/python/google/appengine/api/search/simple_search_stub.py b/python/google/appengine/api/search/simple_search_stub.py index 90d8240d..f7bc7277 100644 --- a/python/google/appengine/api/search/simple_search_stub.py +++ b/python/google/appengine/api/search/simple_search_stub.py @@ -814,33 +814,26 @@ class SearchServiceStub(apiproxy_stub.APIProxyStub): self._FillSearchResponse(results, position_range, params.cursor_type(), _ScoreRequested(params), response) - def _CopyBaseDocument(self, doc, doc_copy): + def _CopyDocument(self, doc, doc_copy, field_names, ids_only=None): + """Copies Document, doc, to doc_copy restricting fields to field_names.""" doc_copy.set_id(doc.id()) - if doc.has_order_id(): - doc_copy.set_order_id(doc.order_id()) + if ids_only: + return if doc.has_language(): doc_copy.set_language(doc.language()) - - def _CopyDocument(self, doc, doc_copy, field_spec=None, ids_only=None): - """Copies Document, doc, to doc_copy restricting fields to field_spec.""" - if ids_only: - self._CopyBaseDocument(doc, doc_copy) - elif field_spec and field_spec.name_list(): - self._CopyBaseDocument(doc, doc_copy) - for field in doc.field_list(): - if field.name() in field_spec.name_list(): - doc_copy.add_field().CopyFrom(field) - else: - doc_copy.CopyFrom(doc) + for field in doc.field_list(): + if not field_names or field.name() in field_names: + doc_copy.add_field().CopyFrom(field) + doc_copy.set_order_id(doc.order_id()) def _FillSearchResponse(self, results, position_range, cursor_type, score, - response, field_spec=None, ids_only=None): + response, field_names=None, ids_only=None): """Fills the SearchResponse with a selection of results.""" for i in position_range: result = results[i] search_result = response.add_result() self._CopyDocument(result.document, search_result.mutable_document(), - field_spec, ids_only) + field_names, ids_only) if cursor_type == search_service_pb.SearchParams.PER_RESULT: search_result.set_cursor(result.document.id()) if score: @@ -855,6 +848,7 @@ class SearchServiceStub(apiproxy_stub.APIProxyStub): expr.mutable_value().set_type(document_pb.FieldValue.NUMBER) else: expr.mutable_value().set_string_value(expression) + expr.mutable_value().set_type(document_pb.FieldValue.HTML) def _Dynamic_Search(self, request, response): """A local implementation of SearchService.Search RPC. 
@@ -897,10 +891,9 @@ class SearchServiceStub(apiproxy_stub.APIProxyStub): else: position_range = range(0) field_spec = None - if params.has_field_spec(): - field_spec = params.field_spec() + field_names = params.field_spec().name_list() self._FillSearchResponse(results, position_range, params.cursor_type(), - _ScoreRequested(params), response, field_spec, + _ScoreRequested(params), response, field_names, params.keys_only()) if (params.cursor_type() == search_service_pb.SearchParams.SINGLE and len(position_range)): diff --git a/python/google/appengine/api/search/stub/expression_evaluator.py b/python/google/appengine/api/search/stub/expression_evaluator.py index 3145dea8..ad0dbba0 100644 --- a/python/google/appengine/api/search/stub/expression_evaluator.py +++ b/python/google/appengine/api/search/stub/expression_evaluator.py @@ -192,7 +192,7 @@ class ExpressionEvaluator(object): field_val = search_util.GetFieldValue( search_util.GetFieldInDocument(self._doc_pb, field)) if not field_val: - return None + return '' return '%s...' % field_val[:search_util.DEFAULT_MAX_SNIPPET_LENGTH] def _Unsupported(self, method): diff --git a/python/google/appengine/api/servers/servers_stub.py b/python/google/appengine/api/servers/servers_stub.py index f52523e2..1c78610d 100644 --- a/python/google/appengine/api/servers/servers_stub.py +++ b/python/google/appengine/api/servers/servers_stub.py @@ -25,6 +25,7 @@ from google.appengine.runtime import apiproxy_errors class ServersServiceStub(apiproxy_stub.APIProxyStub): _ACCEPTS_REQUEST_ID = True + THREADSAFE = True def __init__(self, request_data): super(ServersServiceStub, self).__init__('servers', diff --git a/python/google/appengine/api/urlfetch_stub.py b/python/google/appengine/api/urlfetch_stub.py index f253290b..edb1fd25 100644 --- a/python/google/appengine/api/urlfetch_stub.py +++ b/python/google/appengine/api/urlfetch_stub.py @@ -142,6 +142,8 @@ def _IsAllowedPort(port): class URLFetchServiceStub(apiproxy_stub.APIProxyStub): """Stub version of the urlfetch API to be used with apiproxy_stub_map.""" + THREADSAFE = True + def __init__(self, service_name='urlfetch', urlmatchers_to_fetch_functions=None): diff --git a/python/google/appengine/api/user_service_stub.py b/python/google/appengine/api/user_service_stub.py index 3557ffba..4ffee32d 100644 --- a/python/google/appengine/api/user_service_stub.py +++ b/python/google/appengine/api/user_service_stub.py @@ -52,6 +52,8 @@ class UserServiceStub(apiproxy_stub.APIProxyStub): _ACCEPTS_REQUEST_ID = True + THREADSAFE = True + def __init__(self, login_url=_DEFAULT_LOGIN_URL, logout_url=_DEFAULT_LOGOUT_URL, diff --git a/python/google/appengine/api/xmpp/xmpp_service_stub.py b/python/google/appengine/api/xmpp/xmpp_service_stub.py index 47646c0b..8ebfff8c 100644 --- a/python/google/appengine/api/xmpp/xmpp_service_stub.py +++ b/python/google/appengine/api/xmpp/xmpp_service_stub.py @@ -48,6 +48,8 @@ class XmppServiceStub(apiproxy_stub.APIProxyStub): instead of sending any stanzas. """ + THREADSAFE = True + def __init__(self, log=logging.info, service_name='xmpp'): """Initializer. 
diff --git a/python/google/appengine/datastore/datastore_pb.py b/python/google/appengine/datastore/datastore_pb.py index 8fc088f9..049866a8 100644 --- a/python/google/appengine/datastore/datastore_pb.py +++ b/python/google/appengine/datastore/datastore_pb.py @@ -4371,6 +4371,19 @@ class GetResponse(ProtocolBuffer.ProtocolMessage): _STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.GetResponse' class PutRequest(ProtocolBuffer.ProtocolMessage): + + + CURRENT = 0 + SEQUENTIAL = 1 + + _AutoIdPolicy_NAMES = { + 0: "CURRENT", + 1: "SEQUENTIAL", + } + + def AutoIdPolicy_Name(cls, x): return cls._AutoIdPolicy_NAMES.get(x, "") + AutoIdPolicy_Name = classmethod(AutoIdPolicy_Name) + has_transaction_ = 0 transaction_ = None has_trusted_ = 0 @@ -4379,6 +4392,8 @@ class PutRequest(ProtocolBuffer.ProtocolMessage): force_ = 0 has_mark_changes_ = 0 mark_changes_ = 0 + has_auto_id_policy_ = 0 + auto_id_policy_ = 0 def __init__(self, contents=None): self.entity_ = [] @@ -4493,6 +4508,19 @@ class PutRequest(ProtocolBuffer.ProtocolMessage): def clear_snapshot(self): self.snapshot_ = [] + def auto_id_policy(self): return self.auto_id_policy_ + + def set_auto_id_policy(self, x): + self.has_auto_id_policy_ = 1 + self.auto_id_policy_ = x + + def clear_auto_id_policy(self): + if self.has_auto_id_policy_: + self.has_auto_id_policy_ = 0 + self.auto_id_policy_ = 0 + + def has_auto_id_policy(self): return self.has_auto_id_policy_ + def MergeFrom(self, x): assert x is not self @@ -4503,6 +4531,7 @@ class PutRequest(ProtocolBuffer.ProtocolMessage): if (x.has_force()): self.set_force(x.force()) if (x.has_mark_changes()): self.set_mark_changes(x.mark_changes()) for i in xrange(x.snapshot_size()): self.add_snapshot().CopyFrom(x.snapshot(i)) + if (x.has_auto_id_policy()): self.set_auto_id_policy(x.auto_id_policy()) def Equals(self, x): if x is self: return 1 @@ -4523,6 +4552,8 @@ class PutRequest(ProtocolBuffer.ProtocolMessage): if len(self.snapshot_) != len(x.snapshot_): return 0 for e1, e2 in zip(self.snapshot_, x.snapshot_): if e1 != e2: return 0 + if self.has_auto_id_policy_ != x.has_auto_id_policy_: return 0 + if self.has_auto_id_policy_ and self.auto_id_policy_ != x.auto_id_policy_: return 0 return 1 def IsInitialized(self, debug_strs=None): @@ -4548,6 +4579,7 @@ class PutRequest(ProtocolBuffer.ProtocolMessage): if (self.has_mark_changes_): n += 2 n += 1 * len(self.snapshot_) for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSize()) + if (self.has_auto_id_policy_): n += 1 + self.lengthVarInt64(self.auto_id_policy_) return n def ByteSizePartial(self): @@ -4562,6 +4594,7 @@ class PutRequest(ProtocolBuffer.ProtocolMessage): if (self.has_mark_changes_): n += 2 n += 1 * len(self.snapshot_) for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSizePartial()) + if (self.has_auto_id_policy_): n += 1 + self.lengthVarInt64(self.auto_id_policy_) return n def Clear(self): @@ -4572,6 +4605,7 @@ class PutRequest(ProtocolBuffer.ProtocolMessage): self.clear_force() self.clear_mark_changes() self.clear_snapshot() + self.clear_auto_id_policy() def OutputUnchecked(self, out): for i in xrange(len(self.entity_)): @@ -4599,6 +4633,9 @@ class PutRequest(ProtocolBuffer.ProtocolMessage): out.putVarInt32(74) out.putVarInt32(self.snapshot_[i].ByteSize()) self.snapshot_[i].OutputUnchecked(out) + if (self.has_auto_id_policy_): + out.putVarInt32(80) + out.putVarInt32(self.auto_id_policy_) def OutputPartial(self, out): for i in xrange(len(self.entity_)): @@ 
-4626,6 +4663,9 @@ class PutRequest(ProtocolBuffer.ProtocolMessage): out.putVarInt32(74) out.putVarInt32(self.snapshot_[i].ByteSizePartial()) self.snapshot_[i].OutputPartial(out) + if (self.has_auto_id_policy_): + out.putVarInt32(80) + out.putVarInt32(self.auto_id_policy_) def TryMerge(self, d): while d.avail() > 0: @@ -4663,6 +4703,9 @@ class PutRequest(ProtocolBuffer.ProtocolMessage): d.skip(length) self.add_snapshot().TryMerge(tmp) continue + if tt == 80: + self.set_auto_id_policy(d.getVarInt32()) + continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError @@ -4702,6 +4745,7 @@ class PutRequest(ProtocolBuffer.ProtocolMessage): res+=e.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" cnt+=1 + if self.has_auto_id_policy_: res+=prefix+("auto_id_policy: %s\n" % self.DebugFormatInt32(self.auto_id_policy_)) return res @@ -4715,6 +4759,7 @@ class PutRequest(ProtocolBuffer.ProtocolMessage): kforce = 7 kmark_changes = 8 ksnapshot = 9 + kauto_id_policy = 10 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", @@ -4725,7 +4770,8 @@ class PutRequest(ProtocolBuffer.ProtocolMessage): 7: "force", 8: "mark_changes", 9: "snapshot", - }, 9) + 10: "auto_id_policy", + }, 10) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, @@ -4736,7 +4782,8 @@ class PutRequest(ProtocolBuffer.ProtocolMessage): 7: ProtocolBuffer.Encoder.NUMERIC, 8: ProtocolBuffer.Encoder.NUMERIC, 9: ProtocolBuffer.Encoder.STRING, - }, 9, ProtocolBuffer.Encoder.MAX_TYPE) + 10: ProtocolBuffer.Encoder.NUMERIC, + }, 10, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" diff --git a/python/google/appengine/datastore/datastore_query.py b/python/google/appengine/datastore/datastore_query.py index ca6554f4..35f87857 100644 --- a/python/google/appengine/datastore/datastore_query.py +++ b/python/google/appengine/datastore/datastore_query.py @@ -1958,15 +1958,14 @@ class Query(_BaseQuery): if self._group_by: extra = set(projection) - set(self._group_by) if extra: - raise datastore_errors.BadQueryError( + raise datastore_errors.BadRequestError( 'projections includes properties not in the group_by argument: %s' % extra) pb.property_name_list().extend(projection) elif self._group_by: - raise datastore_errors.BadQueryError( + raise datastore_errors.BadRequestError( 'cannot specify group_by without a projection') - if QueryOptions.produce_cursors(query_options, conn.config): pb.set_compile(True) diff --git a/python/google/appengine/datastore/datastore_rpc.py b/python/google/appengine/datastore/datastore_rpc.py index bf14ce0b..7248b546 100644 --- a/python/google/appengine/datastore/datastore_rpc.py +++ b/python/google/appengine/datastore/datastore_rpc.py @@ -67,6 +67,8 @@ from google.appengine.api import datastore_types from google.appengine.api.app_identity import app_identity from google.appengine.datastore import datastore_pb +from google.appengine.datastore import datastore_v4a_pb +from google.appengine.datastore import entity_v4_pb from google.appengine.runtime import apiproxy_errors @@ -675,6 +677,14 @@ class Configuration(BaseConfiguration): return value @ConfigOption + def max_allocate_ids_keys(value): + """The maximum number of keys in a v4 AllocateIds rpc.""" + if not (isinstance(value, (int, long)) and value > 0): + raise datastore_errors.BadArgumentError( + 'max_allocate_ids_keys should be a positive integer') + return value + + @ConfigOption def max_rpc_bytes(value): """The maximum serialized size of a Get/Put/Delete without batching.""" if not (isinstance(value, (int, long)) and value > 0): @@ -1087,11 +1097,12 @@ 
class BaseConnection(object): - def create_rpc(self, config=None): + def create_rpc(self, config=None, service_name='datastore_v3'): """Create an RPC object using the configuration parameters. Args: config: Optional Configuration object. + service_name: Optional datastore service name. Returns: A new UserRPC object with the designated settings. @@ -1114,7 +1125,7 @@ class BaseConnection(object): def callback(): return on_completion(rpc) - rpc = apiproxy_stub_map.UserRPC('datastore_v3', deadline, callback) + rpc = apiproxy_stub_map.UserRPC(service_name, deadline, callback) return rpc def _set_request_read_policy(self, request, config=None): @@ -1165,7 +1176,8 @@ class BaseConnection(object): return None def make_rpc_call(self, config, method, request, response, - get_result_hook=None, user_data=None): + get_result_hook=None, user_data=None, + service_name='datastore_v3'): """Make an RPC call. Except for the added config argument, this is a thin wrapper @@ -1192,7 +1204,7 @@ class BaseConnection(object): if isinstance(config, apiproxy_stub_map.UserRPC): rpc = config else: - rpc = self.create_rpc(config) + rpc = self.create_rpc(config, service_name) rpc.make_call(method, request, response, get_result_hook, user_data) self._add_pending(rpc) return rpc @@ -1231,6 +1243,7 @@ class BaseConnection(object): MAX_GET_KEYS = 1000 MAX_PUT_ENTITIES = 500 MAX_DELETE_KEYS = 500 + MAX_ALLOCATE_IDS_KEYS = 500 DEFAULT_MAX_ENTITY_GROUPS_PER_RPC = 10 @@ -1243,32 +1256,51 @@ class BaseConnection(object): return Configuration.max_entity_groups_per_rpc( config, self.__config) or self.DEFAULT_MAX_ENTITY_GROUPS_PER_RPC - def __extract_entity_group(self, value): + def _extract_entity_group(self, value): """Internal helper: extracts the entity group from a key or entity.""" if isinstance(value, entity_pb.EntityProto): value = value.key() return value.path().element(0) - def __group_indexed_pbs_by_entity_group(self, values, value_to_pb): + def _map_and_group(self, values, map_fn, group_fn): + """Internal helper: map values to keys and group by key. Here key is any + object derived from an input value by map_fn, and which can be grouped + by group_fn. + + Args: + values: The values to be grouped by applying get_group(to_ref(value)). + map_fn: a function that maps a value to a key to be grouped. + group_fn: a function that groups the keys output by map_fn. + + Returns: + A list where each element is a list of (key, index) pairs. Here + index is the location of the value from which the key was derived in + the original list. + """ + indexed_key_groups = collections.defaultdict(list) + for index, value in enumerate(values): + key = map_fn(value) + indexed_key_groups[group_fn(key)].append((key, index)) + return indexed_key_groups.values() + + def __group_indexed_pbs_by_entity_group(self, values, to_ref): """Internal helper: group pbs by entity group. Args: values: The values to be grouped by entity group. - value_to_pb: A function that translates a value to a pb. + to_ref: A function that translates a value to a Reference pb. Returns: A list where each element is a list of (pb, index) pairs. Here index is the location of the value from which pb was derived in the original list. 
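_map_and_group generalizes the old entity-group bucketing: each value is mapped to a key, the keys are bucketed by a caller-supplied group function, and every key keeps the index it came from so results can later be put back in the caller's order. A standalone sketch of the same helper with a toy example (plain Python, arbitrary sample data):

    import collections

    def map_and_group(values, map_fn, group_fn):
        """Group (map_fn(value), original_index) pairs by group_fn(key)."""
        groups = collections.defaultdict(list)
        for index, value in enumerate(values):
            key = map_fn(value)
            groups[group_fn(key)].append((key, index))
        return groups.values()

    # Toy example: group upper-cased words by first letter, keeping positions.
    buckets = map_and_group(['apple', 'avocado', 'banana'],
                            map_fn=str.upper,
                            group_fn=lambda key: key[0])
    # One possible result (bucket order is unspecified):
    #   [[('APPLE', 0), ('AVOCADO', 1)], [('BANANA', 2)]]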
""" - indexed_pbs_by_entity_group = collections.defaultdict(list) - for index, value in enumerate(values): - pb = value_to_pb(value) - eg = self.__extract_entity_group(pb) + def get_entity_group(ref): + eg = self._extract_entity_group(ref) + + return (eg.type(), eg.id() or eg.name() or ('new', id(eg))) - uid = (eg.type(), eg.id() or eg.name() or ('new', id(eg))) - indexed_pbs_by_entity_group[uid].append((pb, index)) - return indexed_pbs_by_entity_group.values() + return self._map_and_group(values, to_ref, get_entity_group) def __create_result_index_pairs(self, indexes): """Internal helper: build a function that ties an index with each result. @@ -1305,13 +1337,13 @@ class BaseConnection(object): return results return sort_result_index_pairs - def __generate_pb_lists(self, indexed_pb_lists_by_eg, base_size, max_count, - max_egs_per_rpc, config): + def _generate_pb_lists(self, grouped_values, base_size, max_count, + max_groups, config): """Internal helper: repeatedly yield a list of 2 elements. Args: - indexed_pb_lists_by_eg: A list of lists. The inner lists consist of - objects that all belong to the same entity group. + grouped_values: A list of lists. The inner lists consist of objects + grouped by e.g. entity group or id sequence. base_size: An integer representing the base size of an rpc. Used for splitting operations across multiple RPCs due to size limitations. @@ -1319,10 +1351,10 @@ class BaseConnection(object): max_count: An integer representing the maximum number of objects we can send in an rpc. Used for splitting operations across multiple RPCs. - max_egs_per_rpc: An integer representing the maximum number of entity - groups we can have represented in an rpc. Can be None. + max_groups: An integer representing the maximum number of groups we can + have represented in an rpc. Can be None, in which case no constraint. - config: The config object to use. + config: The config object, defining max rpc size in bytes. Yields: Repeatedly yields 2 element tuples. The first element is a list of @@ -1335,15 +1367,15 @@ class BaseConnection(object): pbs = [] pb_indexes = [] size = base_size - num_entity_groups = 0 - for indexed_pbs in indexed_pb_lists_by_eg: - num_entity_groups += 1 - if max_egs_per_rpc is not None and num_entity_groups > max_egs_per_rpc: + num_groups = 0 + for indexed_pbs in grouped_values: + num_groups += 1 + if max_groups is not None and num_groups > max_groups: yield (pbs, pb_indexes) pbs = [] pb_indexes = [] size = base_size - num_entity_groups = 1 + num_groups = 1 for indexed_pb in indexed_pbs: (pb, index) = indexed_pb @@ -1358,7 +1390,7 @@ class BaseConnection(object): pbs = [] pb_indexes = [] size = base_size - num_entity_groups = 1 + num_groups = 1 pbs.append(pb) pb_indexes.append(index) size += incr_size @@ -1433,7 +1465,7 @@ class BaseConnection(object): - pbsgen = self.__generate_pb_lists( + pbsgen = self._generate_pb_lists( indexed_keys_by_entity_group, base_size, max_count, max_egs_per_rpc, config) @@ -1553,7 +1585,7 @@ class BaseConnection(object): else: max_egs_per_rpc = None - pbsgen = self.__generate_pb_lists( + pbsgen = self._generate_pb_lists( indexed_entities_by_entity_group, base_size, max_count, max_egs_per_rpc, config) @@ -1628,7 +1660,7 @@ class BaseConnection(object): - pbsgen = self.__generate_pb_lists( + pbsgen = self._generate_pb_lists( indexed_keys_by_entity_group, base_size, max_count, max_egs_per_rpc, config) rpcs = [] @@ -1696,7 +1728,7 @@ class Connection(BaseConnection): """Transaction-less connection class. 
This contains those operations that are not allowed on transactional - connections. (Currently only allocate_ids.) + connections. (Currently only allocate_ids and reserve_key_ids.) """ @_positional(1) @@ -1732,6 +1764,38 @@ class Connection(BaseConnection): + def __to_v4_key(self, ref): + """Convert a valid v3 Reference pb to a v4 Key pb.""" + key = entity_v4_pb.Key() + key.mutable_partition_id().set_dataset_id(ref.app()) + if ref.name_space(): + key.mutable_partition_id().set_namespace(ref.name_space()) + for el_v3 in ref.path().element_list(): + el_v4 = key.add_path_element() + el_v4.set_kind(el_v3.type()) + if el_v3.has_id(): + el_v4.set_id(el_v3.id()) + if el_v3.has_name(): + el_v4.set_name(el_v3.name()) + return key + + def __to_v3_reference(self, key): + """Convert a valid v4 Key pb to a v3 Reference pb.""" + ref = entity_pb.Reference() + ref.set_app(key.partition_id().dataset_id()) + if key.partition_id().has_namespace(): + ref.set_name_space(key.partition_id().namespace()) + for el_v4 in key.path_element_list(): + el_v3 = ref.mutable_path().add_element() + el_v3.set_type(el_v4.kind()) + if el_v4.has_id(): + el_v3.set_id(el_v4.id()) + if el_v4.has_name(): + el_v3.set_name(el_v4.name()) + return ref + + + def allocate_ids(self, key, size=None, max=None): """Synchronous AllocateIds operation. @@ -1749,7 +1813,7 @@ class Connection(BaseConnection): def async_allocate_ids(self, config, key, size=None, max=None, extra_hook=None): - """Asynchronous Get operation. + """Asynchronous AllocateIds operation. Args: config: A Configuration object or None. Defaults are taken from @@ -1803,6 +1867,62 @@ class Connection(BaseConnection): return pair + + def _reserve_keys(self, keys): + """Synchronous AllocateIds operation to reserve the given keys. + + Sends one or more v4 AllocateIds rpcs with keys to reserve. + Reserved keys must be complete and must have valid ids. + + Args: + keys: Iterable of user-level keys. + """ + self._async_reserve_keys(None, keys).get_result() + + def _async_reserve_keys(self, config, keys, extra_hook=None): + """Asynchronous AllocateIds operation to reserve the given keys. + + Sends one or more v4 AllocateIds rpcs with keys to reserve. + Reserved keys must be complete and must have valid ids. + + Args: + config: A Configuration object or None to use Connection default. + keys: Iterable of user-level keys. + extra_hook: Optional function to be called on rpc result. + + Returns: + None, or the result of user-supplied extra_hook. 
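__to_v4_key and __to_v3_reference above are straight structural translations: the app id becomes the partition's dataset id, an optional namespace carries over, and each path element keeps its kind plus either a numeric id or a string name. A dict-based sketch of the forward mapping (illustrative shapes only, not the protocol buffer API):

    def v3_reference_to_v4_key(ref):
        """Map a v3 Reference-like dict to a v4 Key-like dict."""
        key = {'partition_id': {'dataset_id': ref['app']}, 'path': []}
        if ref.get('name_space'):
            key['partition_id']['namespace'] = ref['name_space']
        for element in ref['path']:
            path_element = {'kind': element['type']}
            if 'id' in element:
                path_element['id'] = element['id']
            if 'name' in element:
                path_element['name'] = element['name']
            key['path'].append(path_element)
        return key

    ref = {'app': 'dev~myapp',
           'path': [{'type': 'Parent', 'name': 'p'},
                    {'type': 'Child', 'id': 42}]}
    key = v3_reference_to_v4_key(ref)
    assert key['path'][1] == {'kind': 'Child', 'id': 42}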
+ """ + def to_id_key(key): + if key.path().element_size() == 1: + return 'root_idkey' + else: + eg = self._extract_entity_group(key) + return (eg.type(), eg.id() or eg.name()) + + keys_by_idkey = self._map_and_group(keys, self.__adapter.key_to_pb, + to_id_key) + max_count = (Configuration.max_allocate_ids_keys(config, self.__config) or + self.MAX_ALLOCATE_IDS_KEYS) + + rpcs = [] + pbsgen = self._generate_pb_lists(keys_by_idkey, 0, max_count, None, config) + for pbs, _ in pbsgen: + req = datastore_v4a_pb.AllocateIdsRequest() + req.reserve_list().extend([self.__to_v4_key(key) for key in pbs]) + resp = datastore_v4a_pb.AllocateIdsResponse() + rpcs.append(self.make_rpc_call(config, 'AllocateIds', req, resp, + self.__reserve_keys_hook, extra_hook, + 'datastore_v4')) + return MultiRpc(rpcs) + + def __reserve_keys_hook(self, rpc): + """Internal get_result_hook for _reserve_keys.""" + self.check_rpc_success(rpc) + if rpc.user_data is not None: + return rpc.user_data(rpc.response) + + class TransactionOptions(Configuration): """An immutable class that contains options for a transaction.""" @@ -1810,8 +1930,8 @@ class TransactionOptions(Configuration): """Create a nested transaction under an existing one.""" MANDATORY = 2 - """Always propagate an exsiting transaction, throw an exception if there is no - exsiting transaction.""" + """Always propagate an existing transaction, throw an exception if there is + no existing transaction.""" ALLOWED = 3 """If there is an existing transaction propagate it.""" diff --git a/python/google/appengine/datastore/datastore_v4a_pb.py b/python/google/appengine/datastore/datastore_v4a_pb.py new file mode 100644 index 00000000..293bcc03 --- /dev/null +++ b/python/google/appengine/datastore/datastore_v4a_pb.py @@ -0,0 +1,6797 @@ +#!/usr/bin/env python +# +# Copyright 2007 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + + + +from google.net.proto import ProtocolBuffer +import array +import base64 +import dummy_thread as thread +try: + from google3.net.proto import _net_proto___parse__python +except ImportError: + _net_proto___parse__python = None +import sys +try: + __import__('google.net.rpc.python.rpc_internals_lite') + __import__('google.net.rpc.python.pywraprpc_lite') + rpc_internals = sys.modules.get('google.net.rpc.python.rpc_internals_lite') + pywraprpc = sys.modules.get('google.net.rpc.python.pywraprpc_lite') + _client_stub_base_class = rpc_internals.StubbyRPCBaseStub +except ImportError: + _client_stub_base_class = object +try: + __import__('google.net.rpc.python.rpcserver') + rpcserver = sys.modules.get('google.net.rpc.python.rpcserver') + _server_stub_base_class = rpcserver.BaseRpcServer +except ImportError: + _server_stub_base_class = object + +__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit + unusednames=printElemNumber,debug_strs no-special""" + +if hasattr(ProtocolBuffer, 'ExtendableProtocolMessage'): + _extension_runtime = True + _ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage +else: + _extension_runtime = False + _ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage + +from google.appengine.datastore.entity_v4_pb import * +import google.appengine.datastore.entity_v4_pb +from google.appengine.datastore.entity_pb import * +import google.appengine.datastore.entity_pb +class Error(ProtocolBuffer.ProtocolMessage): + + + BAD_REQUEST = 1 + CONCURRENT_TRANSACTION = 2 + INTERNAL_ERROR = 3 + NEED_INDEX = 4 + TIMEOUT = 5 + PERMISSION_DENIED = 6 + BIGTABLE_ERROR = 7 + COMMITTED_BUT_STILL_APPLYING = 8 + CAPABILITY_DISABLED = 9 + TRY_ALTERNATE_BACKEND = 10 + SAFE_TIME_TOO_OLD = 11 + + _ErrorCode_NAMES = { + 1: "BAD_REQUEST", + 2: "CONCURRENT_TRANSACTION", + 3: "INTERNAL_ERROR", + 4: "NEED_INDEX", + 5: "TIMEOUT", + 6: "PERMISSION_DENIED", + 7: "BIGTABLE_ERROR", + 8: "COMMITTED_BUT_STILL_APPLYING", + 9: "CAPABILITY_DISABLED", + 10: "TRY_ALTERNATE_BACKEND", + 11: "SAFE_TIME_TOO_OLD", + } + + def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "") + ErrorCode_Name = classmethod(ErrorCode_Name) + + + def __init__(self, contents=None): + pass + if contents is not None: self.MergeFromString(contents) + + + def MergeFrom(self, x): + assert x is not self + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.Error', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.Error') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.Error') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.Error', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.Error', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.Error', s) + + + def Equals(self, x): + if x is self: return 1 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + return initialized + + def ByteSize(self): + n = 0 + return n + + def 
ByteSizePartial(self): + n = 0 + return n + + def Clear(self): + pass + + def OutputUnchecked(self, out): + pass + + def OutputPartial(self, out): + pass + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + }, 0) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + }, 0, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Error' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCh1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcnN6CUVycm9yQ29kZYsBkgELQkFEX1JFUVVFU1SYAQGMAYsBkgEWQ09OQ1VSUkVOVF9UUkFOU0FDVElPTpgBAowBiwGSAQ5JTlRFUk5BTF9FUlJPUpgBA4wBiwGSAQpORUVEX0lOREVYmAEEjAGLAZIBB1RJTUVPVVSYAQWMAYsBkgERUEVSTUlTU0lPTl9ERU5JRUSYAQaMAYsBkgEOQklHVEFCTEVfRVJST1KYAQeMAYsBkgEcQ09NTUlUVEVEX0JVVF9TVElMTF9BUFBMWUlOR5gBCIwBiwGSARNDQVBBQklMSVRZX0RJU0FCTEVEmAEJjAGLAZIBFVRSWV9BTFRFUk5BVEVfQkFDS0VORJgBCowBiwGSARFTQUZFX1RJTUVfVE9PX09MRJgBC4wBdLoB4ysKKGFwcGhvc3RpbmcvZGF0YXN0b3JlL2RhdGFzdG9yZV92NGEucHJvdG8SF2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0GiRhcHBob3N0aW5nL2RhdGFzdG9yZS9lbnRpdHlfdjQucHJvdG8aIHN0b3JhZ2Uvb25lc3RvcmUvdjMvZW50aXR5LnByb3RvIosCCgVFcnJvciKBAgoJRXJyb3JDb2RlEg8KC0JBRF9SRVFVRVNUEAESGgoWQ09OQ1VSUkVOVF9UUkFOU0FDVElPThACEhIKDklOVEVSTkFMX0VSUk9SEAMSDgoKTkVFRF9JTkRFWBAEEgsKB1RJTUVPVVQQBRIVChFQRVJNSVNTSU9OX0RFTklFRBAGEhIKDkJJR1RBQkxFX0VSUk9SEAcSIAocQ09NTUlUVEVEX0JVVF9TVElMTF9BUFBMWUlORxAIEhcKE0NBUEFCSUxJVFlfRElTQUJMRUQQCRIZChVUUllfQUxURVJOQVRFX0JBQ0tFTkQQChIVChFTQUZFX1RJTUVfVE9PX09MRBALIpMCCghNdXRhdGlvbhIvCgZ1cHNlcnQYASADKAsyHy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHkSLwoGdXBkYXRlGAIgAygLMh8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5Ei8KBmluc2VydBgDIAMoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eRI3Cg5pbnNlcnRfYXV0b19pZBgEIAMoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eRIsCgZkZWxldGUYBSADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkSDQoFZm9yY2UYBiABKAgiYQoOTXV0YXRpb25SZXN1bHQSFQoNaW5kZXhfdXBkYXRlcxgBIAIoBRI4ChJpbnNlcnRfYXV0b19pZF9rZXkYAiADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkidQoMRW50aXR5UmVzdWx0Ei8KBmVudGl0eRgBIAIoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eSI0CgpSZXN1bHRUeXBlEggKBEZVTEwQARIOCgpQUk9KRUNUSU9OEAISDAoIS0VZX09OTFkQAyLxAgoFUXVlcnkSPwoKcHJvamVjdGlvbhgCIAMoCzIrLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5RXhwcmVzc2lvbhI1CgRraW5kGAMgAygLMicuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2luZEV4cHJlc3Npb24SLwoGZmlsdGVyGAQgASgLMh8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRmlsdGVyEjUKBW9yZGVyGAUgAygLMiYuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlPcmRlchI8Cghncm91cF9ieRgGIAMoCzIqLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5UmVmZXJlbmNlEhQKDHN0YXJ0X2N1cnNvchgHIAEoDBISCgplbmRfY3Vyc29yGAggASgMEhEKBm9mZnNldBgKIAEoBToBMBINCgVsaW1pdBgLIAEoBSIeCg5LaW5kRXhwcmVzc2lvbhIMCgRuYW1lGAEgAigJIiEKEVByb3BlcnR5UmVmZXJlbmNlEgwKBG5hbWUYAiACKAki0wEKElByb3BlcnR5RXhwcmVzc2lvbhI8Cghwcm9wZXJ0eRgBIAIoCzIqLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5UmVmZXJlbmNlEl0KFGFnZ3JlZ2F0aW9uX2Z1bmN0aW9uGAIgASgOMj8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlFeHByZXNzaW9uLkFnZ3JlZ2F0aW9uRnVuY3Rpb24iIAoTQWdncmVnYXRpb25GdW5jdGlvbhIJCgVGSVJTVBABIskBCg1Qcm9wZXJ0eU9yZGVyEjwKCHByb3BlcnR5GAEgAigLMiouYXBwaG9zd
GluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlSZWZlcmVuY2USTgoJZGlyZWN0aW9uGAIgASgOMjAuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlPcmRlci5EaXJlY3Rpb246CUFTQ0VORElORyIqCglEaXJlY3Rpb24SDQoJQVNDRU5ESU5HEAESDgoKREVTQ0VORElORxACIo4BCgZGaWx0ZXISQgoQY29tcG9zaXRlX2ZpbHRlchgBIAEoCzIoLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbXBvc2l0ZUZpbHRlchJACg9wcm9wZXJ0eV9maWx0ZXIYAiABKAsyJy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eUZpbHRlciKcAQoPQ29tcG9zaXRlRmlsdGVyEkMKCG9wZXJhdG9yGAEgAigOMjEuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29tcG9zaXRlRmlsdGVyLk9wZXJhdG9yEi8KBmZpbHRlchgCIAMoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkZpbHRlciITCghPcGVyYXRvchIHCgNBTkQQASK+AgoOUHJvcGVydHlGaWx0ZXISPAoIcHJvcGVydHkYASACKAsyKi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eVJlZmVyZW5jZRJCCghvcGVyYXRvchgCIAIoDjIwLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5RmlsdGVyLk9wZXJhdG9yEi0KBXZhbHVlGAMgAigLMh4uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuVmFsdWUiewoIT3BlcmF0b3ISDQoJTEVTU19USEFOEAESFgoSTEVTU19USEFOX09SX0VRVUFMEAISEAoMR1JFQVRFUl9USEFOEAMSGQoVR1JFQVRFUl9USEFOX09SX0VRVUFMEAQSCQoFRVFVQUwQBRIQCgxIQVNfQU5DRVNUT1IQCyKwAQoIR3FsUXVlcnkSFAoMcXVlcnlfc3RyaW5nGAEgAigJEhwKDWFsbG93X2xpdGVyYWwYAiABKAg6BWZhbHNlEjYKCG5hbWVfYXJnGAMgAygLMiQuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuR3FsUXVlcnlBcmcSOAoKbnVtYmVyX2FyZxgEIAMoCzIkLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkdxbFF1ZXJ5QXJnIkoKC0dxbFF1ZXJ5QXJnEgwKBG5hbWUYASABKAkSLQoFdmFsdWUYAiACKAsyHi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5WYWx1ZSL3AgoQUXVlcnlSZXN1bHRCYXRjaBJMChJlbnRpdHlfcmVzdWx0X3R5cGUYASACKA4yMC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHlSZXN1bHQuUmVzdWx0VHlwZRI8Cg1lbnRpdHlfcmVzdWx0GAIgAygLMiUuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5UmVzdWx0EhIKCmVuZF9jdXJzb3IYBCABKAwSTwoMbW9yZV9yZXN1bHRzGAUgAigOMjkuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUXVlcnlSZXN1bHRCYXRjaC5Nb3JlUmVzdWx0c1R5cGUSGgoPc2tpcHBlZF9yZXN1bHRzGAYgASgFOgEwIlYKD01vcmVSZXN1bHRzVHlwZRIQCgxOT1RfRklOSVNIRUQQARIcChhNT1JFX1JFU1VMVFNfQUZURVJfTElNSVQQAhITCg9OT19NT1JFX1JFU1VMVFMQAyK1AQoLUmVhZE9wdGlvbnMSVwoQcmVhZF9jb25zaXN0ZW5jeRgBIAEoDjI0LmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlJlYWRPcHRpb25zLlJlYWRDb25zaXN0ZW5jeToHREVGQVVMVBITCgt0cmFuc2FjdGlvbhgCIAEoDCI4Cg9SZWFkQ29uc2lzdGVuY3kSCwoHREVGQVVMVBAAEgoKBlNUUk9ORxABEgwKCEVWRU5UVUFMEAIicwoKR2V0UmVxdWVzdBI6CgxyZWFkX29wdGlvbnMYASABKAsyJC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SZWFkT3B0aW9ucxIpCgNrZXkYAyADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkiqwEKC0dldFJlc3BvbnNlEjQKBWZvdW5kGAEgAygLMiUuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5UmVzdWx0EjYKB21pc3NpbmcYAiADKAsyJS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHlSZXN1bHQSLgoIZGVmZXJyZWQYAyADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXki9QEKD1J1blF1ZXJ5UmVxdWVzdBI6CgxyZWFkX29wdGlvbnMYASABKAsyJC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SZWFkT3B0aW9ucxI6CgxwYXJ0aXRpb25faWQYAiABKAsyJC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5QYXJ0aXRpb25JZBItCgVxdWVyeRgDIAIoCzIeLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlF1ZXJ5Eh0KFW1pbl9zYWZlX3RpbWVfc2Vjb25kcxgEIAEoAxIcChRzdWdnZXN0ZWRfYmF0Y2hfc2l6ZRgFIAEoBSKWAQoQUnVuUXVlcnlSZXNwb25zZRI4CgViYXRjaBgBIAIoCzIpLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlF1ZXJ5UmVzdWx0QmF0Y2gSFAoMcXVlcnlfaGFuZGxlGAIgASgMEjIKBWluZGV4GAMgAygLMiMuc3RvcmFnZV9vbmVzdG9yZV92My5Db21wb3NpdGVJbmRleCIsChRDb250aW51ZVF1ZXJ5UmVxdWVzdBIUCgxxdWVyeV9oYW5kbGUYASACKAwiUQoVQ29udGludWVRdWVyeVJlc3BvbnNlEjgKBWJhdGNoGAEgAigLMikuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUXVlcnlSZXN1bHRCYXRjaCJTChdCZWdpblRyYW5zYWN0aW9uUmVxdWVzdBIaCgtjcm9zc19ncm91cBgBIAEoCDoFZmFsc2USHAoNY3Jvc3NfcmVxdWVzdBgCIAEoCDoFZmFsc2UiLwoYQmVnaW5UcmFuc2FjdGlvblJlc3BvbnNlEhMKC3RyYW5zYWN0aW9uGAEgAigMIiYKD1JvbGxiYWNrUmVxdWVzdBITCgt0cmFuc2FjdGlvbhgBIAIoDCISChBSb2xsYmFja1Jlc3BvbnNlIlkKDUNvbW1pdFJlcXVlc3QSEwoLdHJhbnNhY3Rpb24YASACKAwSMwoIbXV0YXRpb24Y
AiABKAsyIS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvbiJSCg5Db21taXRSZXNwb25zZRJACg9tdXRhdGlvbl9yZXN1bHQYASABKAsyJy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvblJlc3VsdCJDCgxXcml0ZVJlcXVlc3QSMwoIbXV0YXRpb24YASACKAsyIS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvbiJRCg1Xcml0ZVJlc3BvbnNlEkAKD211dGF0aW9uX3Jlc3VsdBgBIAIoCzInLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uUmVzdWx0InMKEkFsbG9jYXRlSWRzUmVxdWVzdBIuCghhbGxvY2F0ZRgBIAMoCzIcLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleRItCgdyZXNlcnZlGAIgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5IkYKE0FsbG9jYXRlSWRzUmVzcG9uc2USLwoJYWxsb2NhdGVkGAEgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5Mr4GChJEYXRhc3RvcmVWNFNlcnZpY2USeQoQQmVnaW5UcmFuc2FjdGlvbhIwLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkJlZ2luVHJhbnNhY3Rpb25SZXF1ZXN0GjEuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQmVnaW5UcmFuc2FjdGlvblJlc3BvbnNlIgASYQoIUm9sbGJhY2sSKC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Sb2xsYmFja1JlcXVlc3QaKS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Sb2xsYmFja1Jlc3BvbnNlIgASWwoGQ29tbWl0EiYuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29tbWl0UmVxdWVzdBonLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbW1pdFJlc3BvbnNlIgASWAoFV3JpdGUSJS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Xcml0ZVJlcXVlc3QaJi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Xcml0ZVJlc3BvbnNlIgASYQoIUnVuUXVlcnkSKC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SdW5RdWVyeVJlcXVlc3QaKS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SdW5RdWVyeVJlc3BvbnNlIgAScAoNQ29udGludWVRdWVyeRItLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbnRpbnVlUXVlcnlSZXF1ZXN0Gi4uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29udGludWVRdWVyeVJlc3BvbnNlIgASUgoDR2V0EiMuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuR2V0UmVxdWVzdBokLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkdldFJlc3BvbnNlIgASagoLQWxsb2NhdGVJZHMSKy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5BbGxvY2F0ZUlkc1JlcXVlc3QaLC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5BbGxvY2F0ZUlkc1Jlc3BvbnNlIgBCIQofY29tLmdvb2dsZS5hcHBob3N0aW5nLmRhdGFzdG9yZQ==")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class Mutation(ProtocolBuffer.ProtocolMessage): + has_force_ = 0 + force_ = 0 + + def __init__(self, contents=None): + self.upsert_ = [] + self.update_ = [] + self.insert_ = [] + self.insert_auto_id_ = [] + self.delete_ = [] + if contents is not None: self.MergeFromString(contents) + + def upsert_size(self): return len(self.upsert_) + def upsert_list(self): return self.upsert_ + + def upsert(self, i): + return self.upsert_[i] + + def mutable_upsert(self, i): + return self.upsert_[i] + + def add_upsert(self): + x = google.appengine.datastore.entity_v4_pb.Entity() + self.upsert_.append(x) + return x + + def clear_upsert(self): + self.upsert_ = [] + def update_size(self): return len(self.update_) + def update_list(self): return self.update_ + + def update(self, i): + return self.update_[i] + + def mutable_update(self, i): + return self.update_[i] + + def add_update(self): + x = google.appengine.datastore.entity_v4_pb.Entity() + self.update_.append(x) + return x + + def clear_update(self): + self.update_ = [] + def insert_size(self): return len(self.insert_) + def insert_list(self): return self.insert_ + + def insert(self, i): + return self.insert_[i] + + def mutable_insert(self, i): + return self.insert_[i] + + def add_insert(self): + x = google.appengine.datastore.entity_v4_pb.Entity() + self.insert_.append(x) + return x + + def clear_insert(self): + self.insert_ = [] + def insert_auto_id_size(self): return len(self.insert_auto_id_) + def insert_auto_id_list(self): return self.insert_auto_id_ + + def insert_auto_id(self, i): + return self.insert_auto_id_[i] + + def mutable_insert_auto_id(self, i): + return 
self.insert_auto_id_[i] + + def add_insert_auto_id(self): + x = google.appengine.datastore.entity_v4_pb.Entity() + self.insert_auto_id_.append(x) + return x + + def clear_insert_auto_id(self): + self.insert_auto_id_ = [] + def delete_size(self): return len(self.delete_) + def delete_list(self): return self.delete_ + + def delete(self, i): + return self.delete_[i] + + def mutable_delete(self, i): + return self.delete_[i] + + def add_delete(self): + x = google.appengine.datastore.entity_v4_pb.Key() + self.delete_.append(x) + return x + + def clear_delete(self): + self.delete_ = [] + def force(self): return self.force_ + + def set_force(self, x): + self.has_force_ = 1 + self.force_ = x + + def clear_force(self): + if self.has_force_: + self.has_force_ = 0 + self.force_ = 0 + + def has_force(self): return self.has_force_ + + + def MergeFrom(self, x): + assert x is not self + for i in xrange(x.upsert_size()): self.add_upsert().CopyFrom(x.upsert(i)) + for i in xrange(x.update_size()): self.add_update().CopyFrom(x.update(i)) + for i in xrange(x.insert_size()): self.add_insert().CopyFrom(x.insert(i)) + for i in xrange(x.insert_auto_id_size()): self.add_insert_auto_id().CopyFrom(x.insert_auto_id(i)) + for i in xrange(x.delete_size()): self.add_delete().CopyFrom(x.delete(i)) + if (x.has_force()): self.set_force(x.force()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.Mutation', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.Mutation') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.Mutation') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.Mutation', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.Mutation', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.Mutation', s) + + + def Equals(self, x): + if x is self: return 1 + if len(self.upsert_) != len(x.upsert_): return 0 + for e1, e2 in zip(self.upsert_, x.upsert_): + if e1 != e2: return 0 + if len(self.update_) != len(x.update_): return 0 + for e1, e2 in zip(self.update_, x.update_): + if e1 != e2: return 0 + if len(self.insert_) != len(x.insert_): return 0 + for e1, e2 in zip(self.insert_, x.insert_): + if e1 != e2: return 0 + if len(self.insert_auto_id_) != len(x.insert_auto_id_): return 0 + for e1, e2 in zip(self.insert_auto_id_, x.insert_auto_id_): + if e1 != e2: return 0 + if len(self.delete_) != len(x.delete_): return 0 + for e1, e2 in zip(self.delete_, x.delete_): + if e1 != e2: return 0 + if self.has_force_ != x.has_force_: return 0 + if self.has_force_ and self.force_ != x.force_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + for p in self.upsert_: + if not p.IsInitialized(debug_strs): initialized=0 + for p in self.update_: + if not p.IsInitialized(debug_strs): initialized=0 + for p in self.insert_: + if not p.IsInitialized(debug_strs): initialized=0 + for p in self.insert_auto_id_: + if not p.IsInitialized(debug_strs): initialized=0 + for p 
in self.delete_: + if not p.IsInitialized(debug_strs): initialized=0 + return initialized + + def ByteSize(self): + n = 0 + n += 1 * len(self.upsert_) + for i in xrange(len(self.upsert_)): n += self.lengthString(self.upsert_[i].ByteSize()) + n += 1 * len(self.update_) + for i in xrange(len(self.update_)): n += self.lengthString(self.update_[i].ByteSize()) + n += 1 * len(self.insert_) + for i in xrange(len(self.insert_)): n += self.lengthString(self.insert_[i].ByteSize()) + n += 1 * len(self.insert_auto_id_) + for i in xrange(len(self.insert_auto_id_)): n += self.lengthString(self.insert_auto_id_[i].ByteSize()) + n += 1 * len(self.delete_) + for i in xrange(len(self.delete_)): n += self.lengthString(self.delete_[i].ByteSize()) + if (self.has_force_): n += 2 + return n + + def ByteSizePartial(self): + n = 0 + n += 1 * len(self.upsert_) + for i in xrange(len(self.upsert_)): n += self.lengthString(self.upsert_[i].ByteSizePartial()) + n += 1 * len(self.update_) + for i in xrange(len(self.update_)): n += self.lengthString(self.update_[i].ByteSizePartial()) + n += 1 * len(self.insert_) + for i in xrange(len(self.insert_)): n += self.lengthString(self.insert_[i].ByteSizePartial()) + n += 1 * len(self.insert_auto_id_) + for i in xrange(len(self.insert_auto_id_)): n += self.lengthString(self.insert_auto_id_[i].ByteSizePartial()) + n += 1 * len(self.delete_) + for i in xrange(len(self.delete_)): n += self.lengthString(self.delete_[i].ByteSizePartial()) + if (self.has_force_): n += 2 + return n + + def Clear(self): + self.clear_upsert() + self.clear_update() + self.clear_insert() + self.clear_insert_auto_id() + self.clear_delete() + self.clear_force() + + def OutputUnchecked(self, out): + for i in xrange(len(self.upsert_)): + out.putVarInt32(10) + out.putVarInt32(self.upsert_[i].ByteSize()) + self.upsert_[i].OutputUnchecked(out) + for i in xrange(len(self.update_)): + out.putVarInt32(18) + out.putVarInt32(self.update_[i].ByteSize()) + self.update_[i].OutputUnchecked(out) + for i in xrange(len(self.insert_)): + out.putVarInt32(26) + out.putVarInt32(self.insert_[i].ByteSize()) + self.insert_[i].OutputUnchecked(out) + for i in xrange(len(self.insert_auto_id_)): + out.putVarInt32(34) + out.putVarInt32(self.insert_auto_id_[i].ByteSize()) + self.insert_auto_id_[i].OutputUnchecked(out) + for i in xrange(len(self.delete_)): + out.putVarInt32(42) + out.putVarInt32(self.delete_[i].ByteSize()) + self.delete_[i].OutputUnchecked(out) + if (self.has_force_): + out.putVarInt32(48) + out.putBoolean(self.force_) + + def OutputPartial(self, out): + for i in xrange(len(self.upsert_)): + out.putVarInt32(10) + out.putVarInt32(self.upsert_[i].ByteSizePartial()) + self.upsert_[i].OutputPartial(out) + for i in xrange(len(self.update_)): + out.putVarInt32(18) + out.putVarInt32(self.update_[i].ByteSizePartial()) + self.update_[i].OutputPartial(out) + for i in xrange(len(self.insert_)): + out.putVarInt32(26) + out.putVarInt32(self.insert_[i].ByteSizePartial()) + self.insert_[i].OutputPartial(out) + for i in xrange(len(self.insert_auto_id_)): + out.putVarInt32(34) + out.putVarInt32(self.insert_auto_id_[i].ByteSizePartial()) + self.insert_auto_id_[i].OutputPartial(out) + for i in xrange(len(self.delete_)): + out.putVarInt32(42) + out.putVarInt32(self.delete_[i].ByteSizePartial()) + self.delete_[i].OutputPartial(out) + if (self.has_force_): + out.putVarInt32(48) + out.putBoolean(self.force_) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + length = d.getVarInt32() + tmp = 
ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_upsert().TryMerge(tmp) + continue + if tt == 18: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_update().TryMerge(tmp) + continue + if tt == 26: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_insert().TryMerge(tmp) + continue + if tt == 34: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_insert_auto_id().TryMerge(tmp) + continue + if tt == 42: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_delete().TryMerge(tmp) + continue + if tt == 48: + self.set_force(d.getBoolean()) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + cnt=0 + for e in self.upsert_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("upsert%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + cnt=0 + for e in self.update_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("update%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + cnt=0 + for e in self.insert_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("insert%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + cnt=0 + for e in self.insert_auto_id_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("insert_auto_id%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + cnt=0 + for e in self.delete_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("delete%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + if self.has_force_: res+=prefix+("force: %s\n" % self.DebugFormatBool(self.force_)) + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kupsert = 1 + kupdate = 2 + kinsert = 3 + kinsert_auto_id = 4 + kdelete = 5 + kforce = 6 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "upsert", + 2: "update", + 3: "insert", + 4: "insert_auto_id", + 5: "delete", + 6: "force", + }, 6) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + 2: ProtocolBuffer.Encoder.STRING, + 3: ProtocolBuffer.Encoder.STRING, + 4: ProtocolBuffer.Encoder.STRING, + 5: ProtocolBuffer.Encoder.STRING, + 6: ProtocolBuffer.Encoder.NUMERIC, + }, 6, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Mutation' + _SERIALIZED_DESCRIPTOR = array.array('B') + 
_SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiBhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvbhMaBnVwc2VydCABKAIwCzgDSh5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoGdXBkYXRlIAIoAjALOANKHmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eaMBqgEFY3R5cGWyAQZwcm90bzKkARQTGgZpbnNlcnQgAygCMAs4A0oeYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5owGqAQVjdHlwZbIBBnByb3RvMqQBFBMaDmluc2VydF9hdXRvX2lkIAQoAjALOANKHmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eaMBqgEFY3R5cGWyAQZwcm90bzKkARQTGgZkZWxldGUgBSgCMAs4A0obYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5owGqAQVjdHlwZbIBBnByb3RvMqQBFBMaBWZvcmNlIAYoADAIOAEUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I=")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class MutationResult(ProtocolBuffer.ProtocolMessage): + has_index_updates_ = 0 + index_updates_ = 0 + + def __init__(self, contents=None): + self.insert_auto_id_key_ = [] + if contents is not None: self.MergeFromString(contents) + + def index_updates(self): return self.index_updates_ + + def set_index_updates(self, x): + self.has_index_updates_ = 1 + self.index_updates_ = x + + def clear_index_updates(self): + if self.has_index_updates_: + self.has_index_updates_ = 0 + self.index_updates_ = 0 + + def has_index_updates(self): return self.has_index_updates_ + + def insert_auto_id_key_size(self): return len(self.insert_auto_id_key_) + def insert_auto_id_key_list(self): return self.insert_auto_id_key_ + + def insert_auto_id_key(self, i): + return self.insert_auto_id_key_[i] + + def mutable_insert_auto_id_key(self, i): + return self.insert_auto_id_key_[i] + + def add_insert_auto_id_key(self): + x = google.appengine.datastore.entity_v4_pb.Key() + self.insert_auto_id_key_.append(x) + return x + + def clear_insert_auto_id_key(self): + self.insert_auto_id_key_ = [] + + def MergeFrom(self, x): + assert x is not self + if (x.has_index_updates()): self.set_index_updates(x.index_updates()) + for i in xrange(x.insert_auto_id_key_size()): self.add_insert_auto_id_key().CopyFrom(x.insert_auto_id_key(i)) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.MutationResult', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.MutationResult') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.MutationResult') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.MutationResult', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.MutationResult', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.MutationResult', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_index_updates_ != x.has_index_updates_: return 0 + if self.has_index_updates_ and self.index_updates_ != x.index_updates_: return 0 + if len(self.insert_auto_id_key_) != len(x.insert_auto_id_key_): return 0 + for e1, e2 in zip(self.insert_auto_id_key_, x.insert_auto_id_key_): + if 
e1 != e2: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_index_updates_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: index_updates not set.') + for p in self.insert_auto_id_key_: + if not p.IsInitialized(debug_strs): initialized=0 + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthVarInt64(self.index_updates_) + n += 1 * len(self.insert_auto_id_key_) + for i in xrange(len(self.insert_auto_id_key_)): n += self.lengthString(self.insert_auto_id_key_[i].ByteSize()) + return n + 1 + + def ByteSizePartial(self): + n = 0 + if (self.has_index_updates_): + n += 1 + n += self.lengthVarInt64(self.index_updates_) + n += 1 * len(self.insert_auto_id_key_) + for i in xrange(len(self.insert_auto_id_key_)): n += self.lengthString(self.insert_auto_id_key_[i].ByteSizePartial()) + return n + + def Clear(self): + self.clear_index_updates() + self.clear_insert_auto_id_key() + + def OutputUnchecked(self, out): + out.putVarInt32(8) + out.putVarInt32(self.index_updates_) + for i in xrange(len(self.insert_auto_id_key_)): + out.putVarInt32(18) + out.putVarInt32(self.insert_auto_id_key_[i].ByteSize()) + self.insert_auto_id_key_[i].OutputUnchecked(out) + + def OutputPartial(self, out): + if (self.has_index_updates_): + out.putVarInt32(8) + out.putVarInt32(self.index_updates_) + for i in xrange(len(self.insert_auto_id_key_)): + out.putVarInt32(18) + out.putVarInt32(self.insert_auto_id_key_[i].ByteSizePartial()) + self.insert_auto_id_key_[i].OutputPartial(out) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 8: + self.set_index_updates(d.getVarInt32()) + continue + if tt == 18: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_insert_auto_id_key().TryMerge(tmp) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_index_updates_: res+=prefix+("index_updates: %s\n" % self.DebugFormatInt32(self.index_updates_)) + cnt=0 + for e in self.insert_auto_id_key_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("insert_auto_id_key%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kindex_updates = 1 + kinsert_auto_id_key = 2 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "index_updates", + 2: "insert_auto_id_key", + }, 2) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.NUMERIC, + 2: ProtocolBuffer.Encoder.STRING, + }, 2, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.MutationResult' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiZhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvblJlc3VsdBMaDWluZGV4X3VwZGF0ZXMgASgAMAU4AhQTGhJpbnNlcnRfYXV0b19pZF9rZXkgAigCMAs4A0obYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5owGqAQVjdHlwZbIBBnByb3RvMqQBFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class 
EntityResult(ProtocolBuffer.ProtocolMessage): + + + FULL = 1 + PROJECTION = 2 + KEY_ONLY = 3 + + _ResultType_NAMES = { + 1: "FULL", + 2: "PROJECTION", + 3: "KEY_ONLY", + } + + def ResultType_Name(cls, x): return cls._ResultType_NAMES.get(x, "") + ResultType_Name = classmethod(ResultType_Name) + + has_entity_ = 0 + + def __init__(self, contents=None): + self.entity_ = google.appengine.datastore.entity_v4_pb.Entity() + if contents is not None: self.MergeFromString(contents) + + def entity(self): return self.entity_ + + def mutable_entity(self): self.has_entity_ = 1; return self.entity_ + + def clear_entity(self):self.has_entity_ = 0; self.entity_.Clear() + + def has_entity(self): return self.has_entity_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_entity()): self.mutable_entity().MergeFrom(x.entity()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.EntityResult', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.EntityResult') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.EntityResult') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.EntityResult', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.EntityResult', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.EntityResult', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_entity_ != x.has_entity_: return 0 + if self.has_entity_ and self.entity_ != x.entity_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_entity_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: entity not set.') + elif not self.entity_.IsInitialized(debug_strs): initialized = 0 + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthString(self.entity_.ByteSize()) + return n + 1 + + def ByteSizePartial(self): + n = 0 + if (self.has_entity_): + n += 1 + n += self.lengthString(self.entity_.ByteSizePartial()) + return n + + def Clear(self): + self.clear_entity() + + def OutputUnchecked(self, out): + out.putVarInt32(10) + out.putVarInt32(self.entity_.ByteSize()) + self.entity_.OutputUnchecked(out) + + def OutputPartial(self, out): + if (self.has_entity_): + out.putVarInt32(10) + out.putVarInt32(self.entity_.ByteSizePartial()) + self.entity_.OutputPartial(out) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_entity().TryMerge(tmp) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_entity_: + res+=prefix+"entity <\n" + res+=self.entity_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, 
default) for i in xrange(0, 1+maxtag)]) + + kentity = 1 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "entity", + }, 1) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + }, 1, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.EntityResult' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiRhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHlSZXN1bHQTGgZlbnRpdHkgASgCMAs4AkoeYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5owGqAQVjdHlwZbIBBnByb3RvMqQBFHN6ClJlc3VsdFR5cGWLAZIBBEZVTEyYAQGMAYsBkgEKUFJPSkVDVElPTpgBAowBiwGSAQhLRVlfT05MWZgBA4wBdMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class Query(ProtocolBuffer.ProtocolMessage): + has_filter_ = 0 + filter_ = None + has_start_cursor_ = 0 + start_cursor_ = "" + has_end_cursor_ = 0 + end_cursor_ = "" + has_offset_ = 0 + offset_ = 0 + has_limit_ = 0 + limit_ = 0 + + def __init__(self, contents=None): + self.projection_ = [] + self.kind_ = [] + self.order_ = [] + self.group_by_ = [] + self.lazy_init_lock_ = thread.allocate_lock() + if contents is not None: self.MergeFromString(contents) + + def projection_size(self): return len(self.projection_) + def projection_list(self): return self.projection_ + + def projection(self, i): + return self.projection_[i] + + def mutable_projection(self, i): + return self.projection_[i] + + def add_projection(self): + x = PropertyExpression() + self.projection_.append(x) + return x + + def clear_projection(self): + self.projection_ = [] + def kind_size(self): return len(self.kind_) + def kind_list(self): return self.kind_ + + def kind(self, i): + return self.kind_[i] + + def mutable_kind(self, i): + return self.kind_[i] + + def add_kind(self): + x = KindExpression() + self.kind_.append(x) + return x + + def clear_kind(self): + self.kind_ = [] + def filter(self): + if self.filter_ is None: + self.lazy_init_lock_.acquire() + try: + if self.filter_ is None: self.filter_ = Filter() + finally: + self.lazy_init_lock_.release() + return self.filter_ + + def mutable_filter(self): self.has_filter_ = 1; return self.filter() + + def clear_filter(self): + + if self.has_filter_: + self.has_filter_ = 0; + if self.filter_ is not None: self.filter_.Clear() + + def has_filter(self): return self.has_filter_ + + def order_size(self): return len(self.order_) + def order_list(self): return self.order_ + + def order(self, i): + return self.order_[i] + + def mutable_order(self, i): + return self.order_[i] + + def add_order(self): + x = PropertyOrder() + self.order_.append(x) + return x + + def clear_order(self): + self.order_ = [] + def group_by_size(self): return len(self.group_by_) + def group_by_list(self): return self.group_by_ + + def group_by(self, i): + return self.group_by_[i] + + def mutable_group_by(self, i): + return self.group_by_[i] + + def add_group_by(self): + x = PropertyReference() + self.group_by_.append(x) + return x + + def clear_group_by(self): + self.group_by_ = [] + def start_cursor(self): return self.start_cursor_ + + def set_start_cursor(self, x): + self.has_start_cursor_ = 1 + self.start_cursor_ = x + + def clear_start_cursor(self): + if self.has_start_cursor_: + self.has_start_cursor_ = 0 + self.start_cursor_ = "" + + def 
has_start_cursor(self): return self.has_start_cursor_ + + def end_cursor(self): return self.end_cursor_ + + def set_end_cursor(self, x): + self.has_end_cursor_ = 1 + self.end_cursor_ = x + + def clear_end_cursor(self): + if self.has_end_cursor_: + self.has_end_cursor_ = 0 + self.end_cursor_ = "" + + def has_end_cursor(self): return self.has_end_cursor_ + + def offset(self): return self.offset_ + + def set_offset(self, x): + self.has_offset_ = 1 + self.offset_ = x + + def clear_offset(self): + if self.has_offset_: + self.has_offset_ = 0 + self.offset_ = 0 + + def has_offset(self): return self.has_offset_ + + def limit(self): return self.limit_ + + def set_limit(self, x): + self.has_limit_ = 1 + self.limit_ = x + + def clear_limit(self): + if self.has_limit_: + self.has_limit_ = 0 + self.limit_ = 0 + + def has_limit(self): return self.has_limit_ + + + def MergeFrom(self, x): + assert x is not self + for i in xrange(x.projection_size()): self.add_projection().CopyFrom(x.projection(i)) + for i in xrange(x.kind_size()): self.add_kind().CopyFrom(x.kind(i)) + if (x.has_filter()): self.mutable_filter().MergeFrom(x.filter()) + for i in xrange(x.order_size()): self.add_order().CopyFrom(x.order(i)) + for i in xrange(x.group_by_size()): self.add_group_by().CopyFrom(x.group_by(i)) + if (x.has_start_cursor()): self.set_start_cursor(x.start_cursor()) + if (x.has_end_cursor()): self.set_end_cursor(x.end_cursor()) + if (x.has_offset()): self.set_offset(x.offset()) + if (x.has_limit()): self.set_limit(x.limit()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.Query', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.Query') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.Query') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.Query', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.Query', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.Query', s) + + + def Equals(self, x): + if x is self: return 1 + if len(self.projection_) != len(x.projection_): return 0 + for e1, e2 in zip(self.projection_, x.projection_): + if e1 != e2: return 0 + if len(self.kind_) != len(x.kind_): return 0 + for e1, e2 in zip(self.kind_, x.kind_): + if e1 != e2: return 0 + if self.has_filter_ != x.has_filter_: return 0 + if self.has_filter_ and self.filter_ != x.filter_: return 0 + if len(self.order_) != len(x.order_): return 0 + for e1, e2 in zip(self.order_, x.order_): + if e1 != e2: return 0 + if len(self.group_by_) != len(x.group_by_): return 0 + for e1, e2 in zip(self.group_by_, x.group_by_): + if e1 != e2: return 0 + if self.has_start_cursor_ != x.has_start_cursor_: return 0 + if self.has_start_cursor_ and self.start_cursor_ != x.start_cursor_: return 0 + if self.has_end_cursor_ != x.has_end_cursor_: return 0 + if self.has_end_cursor_ and self.end_cursor_ != x.end_cursor_: return 0 + if self.has_offset_ != x.has_offset_: return 0 + if self.has_offset_ and self.offset_ != 
x.offset_: return 0 + if self.has_limit_ != x.has_limit_: return 0 + if self.has_limit_ and self.limit_ != x.limit_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + for p in self.projection_: + if not p.IsInitialized(debug_strs): initialized=0 + for p in self.kind_: + if not p.IsInitialized(debug_strs): initialized=0 + if (self.has_filter_ and not self.filter_.IsInitialized(debug_strs)): initialized = 0 + for p in self.order_: + if not p.IsInitialized(debug_strs): initialized=0 + for p in self.group_by_: + if not p.IsInitialized(debug_strs): initialized=0 + return initialized + + def ByteSize(self): + n = 0 + n += 1 * len(self.projection_) + for i in xrange(len(self.projection_)): n += self.lengthString(self.projection_[i].ByteSize()) + n += 1 * len(self.kind_) + for i in xrange(len(self.kind_)): n += self.lengthString(self.kind_[i].ByteSize()) + if (self.has_filter_): n += 1 + self.lengthString(self.filter_.ByteSize()) + n += 1 * len(self.order_) + for i in xrange(len(self.order_)): n += self.lengthString(self.order_[i].ByteSize()) + n += 1 * len(self.group_by_) + for i in xrange(len(self.group_by_)): n += self.lengthString(self.group_by_[i].ByteSize()) + if (self.has_start_cursor_): n += 1 + self.lengthString(len(self.start_cursor_)) + if (self.has_end_cursor_): n += 1 + self.lengthString(len(self.end_cursor_)) + if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_) + if (self.has_limit_): n += 1 + self.lengthVarInt64(self.limit_) + return n + + def ByteSizePartial(self): + n = 0 + n += 1 * len(self.projection_) + for i in xrange(len(self.projection_)): n += self.lengthString(self.projection_[i].ByteSizePartial()) + n += 1 * len(self.kind_) + for i in xrange(len(self.kind_)): n += self.lengthString(self.kind_[i].ByteSizePartial()) + if (self.has_filter_): n += 1 + self.lengthString(self.filter_.ByteSizePartial()) + n += 1 * len(self.order_) + for i in xrange(len(self.order_)): n += self.lengthString(self.order_[i].ByteSizePartial()) + n += 1 * len(self.group_by_) + for i in xrange(len(self.group_by_)): n += self.lengthString(self.group_by_[i].ByteSizePartial()) + if (self.has_start_cursor_): n += 1 + self.lengthString(len(self.start_cursor_)) + if (self.has_end_cursor_): n += 1 + self.lengthString(len(self.end_cursor_)) + if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_) + if (self.has_limit_): n += 1 + self.lengthVarInt64(self.limit_) + return n + + def Clear(self): + self.clear_projection() + self.clear_kind() + self.clear_filter() + self.clear_order() + self.clear_group_by() + self.clear_start_cursor() + self.clear_end_cursor() + self.clear_offset() + self.clear_limit() + + def OutputUnchecked(self, out): + for i in xrange(len(self.projection_)): + out.putVarInt32(18) + out.putVarInt32(self.projection_[i].ByteSize()) + self.projection_[i].OutputUnchecked(out) + for i in xrange(len(self.kind_)): + out.putVarInt32(26) + out.putVarInt32(self.kind_[i].ByteSize()) + self.kind_[i].OutputUnchecked(out) + if (self.has_filter_): + out.putVarInt32(34) + out.putVarInt32(self.filter_.ByteSize()) + self.filter_.OutputUnchecked(out) + for i in xrange(len(self.order_)): + out.putVarInt32(42) + out.putVarInt32(self.order_[i].ByteSize()) + self.order_[i].OutputUnchecked(out) + for i in xrange(len(self.group_by_)): + out.putVarInt32(50) + out.putVarInt32(self.group_by_[i].ByteSize()) + self.group_by_[i].OutputUnchecked(out) + if (self.has_start_cursor_): + out.putVarInt32(58) + out.putPrefixedString(self.start_cursor_) + if 
(self.has_end_cursor_): + out.putVarInt32(66) + out.putPrefixedString(self.end_cursor_) + if (self.has_offset_): + out.putVarInt32(80) + out.putVarInt32(self.offset_) + if (self.has_limit_): + out.putVarInt32(88) + out.putVarInt32(self.limit_) + + def OutputPartial(self, out): + for i in xrange(len(self.projection_)): + out.putVarInt32(18) + out.putVarInt32(self.projection_[i].ByteSizePartial()) + self.projection_[i].OutputPartial(out) + for i in xrange(len(self.kind_)): + out.putVarInt32(26) + out.putVarInt32(self.kind_[i].ByteSizePartial()) + self.kind_[i].OutputPartial(out) + if (self.has_filter_): + out.putVarInt32(34) + out.putVarInt32(self.filter_.ByteSizePartial()) + self.filter_.OutputPartial(out) + for i in xrange(len(self.order_)): + out.putVarInt32(42) + out.putVarInt32(self.order_[i].ByteSizePartial()) + self.order_[i].OutputPartial(out) + for i in xrange(len(self.group_by_)): + out.putVarInt32(50) + out.putVarInt32(self.group_by_[i].ByteSizePartial()) + self.group_by_[i].OutputPartial(out) + if (self.has_start_cursor_): + out.putVarInt32(58) + out.putPrefixedString(self.start_cursor_) + if (self.has_end_cursor_): + out.putVarInt32(66) + out.putPrefixedString(self.end_cursor_) + if (self.has_offset_): + out.putVarInt32(80) + out.putVarInt32(self.offset_) + if (self.has_limit_): + out.putVarInt32(88) + out.putVarInt32(self.limit_) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 18: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_projection().TryMerge(tmp) + continue + if tt == 26: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_kind().TryMerge(tmp) + continue + if tt == 34: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_filter().TryMerge(tmp) + continue + if tt == 42: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_order().TryMerge(tmp) + continue + if tt == 50: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_group_by().TryMerge(tmp) + continue + if tt == 58: + self.set_start_cursor(d.getPrefixedString()) + continue + if tt == 66: + self.set_end_cursor(d.getPrefixedString()) + continue + if tt == 80: + self.set_offset(d.getVarInt32()) + continue + if tt == 88: + self.set_limit(d.getVarInt32()) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + cnt=0 + for e in self.projection_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("projection%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + cnt=0 + for e in self.kind_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("kind%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + if self.has_filter_: + res+=prefix+"filter <\n" + res+=self.filter_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt=0 + for e in self.order_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("order%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + cnt=0 + for e in self.group_by_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("group_by%s 
<\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + if self.has_start_cursor_: res+=prefix+("start_cursor: %s\n" % self.DebugFormatString(self.start_cursor_)) + if self.has_end_cursor_: res+=prefix+("end_cursor: %s\n" % self.DebugFormatString(self.end_cursor_)) + if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_)) + if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_)) + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kprojection = 2 + kkind = 3 + kfilter = 4 + korder = 5 + kgroup_by = 6 + kstart_cursor = 7 + kend_cursor = 8 + koffset = 10 + klimit = 11 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 2: "projection", + 3: "kind", + 4: "filter", + 5: "order", + 6: "group_by", + 7: "start_cursor", + 8: "end_cursor", + 10: "offset", + 11: "limit", + }, 11) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 2: ProtocolBuffer.Encoder.STRING, + 3: ProtocolBuffer.Encoder.STRING, + 4: ProtocolBuffer.Encoder.STRING, + 5: ProtocolBuffer.Encoder.STRING, + 6: ProtocolBuffer.Encoder.STRING, + 7: ProtocolBuffer.Encoder.STRING, + 8: ProtocolBuffer.Encoder.STRING, + 10: ProtocolBuffer.Encoder.NUMERIC, + 11: ProtocolBuffer.Encoder.NUMERIC, + }, 11, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Query' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCh1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5RdWVyeRMaCnByb2plY3Rpb24gAigCMAs4A0oqYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlFeHByZXNzaW9uowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaBGtpbmQgAygCMAs4A0omYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2luZEV4cHJlc3Npb26jAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoGZmlsdGVyIAQoAjALOAFKHmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkZpbHRlcqMBqgEFY3R5cGWyAQZwcm90bzKkARQTGgVvcmRlciAFKAIwCzgDSiVhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eU9yZGVyowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaCGdyb3VwX2J5IAYoAjALOANKKWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5UmVmZXJlbmNlowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaDHN0YXJ0X2N1cnNvciAHKAIwCTgBFBMaCmVuZF9jdXJzb3IgCCgCMAk4ARQTGgZvZmZzZXQgCigAMAU4AUIBMKMBqgEHZGVmYXVsdLIBATCkARQTGgVsaW1pdCALKAAwBTgBFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class KindExpression(ProtocolBuffer.ProtocolMessage): + has_name_ = 0 + name_ = "" + + def __init__(self, contents=None): + if contents is not None: self.MergeFromString(contents) + + def name(self): return self.name_ + + def set_name(self, x): + self.has_name_ = 1 + self.name_ = x + + def clear_name(self): + if self.has_name_: + self.has_name_ = 0 + self.name_ = "" + + def has_name(self): return self.has_name_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_name()): self.set_name(x.name()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.KindExpression', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.KindExpression') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 
'apphosting.datastore.v4.KindExpression') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.KindExpression', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.KindExpression', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.KindExpression', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_name_ != x.has_name_: return 0 + if self.has_name_ and self.name_ != x.name_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_name_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: name not set.') + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthString(len(self.name_)) + return n + 1 + + def ByteSizePartial(self): + n = 0 + if (self.has_name_): + n += 1 + n += self.lengthString(len(self.name_)) + return n + + def Clear(self): + self.clear_name() + + def OutputUnchecked(self, out): + out.putVarInt32(10) + out.putPrefixedString(self.name_) + + def OutputPartial(self, out): + if (self.has_name_): + out.putVarInt32(10) + out.putPrefixedString(self.name_) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + self.set_name(d.getPrefixedString()) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_)) + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kname = 1 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "name", + }, 1) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + }, 1, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.KindExpression' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiZhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LaW5kRXhwcmVzc2lvbhMaBG5hbWUgASgCMAk4AhTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg==")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class PropertyReference(ProtocolBuffer.ProtocolMessage): + has_name_ = 0 + name_ = "" + + def __init__(self, contents=None): + if contents is not None: self.MergeFromString(contents) + + def name(self): return self.name_ + + def set_name(self, x): + self.has_name_ = 1 + self.name_ = x + + def clear_name(self): + if self.has_name_: + self.has_name_ = 0 + self.name_ = "" + + def has_name(self): return self.has_name_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_name()): self.set_name(x.name()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.PropertyReference', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 
'apphosting.datastore.v4.PropertyReference') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.PropertyReference') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.PropertyReference', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.PropertyReference', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.PropertyReference', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_name_ != x.has_name_: return 0 + if self.has_name_ and self.name_ != x.name_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_name_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: name not set.') + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthString(len(self.name_)) + return n + 1 + + def ByteSizePartial(self): + n = 0 + if (self.has_name_): + n += 1 + n += self.lengthString(len(self.name_)) + return n + + def Clear(self): + self.clear_name() + + def OutputUnchecked(self, out): + out.putVarInt32(18) + out.putPrefixedString(self.name_) + + def OutputPartial(self, out): + if (self.has_name_): + out.putVarInt32(18) + out.putPrefixedString(self.name_) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 18: + self.set_name(d.getPrefixedString()) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_)) + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kname = 2 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 2: "name", + }, 2) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 2: ProtocolBuffer.Encoder.STRING, + }, 2, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.PropertyReference' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCilhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eVJlZmVyZW5jZRMaBG5hbWUgAigCMAk4AhTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg==")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class PropertyExpression(ProtocolBuffer.ProtocolMessage): + + + FIRST = 1 + + _AggregationFunction_NAMES = { + 1: "FIRST", + } + + def AggregationFunction_Name(cls, x): return cls._AggregationFunction_NAMES.get(x, "") + AggregationFunction_Name = classmethod(AggregationFunction_Name) + + has_property_ = 0 + has_aggregation_function_ = 0 + aggregation_function_ = 0 + + def __init__(self, contents=None): + self.property_ = PropertyReference() + if contents is not None: self.MergeFromString(contents) + + def property(self): return self.property_ + + def mutable_property(self): self.has_property_ = 1; return self.property_ + + def 
clear_property(self):self.has_property_ = 0; self.property_.Clear() + + def has_property(self): return self.has_property_ + + def aggregation_function(self): return self.aggregation_function_ + + def set_aggregation_function(self, x): + self.has_aggregation_function_ = 1 + self.aggregation_function_ = x + + def clear_aggregation_function(self): + if self.has_aggregation_function_: + self.has_aggregation_function_ = 0 + self.aggregation_function_ = 0 + + def has_aggregation_function(self): return self.has_aggregation_function_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_property()): self.mutable_property().MergeFrom(x.property()) + if (x.has_aggregation_function()): self.set_aggregation_function(x.aggregation_function()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.PropertyExpression', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.PropertyExpression') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.PropertyExpression') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.PropertyExpression', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.PropertyExpression', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.PropertyExpression', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_property_ != x.has_property_: return 0 + if self.has_property_ and self.property_ != x.property_: return 0 + if self.has_aggregation_function_ != x.has_aggregation_function_: return 0 + if self.has_aggregation_function_ and self.aggregation_function_ != x.aggregation_function_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_property_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: property not set.') + elif not self.property_.IsInitialized(debug_strs): initialized = 0 + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthString(self.property_.ByteSize()) + if (self.has_aggregation_function_): n += 1 + self.lengthVarInt64(self.aggregation_function_) + return n + 1 + + def ByteSizePartial(self): + n = 0 + if (self.has_property_): + n += 1 + n += self.lengthString(self.property_.ByteSizePartial()) + if (self.has_aggregation_function_): n += 1 + self.lengthVarInt64(self.aggregation_function_) + return n + + def Clear(self): + self.clear_property() + self.clear_aggregation_function() + + def OutputUnchecked(self, out): + out.putVarInt32(10) + out.putVarInt32(self.property_.ByteSize()) + self.property_.OutputUnchecked(out) + if (self.has_aggregation_function_): + out.putVarInt32(16) + out.putVarInt32(self.aggregation_function_) + + def OutputPartial(self, out): + if (self.has_property_): + out.putVarInt32(10) + out.putVarInt32(self.property_.ByteSizePartial()) + self.property_.OutputPartial(out) + if (self.has_aggregation_function_): + out.putVarInt32(16) + out.putVarInt32(self.aggregation_function_) + + def 
TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_property().TryMerge(tmp) + continue + if tt == 16: + self.set_aggregation_function(d.getVarInt32()) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_property_: + res+=prefix+"property <\n" + res+=self.property_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + if self.has_aggregation_function_: res+=prefix+("aggregation_function: %s\n" % self.DebugFormatInt32(self.aggregation_function_)) + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kproperty = 1 + kaggregation_function = 2 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "property", + 2: "aggregation_function", + }, 2) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + 2: ProtocolBuffer.Encoder.NUMERIC, + }, 2, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.PropertyExpression' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiphcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eUV4cHJlc3Npb24TGghwcm9wZXJ0eSABKAIwCzgCSilhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eVJlZmVyZW5jZaMBqgEFY3R5cGWyAQZwcm90bzKkARQTGhRhZ2dyZWdhdGlvbl9mdW5jdGlvbiACKAAwBTgBaAAUc3oTQWdncmVnYXRpb25GdW5jdGlvbosBkgEFRklSU1SYAQGMAXTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg==")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class PropertyOrder(ProtocolBuffer.ProtocolMessage): + + + ASCENDING = 1 + DESCENDING = 2 + + _Direction_NAMES = { + 1: "ASCENDING", + 2: "DESCENDING", + } + + def Direction_Name(cls, x): return cls._Direction_NAMES.get(x, "") + Direction_Name = classmethod(Direction_Name) + + has_property_ = 0 + has_direction_ = 0 + direction_ = 1 + + def __init__(self, contents=None): + self.property_ = PropertyReference() + if contents is not None: self.MergeFromString(contents) + + def property(self): return self.property_ + + def mutable_property(self): self.has_property_ = 1; return self.property_ + + def clear_property(self):self.has_property_ = 0; self.property_.Clear() + + def has_property(self): return self.has_property_ + + def direction(self): return self.direction_ + + def set_direction(self, x): + self.has_direction_ = 1 + self.direction_ = x + + def clear_direction(self): + if self.has_direction_: + self.has_direction_ = 0 + self.direction_ = 1 + + def has_direction(self): return self.has_direction_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_property()): self.mutable_property().MergeFrom(x.property()) + if (x.has_direction()): self.set_direction(x.direction()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.PropertyOrder', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.PropertyOrder') + + if _net_proto___parse__python is not None: + def 
_CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.PropertyOrder') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.PropertyOrder', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.PropertyOrder', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.PropertyOrder', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_property_ != x.has_property_: return 0 + if self.has_property_ and self.property_ != x.property_: return 0 + if self.has_direction_ != x.has_direction_: return 0 + if self.has_direction_ and self.direction_ != x.direction_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_property_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: property not set.') + elif not self.property_.IsInitialized(debug_strs): initialized = 0 + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthString(self.property_.ByteSize()) + if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_) + return n + 1 + + def ByteSizePartial(self): + n = 0 + if (self.has_property_): + n += 1 + n += self.lengthString(self.property_.ByteSizePartial()) + if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_) + return n + + def Clear(self): + self.clear_property() + self.clear_direction() + + def OutputUnchecked(self, out): + out.putVarInt32(10) + out.putVarInt32(self.property_.ByteSize()) + self.property_.OutputUnchecked(out) + if (self.has_direction_): + out.putVarInt32(16) + out.putVarInt32(self.direction_) + + def OutputPartial(self, out): + if (self.has_property_): + out.putVarInt32(10) + out.putVarInt32(self.property_.ByteSizePartial()) + self.property_.OutputPartial(out) + if (self.has_direction_): + out.putVarInt32(16) + out.putVarInt32(self.direction_) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_property().TryMerge(tmp) + continue + if tt == 16: + self.set_direction(d.getVarInt32()) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_property_: + res+=prefix+"property <\n" + res+=self.property_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + if self.has_direction_: res+=prefix+("direction: %s\n" % self.DebugFormatInt32(self.direction_)) + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kproperty = 1 + kdirection = 2 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "property", + 2: "direction", + }, 2) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + 2: ProtocolBuffer.Encoder.NUMERIC, + }, 2, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.PropertyOrder' + _SERIALIZED_DESCRIPTOR = array.array('B') + 
_SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiVhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eU9yZGVyExoIcHJvcGVydHkgASgCMAs4AkopYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlSZWZlcmVuY2WjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoJZGlyZWN0aW9uIAIoADAFOAFCATFoAKMBqgEHZGVmYXVsdLIBCUFTQ0VORElOR6QBFHN6CURpcmVjdGlvbosBkgEJQVNDRU5ESU5HmAEBjAGLAZIBCkRFU0NFTkRJTkeYAQKMAXTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg==")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class Filter(ProtocolBuffer.ProtocolMessage): + has_composite_filter_ = 0 + composite_filter_ = None + has_property_filter_ = 0 + property_filter_ = None + + def __init__(self, contents=None): + self.lazy_init_lock_ = thread.allocate_lock() + if contents is not None: self.MergeFromString(contents) + + def composite_filter(self): + if self.composite_filter_ is None: + self.lazy_init_lock_.acquire() + try: + if self.composite_filter_ is None: self.composite_filter_ = CompositeFilter() + finally: + self.lazy_init_lock_.release() + return self.composite_filter_ + + def mutable_composite_filter(self): self.has_composite_filter_ = 1; return self.composite_filter() + + def clear_composite_filter(self): + + if self.has_composite_filter_: + self.has_composite_filter_ = 0; + if self.composite_filter_ is not None: self.composite_filter_.Clear() + + def has_composite_filter(self): return self.has_composite_filter_ + + def property_filter(self): + if self.property_filter_ is None: + self.lazy_init_lock_.acquire() + try: + if self.property_filter_ is None: self.property_filter_ = PropertyFilter() + finally: + self.lazy_init_lock_.release() + return self.property_filter_ + + def mutable_property_filter(self): self.has_property_filter_ = 1; return self.property_filter() + + def clear_property_filter(self): + + if self.has_property_filter_: + self.has_property_filter_ = 0; + if self.property_filter_ is not None: self.property_filter_.Clear() + + def has_property_filter(self): return self.has_property_filter_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_composite_filter()): self.mutable_composite_filter().MergeFrom(x.composite_filter()) + if (x.has_property_filter()): self.mutable_property_filter().MergeFrom(x.property_filter()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.Filter', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.Filter') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.Filter') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.Filter', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.Filter', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.Filter', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_composite_filter_ != x.has_composite_filter_: return 0 + if self.has_composite_filter_ and self.composite_filter_ != 
x.composite_filter_: return 0 + if self.has_property_filter_ != x.has_property_filter_: return 0 + if self.has_property_filter_ and self.property_filter_ != x.property_filter_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (self.has_composite_filter_ and not self.composite_filter_.IsInitialized(debug_strs)): initialized = 0 + if (self.has_property_filter_ and not self.property_filter_.IsInitialized(debug_strs)): initialized = 0 + return initialized + + def ByteSize(self): + n = 0 + if (self.has_composite_filter_): n += 1 + self.lengthString(self.composite_filter_.ByteSize()) + if (self.has_property_filter_): n += 1 + self.lengthString(self.property_filter_.ByteSize()) + return n + + def ByteSizePartial(self): + n = 0 + if (self.has_composite_filter_): n += 1 + self.lengthString(self.composite_filter_.ByteSizePartial()) + if (self.has_property_filter_): n += 1 + self.lengthString(self.property_filter_.ByteSizePartial()) + return n + + def Clear(self): + self.clear_composite_filter() + self.clear_property_filter() + + def OutputUnchecked(self, out): + if (self.has_composite_filter_): + out.putVarInt32(10) + out.putVarInt32(self.composite_filter_.ByteSize()) + self.composite_filter_.OutputUnchecked(out) + if (self.has_property_filter_): + out.putVarInt32(18) + out.putVarInt32(self.property_filter_.ByteSize()) + self.property_filter_.OutputUnchecked(out) + + def OutputPartial(self, out): + if (self.has_composite_filter_): + out.putVarInt32(10) + out.putVarInt32(self.composite_filter_.ByteSizePartial()) + self.composite_filter_.OutputPartial(out) + if (self.has_property_filter_): + out.putVarInt32(18) + out.putVarInt32(self.property_filter_.ByteSizePartial()) + self.property_filter_.OutputPartial(out) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_composite_filter().TryMerge(tmp) + continue + if tt == 18: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_property_filter().TryMerge(tmp) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_composite_filter_: + res+=prefix+"composite_filter <\n" + res+=self.composite_filter_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + if self.has_property_filter_: + res+=prefix+"property_filter <\n" + res+=self.property_filter_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kcomposite_filter = 1 + kproperty_filter = 2 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "composite_filter", + 2: "property_filter", + }, 2) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + 2: ProtocolBuffer.Encoder.STRING, + }, 2, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Filter' + _SERIALIZED_DESCRIPTOR = array.array('B') + 
_SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCh5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5GaWx0ZXITGhBjb21wb3NpdGVfZmlsdGVyIAEoAjALOAFKJ2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbXBvc2l0ZUZpbHRlcqMBqgEFY3R5cGWyAQZwcm90bzKkARQTGg9wcm9wZXJ0eV9maWx0ZXIgAigCMAs4AUomYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlGaWx0ZXKjAaoBBWN0eXBlsgEGcHJvdG8ypAEUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I=")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class CompositeFilter(ProtocolBuffer.ProtocolMessage): + + + AND = 1 + + _Operator_NAMES = { + 1: "AND", + } + + def Operator_Name(cls, x): return cls._Operator_NAMES.get(x, "") + Operator_Name = classmethod(Operator_Name) + + has_operator_ = 0 + operator_ = 0 + + def __init__(self, contents=None): + self.filter_ = [] + if contents is not None: self.MergeFromString(contents) + + def operator(self): return self.operator_ + + def set_operator(self, x): + self.has_operator_ = 1 + self.operator_ = x + + def clear_operator(self): + if self.has_operator_: + self.has_operator_ = 0 + self.operator_ = 0 + + def has_operator(self): return self.has_operator_ + + def filter_size(self): return len(self.filter_) + def filter_list(self): return self.filter_ + + def filter(self, i): + return self.filter_[i] + + def mutable_filter(self, i): + return self.filter_[i] + + def add_filter(self): + x = Filter() + self.filter_.append(x) + return x + + def clear_filter(self): + self.filter_ = [] + + def MergeFrom(self, x): + assert x is not self + if (x.has_operator()): self.set_operator(x.operator()) + for i in xrange(x.filter_size()): self.add_filter().CopyFrom(x.filter(i)) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.CompositeFilter', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.CompositeFilter') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.CompositeFilter') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.CompositeFilter', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.CompositeFilter', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.CompositeFilter', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_operator_ != x.has_operator_: return 0 + if self.has_operator_ and self.operator_ != x.operator_: return 0 + if len(self.filter_) != len(x.filter_): return 0 + for e1, e2 in zip(self.filter_, x.filter_): + if e1 != e2: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_operator_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: operator not set.') + for p in self.filter_: + if not p.IsInitialized(debug_strs): initialized=0 + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthVarInt64(self.operator_) + n += 1 * len(self.filter_) + for i in xrange(len(self.filter_)): n += 
self.lengthString(self.filter_[i].ByteSize()) + return n + 1 + + def ByteSizePartial(self): + n = 0 + if (self.has_operator_): + n += 1 + n += self.lengthVarInt64(self.operator_) + n += 1 * len(self.filter_) + for i in xrange(len(self.filter_)): n += self.lengthString(self.filter_[i].ByteSizePartial()) + return n + + def Clear(self): + self.clear_operator() + self.clear_filter() + + def OutputUnchecked(self, out): + out.putVarInt32(8) + out.putVarInt32(self.operator_) + for i in xrange(len(self.filter_)): + out.putVarInt32(18) + out.putVarInt32(self.filter_[i].ByteSize()) + self.filter_[i].OutputUnchecked(out) + + def OutputPartial(self, out): + if (self.has_operator_): + out.putVarInt32(8) + out.putVarInt32(self.operator_) + for i in xrange(len(self.filter_)): + out.putVarInt32(18) + out.putVarInt32(self.filter_[i].ByteSizePartial()) + self.filter_[i].OutputPartial(out) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 8: + self.set_operator(d.getVarInt32()) + continue + if tt == 18: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_filter().TryMerge(tmp) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_operator_: res+=prefix+("operator: %s\n" % self.DebugFormatInt32(self.operator_)) + cnt=0 + for e in self.filter_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("filter%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + koperator = 1 + kfilter = 2 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "operator", + 2: "filter", + }, 2) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.NUMERIC, + 2: ProtocolBuffer.Encoder.STRING, + }, 2, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.CompositeFilter' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCidhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db21wb3NpdGVGaWx0ZXITGghvcGVyYXRvciABKAAwBTgCaAAUExoGZmlsdGVyIAIoAjALOANKHmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkZpbHRlcqMBqgEFY3R5cGWyAQZwcm90bzKkARRzeghPcGVyYXRvcosBkgEDQU5EmAEBjAF0wgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I=")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class PropertyFilter(ProtocolBuffer.ProtocolMessage): + + + LESS_THAN = 1 + LESS_THAN_OR_EQUAL = 2 + GREATER_THAN = 3 + GREATER_THAN_OR_EQUAL = 4 + EQUAL = 5 + HAS_ANCESTOR = 11 + + _Operator_NAMES = { + 1: "LESS_THAN", + 2: "LESS_THAN_OR_EQUAL", + 3: "GREATER_THAN", + 4: "GREATER_THAN_OR_EQUAL", + 5: "EQUAL", + 11: "HAS_ANCESTOR", + } + + def Operator_Name(cls, x): return cls._Operator_NAMES.get(x, "") + Operator_Name = classmethod(Operator_Name) + + has_property_ = 0 + has_operator_ = 0 + operator_ = 0 + has_value_ = 0 + + def __init__(self, contents=None): + self.property_ = PropertyReference() + self.value_ = google.appengine.datastore.entity_v4_pb.Value() + if contents is not None: self.MergeFromString(contents) + + def property(self): return self.property_ + + def 
mutable_property(self): self.has_property_ = 1; return self.property_ + + def clear_property(self):self.has_property_ = 0; self.property_.Clear() + + def has_property(self): return self.has_property_ + + def operator(self): return self.operator_ + + def set_operator(self, x): + self.has_operator_ = 1 + self.operator_ = x + + def clear_operator(self): + if self.has_operator_: + self.has_operator_ = 0 + self.operator_ = 0 + + def has_operator(self): return self.has_operator_ + + def value(self): return self.value_ + + def mutable_value(self): self.has_value_ = 1; return self.value_ + + def clear_value(self):self.has_value_ = 0; self.value_.Clear() + + def has_value(self): return self.has_value_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_property()): self.mutable_property().MergeFrom(x.property()) + if (x.has_operator()): self.set_operator(x.operator()) + if (x.has_value()): self.mutable_value().MergeFrom(x.value()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.PropertyFilter', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.PropertyFilter') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.PropertyFilter') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.PropertyFilter', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.PropertyFilter', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.PropertyFilter', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_property_ != x.has_property_: return 0 + if self.has_property_ and self.property_ != x.property_: return 0 + if self.has_operator_ != x.has_operator_: return 0 + if self.has_operator_ and self.operator_ != x.operator_: return 0 + if self.has_value_ != x.has_value_: return 0 + if self.has_value_ and self.value_ != x.value_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_property_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: property not set.') + elif not self.property_.IsInitialized(debug_strs): initialized = 0 + if (not self.has_operator_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: operator not set.') + if (not self.has_value_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: value not set.') + elif not self.value_.IsInitialized(debug_strs): initialized = 0 + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthString(self.property_.ByteSize()) + n += self.lengthVarInt64(self.operator_) + n += self.lengthString(self.value_.ByteSize()) + return n + 3 + + def ByteSizePartial(self): + n = 0 + if (self.has_property_): + n += 1 + n += self.lengthString(self.property_.ByteSizePartial()) + if (self.has_operator_): + n += 1 + n += self.lengthVarInt64(self.operator_) + if (self.has_value_): + n += 1 + n += self.lengthString(self.value_.ByteSizePartial()) + return n + + def 
Clear(self): + self.clear_property() + self.clear_operator() + self.clear_value() + + def OutputUnchecked(self, out): + out.putVarInt32(10) + out.putVarInt32(self.property_.ByteSize()) + self.property_.OutputUnchecked(out) + out.putVarInt32(16) + out.putVarInt32(self.operator_) + out.putVarInt32(26) + out.putVarInt32(self.value_.ByteSize()) + self.value_.OutputUnchecked(out) + + def OutputPartial(self, out): + if (self.has_property_): + out.putVarInt32(10) + out.putVarInt32(self.property_.ByteSizePartial()) + self.property_.OutputPartial(out) + if (self.has_operator_): + out.putVarInt32(16) + out.putVarInt32(self.operator_) + if (self.has_value_): + out.putVarInt32(26) + out.putVarInt32(self.value_.ByteSizePartial()) + self.value_.OutputPartial(out) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_property().TryMerge(tmp) + continue + if tt == 16: + self.set_operator(d.getVarInt32()) + continue + if tt == 26: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_value().TryMerge(tmp) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_property_: + res+=prefix+"property <\n" + res+=self.property_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + if self.has_operator_: res+=prefix+("operator: %s\n" % self.DebugFormatInt32(self.operator_)) + if self.has_value_: + res+=prefix+"value <\n" + res+=self.value_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kproperty = 1 + koperator = 2 + kvalue = 3 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "property", + 2: "operator", + 3: "value", + }, 3) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + 2: ProtocolBuffer.Encoder.NUMERIC, + 3: ProtocolBuffer.Encoder.STRING, + }, 3, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.PropertyFilter' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiZhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eUZpbHRlchMaCHByb3BlcnR5IAEoAjALOAJKKWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5UmVmZXJlbmNlowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaCG9wZXJhdG9yIAIoADAFOAJoABQTGgV2YWx1ZSADKAIwCzgCSh1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5WYWx1ZaMBqgEFY3R5cGWyAQZwcm90bzKkARRzeghPcGVyYXRvcosBkgEJTEVTU19USEFOmAEBjAGLAZIBEkxFU1NfVEhBTl9PUl9FUVVBTJgBAowBiwGSAQxHUkVBVEVSX1RIQU6YAQOMAYsBkgEVR1JFQVRFUl9USEFOX09SX0VRVUFMmAEEjAGLAZIBBUVRVUFMmAEFjAGLAZIBDEhBU19BTkNFU1RPUpgBC4wBdMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class GqlQuery(ProtocolBuffer.ProtocolMessage): + has_query_string_ = 0 + query_string_ = "" + has_allow_literal_ = 0 + allow_literal_ = 0 + + def __init__(self, contents=None): + self.name_arg_ = [] + self.number_arg_ = [] + if contents is not None: self.MergeFromString(contents) + + def query_string(self): return 
self.query_string_ + + def set_query_string(self, x): + self.has_query_string_ = 1 + self.query_string_ = x + + def clear_query_string(self): + if self.has_query_string_: + self.has_query_string_ = 0 + self.query_string_ = "" + + def has_query_string(self): return self.has_query_string_ + + def allow_literal(self): return self.allow_literal_ + + def set_allow_literal(self, x): + self.has_allow_literal_ = 1 + self.allow_literal_ = x + + def clear_allow_literal(self): + if self.has_allow_literal_: + self.has_allow_literal_ = 0 + self.allow_literal_ = 0 + + def has_allow_literal(self): return self.has_allow_literal_ + + def name_arg_size(self): return len(self.name_arg_) + def name_arg_list(self): return self.name_arg_ + + def name_arg(self, i): + return self.name_arg_[i] + + def mutable_name_arg(self, i): + return self.name_arg_[i] + + def add_name_arg(self): + x = GqlQueryArg() + self.name_arg_.append(x) + return x + + def clear_name_arg(self): + self.name_arg_ = [] + def number_arg_size(self): return len(self.number_arg_) + def number_arg_list(self): return self.number_arg_ + + def number_arg(self, i): + return self.number_arg_[i] + + def mutable_number_arg(self, i): + return self.number_arg_[i] + + def add_number_arg(self): + x = GqlQueryArg() + self.number_arg_.append(x) + return x + + def clear_number_arg(self): + self.number_arg_ = [] + + def MergeFrom(self, x): + assert x is not self + if (x.has_query_string()): self.set_query_string(x.query_string()) + if (x.has_allow_literal()): self.set_allow_literal(x.allow_literal()) + for i in xrange(x.name_arg_size()): self.add_name_arg().CopyFrom(x.name_arg(i)) + for i in xrange(x.number_arg_size()): self.add_number_arg().CopyFrom(x.number_arg(i)) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.GqlQuery', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.GqlQuery') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.GqlQuery') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.GqlQuery', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.GqlQuery', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.GqlQuery', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_query_string_ != x.has_query_string_: return 0 + if self.has_query_string_ and self.query_string_ != x.query_string_: return 0 + if self.has_allow_literal_ != x.has_allow_literal_: return 0 + if self.has_allow_literal_ and self.allow_literal_ != x.allow_literal_: return 0 + if len(self.name_arg_) != len(x.name_arg_): return 0 + for e1, e2 in zip(self.name_arg_, x.name_arg_): + if e1 != e2: return 0 + if len(self.number_arg_) != len(x.number_arg_): return 0 + for e1, e2 in zip(self.number_arg_, x.number_arg_): + if e1 != e2: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_query_string_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: 
query_string not set.') + for p in self.name_arg_: + if not p.IsInitialized(debug_strs): initialized=0 + for p in self.number_arg_: + if not p.IsInitialized(debug_strs): initialized=0 + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthString(len(self.query_string_)) + if (self.has_allow_literal_): n += 2 + n += 1 * len(self.name_arg_) + for i in xrange(len(self.name_arg_)): n += self.lengthString(self.name_arg_[i].ByteSize()) + n += 1 * len(self.number_arg_) + for i in xrange(len(self.number_arg_)): n += self.lengthString(self.number_arg_[i].ByteSize()) + return n + 1 + + def ByteSizePartial(self): + n = 0 + if (self.has_query_string_): + n += 1 + n += self.lengthString(len(self.query_string_)) + if (self.has_allow_literal_): n += 2 + n += 1 * len(self.name_arg_) + for i in xrange(len(self.name_arg_)): n += self.lengthString(self.name_arg_[i].ByteSizePartial()) + n += 1 * len(self.number_arg_) + for i in xrange(len(self.number_arg_)): n += self.lengthString(self.number_arg_[i].ByteSizePartial()) + return n + + def Clear(self): + self.clear_query_string() + self.clear_allow_literal() + self.clear_name_arg() + self.clear_number_arg() + + def OutputUnchecked(self, out): + out.putVarInt32(10) + out.putPrefixedString(self.query_string_) + if (self.has_allow_literal_): + out.putVarInt32(16) + out.putBoolean(self.allow_literal_) + for i in xrange(len(self.name_arg_)): + out.putVarInt32(26) + out.putVarInt32(self.name_arg_[i].ByteSize()) + self.name_arg_[i].OutputUnchecked(out) + for i in xrange(len(self.number_arg_)): + out.putVarInt32(34) + out.putVarInt32(self.number_arg_[i].ByteSize()) + self.number_arg_[i].OutputUnchecked(out) + + def OutputPartial(self, out): + if (self.has_query_string_): + out.putVarInt32(10) + out.putPrefixedString(self.query_string_) + if (self.has_allow_literal_): + out.putVarInt32(16) + out.putBoolean(self.allow_literal_) + for i in xrange(len(self.name_arg_)): + out.putVarInt32(26) + out.putVarInt32(self.name_arg_[i].ByteSizePartial()) + self.name_arg_[i].OutputPartial(out) + for i in xrange(len(self.number_arg_)): + out.putVarInt32(34) + out.putVarInt32(self.number_arg_[i].ByteSizePartial()) + self.number_arg_[i].OutputPartial(out) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + self.set_query_string(d.getPrefixedString()) + continue + if tt == 16: + self.set_allow_literal(d.getBoolean()) + continue + if tt == 26: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_name_arg().TryMerge(tmp) + continue + if tt == 34: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_number_arg().TryMerge(tmp) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_query_string_: res+=prefix+("query_string: %s\n" % self.DebugFormatString(self.query_string_)) + if self.has_allow_literal_: res+=prefix+("allow_literal: %s\n" % self.DebugFormatBool(self.allow_literal_)) + cnt=0 + for e in self.name_arg_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("name_arg%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + cnt=0 + for e in self.number_arg_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("number_arg%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + 
return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kquery_string = 1 + kallow_literal = 2 + kname_arg = 3 + knumber_arg = 4 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "query_string", + 2: "allow_literal", + 3: "name_arg", + 4: "number_arg", + }, 4) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + 2: ProtocolBuffer.Encoder.NUMERIC, + 3: ProtocolBuffer.Encoder.STRING, + 4: ProtocolBuffer.Encoder.STRING, + }, 4, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.GqlQuery' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiBhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5HcWxRdWVyeRMaDHF1ZXJ5X3N0cmluZyABKAIwCTgCFBMaDWFsbG93X2xpdGVyYWwgAigAMAg4AUIFZmFsc2WjAaoBB2RlZmF1bHSyAQVmYWxzZaQBFBMaCG5hbWVfYXJnIAMoAjALOANKI2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkdxbFF1ZXJ5QXJnowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaCm51bWJlcl9hcmcgBCgCMAs4A0ojYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuR3FsUXVlcnlBcmejAaoBBWN0eXBlsgEGcHJvdG8ypAEUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I=")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class GqlQueryArg(ProtocolBuffer.ProtocolMessage): + has_name_ = 0 + name_ = "" + has_value_ = 0 + + def __init__(self, contents=None): + self.value_ = google.appengine.datastore.entity_v4_pb.Value() + if contents is not None: self.MergeFromString(contents) + + def name(self): return self.name_ + + def set_name(self, x): + self.has_name_ = 1 + self.name_ = x + + def clear_name(self): + if self.has_name_: + self.has_name_ = 0 + self.name_ = "" + + def has_name(self): return self.has_name_ + + def value(self): return self.value_ + + def mutable_value(self): self.has_value_ = 1; return self.value_ + + def clear_value(self):self.has_value_ = 0; self.value_.Clear() + + def has_value(self): return self.has_value_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_name()): self.set_name(x.name()) + if (x.has_value()): self.mutable_value().MergeFrom(x.value()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.GqlQueryArg', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.GqlQueryArg') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.GqlQueryArg') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.GqlQueryArg', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.GqlQueryArg', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.GqlQueryArg', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_name_ != x.has_name_: return 0 + if self.has_name_ and self.name_ != x.name_: return 0 + if self.has_value_ != x.has_value_: return 0 + 
if self.has_value_ and self.value_ != x.value_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_value_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: value not set.') + elif not self.value_.IsInitialized(debug_strs): initialized = 0 + return initialized + + def ByteSize(self): + n = 0 + if (self.has_name_): n += 1 + self.lengthString(len(self.name_)) + n += self.lengthString(self.value_.ByteSize()) + return n + 1 + + def ByteSizePartial(self): + n = 0 + if (self.has_name_): n += 1 + self.lengthString(len(self.name_)) + if (self.has_value_): + n += 1 + n += self.lengthString(self.value_.ByteSizePartial()) + return n + + def Clear(self): + self.clear_name() + self.clear_value() + + def OutputUnchecked(self, out): + if (self.has_name_): + out.putVarInt32(10) + out.putPrefixedString(self.name_) + out.putVarInt32(18) + out.putVarInt32(self.value_.ByteSize()) + self.value_.OutputUnchecked(out) + + def OutputPartial(self, out): + if (self.has_name_): + out.putVarInt32(10) + out.putPrefixedString(self.name_) + if (self.has_value_): + out.putVarInt32(18) + out.putVarInt32(self.value_.ByteSizePartial()) + self.value_.OutputPartial(out) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + self.set_name(d.getPrefixedString()) + continue + if tt == 18: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_value().TryMerge(tmp) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_)) + if self.has_value_: + res+=prefix+"value <\n" + res+=self.value_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kname = 1 + kvalue = 2 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "name", + 2: "value", + }, 2) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + 2: ProtocolBuffer.Encoder.STRING, + }, 2, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.GqlQueryArg' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiNhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5HcWxRdWVyeUFyZxMaBG5hbWUgASgCMAk4ARQTGgV2YWx1ZSACKAIwCzgCSh1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5WYWx1ZaMBqgEFY3R5cGWyAQZwcm90bzKkARTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg==")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class QueryResultBatch(ProtocolBuffer.ProtocolMessage): + + + NOT_FINISHED = 1 + MORE_RESULTS_AFTER_LIMIT = 2 + NO_MORE_RESULTS = 3 + + _MoreResultsType_NAMES = { + 1: "NOT_FINISHED", + 2: "MORE_RESULTS_AFTER_LIMIT", + 3: "NO_MORE_RESULTS", + } + + def MoreResultsType_Name(cls, x): return cls._MoreResultsType_NAMES.get(x, "") + MoreResultsType_Name = classmethod(MoreResultsType_Name) + + has_entity_result_type_ = 0 + entity_result_type_ = 0 + has_end_cursor_ = 0 + end_cursor_ = "" + has_more_results_ = 0 + more_results_ = 0 + has_skipped_results_ = 0 + skipped_results_ = 0 + 
+ def __init__(self, contents=None): + self.entity_result_ = [] + if contents is not None: self.MergeFromString(contents) + + def entity_result_type(self): return self.entity_result_type_ + + def set_entity_result_type(self, x): + self.has_entity_result_type_ = 1 + self.entity_result_type_ = x + + def clear_entity_result_type(self): + if self.has_entity_result_type_: + self.has_entity_result_type_ = 0 + self.entity_result_type_ = 0 + + def has_entity_result_type(self): return self.has_entity_result_type_ + + def entity_result_size(self): return len(self.entity_result_) + def entity_result_list(self): return self.entity_result_ + + def entity_result(self, i): + return self.entity_result_[i] + + def mutable_entity_result(self, i): + return self.entity_result_[i] + + def add_entity_result(self): + x = EntityResult() + self.entity_result_.append(x) + return x + + def clear_entity_result(self): + self.entity_result_ = [] + def end_cursor(self): return self.end_cursor_ + + def set_end_cursor(self, x): + self.has_end_cursor_ = 1 + self.end_cursor_ = x + + def clear_end_cursor(self): + if self.has_end_cursor_: + self.has_end_cursor_ = 0 + self.end_cursor_ = "" + + def has_end_cursor(self): return self.has_end_cursor_ + + def more_results(self): return self.more_results_ + + def set_more_results(self, x): + self.has_more_results_ = 1 + self.more_results_ = x + + def clear_more_results(self): + if self.has_more_results_: + self.has_more_results_ = 0 + self.more_results_ = 0 + + def has_more_results(self): return self.has_more_results_ + + def skipped_results(self): return self.skipped_results_ + + def set_skipped_results(self, x): + self.has_skipped_results_ = 1 + self.skipped_results_ = x + + def clear_skipped_results(self): + if self.has_skipped_results_: + self.has_skipped_results_ = 0 + self.skipped_results_ = 0 + + def has_skipped_results(self): return self.has_skipped_results_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_entity_result_type()): self.set_entity_result_type(x.entity_result_type()) + for i in xrange(x.entity_result_size()): self.add_entity_result().CopyFrom(x.entity_result(i)) + if (x.has_end_cursor()): self.set_end_cursor(x.end_cursor()) + if (x.has_more_results()): self.set_more_results(x.more_results()) + if (x.has_skipped_results()): self.set_skipped_results(x.skipped_results()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.QueryResultBatch', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.QueryResultBatch') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.QueryResultBatch') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.QueryResultBatch', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.QueryResultBatch', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.QueryResultBatch', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_entity_result_type_ != x.has_entity_result_type_: return 0 + 
if self.has_entity_result_type_ and self.entity_result_type_ != x.entity_result_type_: return 0 + if len(self.entity_result_) != len(x.entity_result_): return 0 + for e1, e2 in zip(self.entity_result_, x.entity_result_): + if e1 != e2: return 0 + if self.has_end_cursor_ != x.has_end_cursor_: return 0 + if self.has_end_cursor_ and self.end_cursor_ != x.end_cursor_: return 0 + if self.has_more_results_ != x.has_more_results_: return 0 + if self.has_more_results_ and self.more_results_ != x.more_results_: return 0 + if self.has_skipped_results_ != x.has_skipped_results_: return 0 + if self.has_skipped_results_ and self.skipped_results_ != x.skipped_results_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_entity_result_type_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: entity_result_type not set.') + for p in self.entity_result_: + if not p.IsInitialized(debug_strs): initialized=0 + if (not self.has_more_results_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: more_results not set.') + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthVarInt64(self.entity_result_type_) + n += 1 * len(self.entity_result_) + for i in xrange(len(self.entity_result_)): n += self.lengthString(self.entity_result_[i].ByteSize()) + if (self.has_end_cursor_): n += 1 + self.lengthString(len(self.end_cursor_)) + n += self.lengthVarInt64(self.more_results_) + if (self.has_skipped_results_): n += 1 + self.lengthVarInt64(self.skipped_results_) + return n + 2 + + def ByteSizePartial(self): + n = 0 + if (self.has_entity_result_type_): + n += 1 + n += self.lengthVarInt64(self.entity_result_type_) + n += 1 * len(self.entity_result_) + for i in xrange(len(self.entity_result_)): n += self.lengthString(self.entity_result_[i].ByteSizePartial()) + if (self.has_end_cursor_): n += 1 + self.lengthString(len(self.end_cursor_)) + if (self.has_more_results_): + n += 1 + n += self.lengthVarInt64(self.more_results_) + if (self.has_skipped_results_): n += 1 + self.lengthVarInt64(self.skipped_results_) + return n + + def Clear(self): + self.clear_entity_result_type() + self.clear_entity_result() + self.clear_end_cursor() + self.clear_more_results() + self.clear_skipped_results() + + def OutputUnchecked(self, out): + out.putVarInt32(8) + out.putVarInt32(self.entity_result_type_) + for i in xrange(len(self.entity_result_)): + out.putVarInt32(18) + out.putVarInt32(self.entity_result_[i].ByteSize()) + self.entity_result_[i].OutputUnchecked(out) + if (self.has_end_cursor_): + out.putVarInt32(34) + out.putPrefixedString(self.end_cursor_) + out.putVarInt32(40) + out.putVarInt32(self.more_results_) + if (self.has_skipped_results_): + out.putVarInt32(48) + out.putVarInt32(self.skipped_results_) + + def OutputPartial(self, out): + if (self.has_entity_result_type_): + out.putVarInt32(8) + out.putVarInt32(self.entity_result_type_) + for i in xrange(len(self.entity_result_)): + out.putVarInt32(18) + out.putVarInt32(self.entity_result_[i].ByteSizePartial()) + self.entity_result_[i].OutputPartial(out) + if (self.has_end_cursor_): + out.putVarInt32(34) + out.putPrefixedString(self.end_cursor_) + if (self.has_more_results_): + out.putVarInt32(40) + out.putVarInt32(self.more_results_) + if (self.has_skipped_results_): + out.putVarInt32(48) + out.putVarInt32(self.skipped_results_) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 8: + 
self.set_entity_result_type(d.getVarInt32()) + continue + if tt == 18: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_entity_result().TryMerge(tmp) + continue + if tt == 34: + self.set_end_cursor(d.getPrefixedString()) + continue + if tt == 40: + self.set_more_results(d.getVarInt32()) + continue + if tt == 48: + self.set_skipped_results(d.getVarInt32()) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_entity_result_type_: res+=prefix+("entity_result_type: %s\n" % self.DebugFormatInt32(self.entity_result_type_)) + cnt=0 + for e in self.entity_result_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("entity_result%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + if self.has_end_cursor_: res+=prefix+("end_cursor: %s\n" % self.DebugFormatString(self.end_cursor_)) + if self.has_more_results_: res+=prefix+("more_results: %s\n" % self.DebugFormatInt32(self.more_results_)) + if self.has_skipped_results_: res+=prefix+("skipped_results: %s\n" % self.DebugFormatInt32(self.skipped_results_)) + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kentity_result_type = 1 + kentity_result = 2 + kend_cursor = 4 + kmore_results = 5 + kskipped_results = 6 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "entity_result_type", + 2: "entity_result", + 4: "end_cursor", + 5: "more_results", + 6: "skipped_results", + }, 6) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.NUMERIC, + 2: ProtocolBuffer.Encoder.STRING, + 4: ProtocolBuffer.Encoder.STRING, + 5: ProtocolBuffer.Encoder.NUMERIC, + 6: ProtocolBuffer.Encoder.NUMERIC, + }, 6, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.QueryResultBatch' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCihhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5RdWVyeVJlc3VsdEJhdGNoExoSZW50aXR5X3Jlc3VsdF90eXBlIAEoADAFOAIUExoNZW50aXR5X3Jlc3VsdCACKAIwCzgDSiRhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHlSZXN1bHSjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoKZW5kX2N1cnNvciAEKAIwCTgBFBMaDG1vcmVfcmVzdWx0cyAFKAAwBTgCaAAUExoPc2tpcHBlZF9yZXN1bHRzIAYoADAFOAFCATCjAaoBB2RlZmF1bHSyAQEwpAEUc3oPTW9yZVJlc3VsdHNUeXBliwGSAQxOT1RfRklOSVNIRUSYAQGMAYsBkgEYTU9SRV9SRVNVTFRTX0FGVEVSX0xJTUlUmAECjAGLAZIBD05PX01PUkVfUkVTVUxUU5gBA4wBdMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class ReadOptions(ProtocolBuffer.ProtocolMessage): + + + DEFAULT = 0 + STRONG = 1 + EVENTUAL = 2 + + _ReadConsistency_NAMES = { + 0: "DEFAULT", + 1: "STRONG", + 2: "EVENTUAL", + } + + def ReadConsistency_Name(cls, x): return cls._ReadConsistency_NAMES.get(x, "") + ReadConsistency_Name = classmethod(ReadConsistency_Name) + + has_read_consistency_ = 0 + read_consistency_ = 0 + has_transaction_ = 0 + transaction_ = "" + + def __init__(self, contents=None): + if contents is not None: self.MergeFromString(contents) + + def read_consistency(self): return self.read_consistency_ + + def set_read_consistency(self, x): + self.has_read_consistency_ = 
1 + self.read_consistency_ = x + + def clear_read_consistency(self): + if self.has_read_consistency_: + self.has_read_consistency_ = 0 + self.read_consistency_ = 0 + + def has_read_consistency(self): return self.has_read_consistency_ + + def transaction(self): return self.transaction_ + + def set_transaction(self, x): + self.has_transaction_ = 1 + self.transaction_ = x + + def clear_transaction(self): + if self.has_transaction_: + self.has_transaction_ = 0 + self.transaction_ = "" + + def has_transaction(self): return self.has_transaction_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_read_consistency()): self.set_read_consistency(x.read_consistency()) + if (x.has_transaction()): self.set_transaction(x.transaction()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.ReadOptions', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.ReadOptions') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.ReadOptions') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.ReadOptions', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.ReadOptions', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.ReadOptions', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_read_consistency_ != x.has_read_consistency_: return 0 + if self.has_read_consistency_ and self.read_consistency_ != x.read_consistency_: return 0 + if self.has_transaction_ != x.has_transaction_: return 0 + if self.has_transaction_ and self.transaction_ != x.transaction_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + return initialized + + def ByteSize(self): + n = 0 + if (self.has_read_consistency_): n += 1 + self.lengthVarInt64(self.read_consistency_) + if (self.has_transaction_): n += 1 + self.lengthString(len(self.transaction_)) + return n + + def ByteSizePartial(self): + n = 0 + if (self.has_read_consistency_): n += 1 + self.lengthVarInt64(self.read_consistency_) + if (self.has_transaction_): n += 1 + self.lengthString(len(self.transaction_)) + return n + + def Clear(self): + self.clear_read_consistency() + self.clear_transaction() + + def OutputUnchecked(self, out): + if (self.has_read_consistency_): + out.putVarInt32(8) + out.putVarInt32(self.read_consistency_) + if (self.has_transaction_): + out.putVarInt32(18) + out.putPrefixedString(self.transaction_) + + def OutputPartial(self, out): + if (self.has_read_consistency_): + out.putVarInt32(8) + out.putVarInt32(self.read_consistency_) + if (self.has_transaction_): + out.putVarInt32(18) + out.putPrefixedString(self.transaction_) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 8: + self.set_read_consistency(d.getVarInt32()) + continue + if tt == 18: + self.set_transaction(d.getPrefixedString()) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", 
printElemNumber=0): + res="" + if self.has_read_consistency_: res+=prefix+("read_consistency: %s\n" % self.DebugFormatInt32(self.read_consistency_)) + if self.has_transaction_: res+=prefix+("transaction: %s\n" % self.DebugFormatString(self.transaction_)) + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kread_consistency = 1 + ktransaction = 2 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "read_consistency", + 2: "transaction", + }, 2) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.NUMERIC, + 2: ProtocolBuffer.Encoder.STRING, + }, 2, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.ReadOptions' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiNhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SZWFkT3B0aW9ucxMaEHJlYWRfY29uc2lzdGVuY3kgASgAMAU4AUIBMGgAowGqAQdkZWZhdWx0sgEHREVGQVVMVKQBFBMaC3RyYW5zYWN0aW9uIAIoAjAJOAEUc3oPUmVhZENvbnNpc3RlbmN5iwGSAQdERUZBVUxUmAEAjAGLAZIBBlNUUk9OR5gBAYwBiwGSAQhFVkVOVFVBTJgBAowBdMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class GetRequest(ProtocolBuffer.ProtocolMessage): + has_read_options_ = 0 + read_options_ = None + + def __init__(self, contents=None): + self.key_ = [] + self.lazy_init_lock_ = thread.allocate_lock() + if contents is not None: self.MergeFromString(contents) + + def read_options(self): + if self.read_options_ is None: + self.lazy_init_lock_.acquire() + try: + if self.read_options_ is None: self.read_options_ = ReadOptions() + finally: + self.lazy_init_lock_.release() + return self.read_options_ + + def mutable_read_options(self): self.has_read_options_ = 1; return self.read_options() + + def clear_read_options(self): + + if self.has_read_options_: + self.has_read_options_ = 0; + if self.read_options_ is not None: self.read_options_.Clear() + + def has_read_options(self): return self.has_read_options_ + + def key_size(self): return len(self.key_) + def key_list(self): return self.key_ + + def key(self, i): + return self.key_[i] + + def mutable_key(self, i): + return self.key_[i] + + def add_key(self): + x = google.appengine.datastore.entity_v4_pb.Key() + self.key_.append(x) + return x + + def clear_key(self): + self.key_ = [] + + def MergeFrom(self, x): + assert x is not self + if (x.has_read_options()): self.mutable_read_options().MergeFrom(x.read_options()) + for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i)) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.GetRequest', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.GetRequest') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.GetRequest') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.GetRequest', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + 
_net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.GetRequest', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.GetRequest', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_read_options_ != x.has_read_options_: return 0 + if self.has_read_options_ and self.read_options_ != x.read_options_: return 0 + if len(self.key_) != len(x.key_): return 0 + for e1, e2 in zip(self.key_, x.key_): + if e1 != e2: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (self.has_read_options_ and not self.read_options_.IsInitialized(debug_strs)): initialized = 0 + for p in self.key_: + if not p.IsInitialized(debug_strs): initialized=0 + return initialized + + def ByteSize(self): + n = 0 + if (self.has_read_options_): n += 1 + self.lengthString(self.read_options_.ByteSize()) + n += 1 * len(self.key_) + for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize()) + return n + + def ByteSizePartial(self): + n = 0 + if (self.has_read_options_): n += 1 + self.lengthString(self.read_options_.ByteSizePartial()) + n += 1 * len(self.key_) + for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial()) + return n + + def Clear(self): + self.clear_read_options() + self.clear_key() + + def OutputUnchecked(self, out): + if (self.has_read_options_): + out.putVarInt32(10) + out.putVarInt32(self.read_options_.ByteSize()) + self.read_options_.OutputUnchecked(out) + for i in xrange(len(self.key_)): + out.putVarInt32(26) + out.putVarInt32(self.key_[i].ByteSize()) + self.key_[i].OutputUnchecked(out) + + def OutputPartial(self, out): + if (self.has_read_options_): + out.putVarInt32(10) + out.putVarInt32(self.read_options_.ByteSizePartial()) + self.read_options_.OutputPartial(out) + for i in xrange(len(self.key_)): + out.putVarInt32(26) + out.putVarInt32(self.key_[i].ByteSizePartial()) + self.key_[i].OutputPartial(out) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_read_options().TryMerge(tmp) + continue + if tt == 26: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_key().TryMerge(tmp) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_read_options_: + res+=prefix+"read_options <\n" + res+=self.read_options_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt=0 + for e in self.key_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("key%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kread_options = 1 + kkey = 3 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "read_options", + 3: "key", + }, 3) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + 3: ProtocolBuffer.Encoder.STRING, + }, 3, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.GetRequest' + 
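# [Editor's note -- illustrative sketch only; not part of the generated module
#  or of this patch.] The message classes in this file follow the
#  ProtocolBuffer.ProtocolMessage conventions used throughout the SDK:
#  optional scalar fields get set_*/has_*/clear_* accessors, singular message
#  fields get mutable_*, and repeated fields get add_*. A hypothetical caller
#  could therefore build and round-trip a GetRequest roughly like this
#  (all names below are taken from the class definitions in this file; Key
#  comes from entity_v4_pb, and Encode/MergeFromString come from the
#  ProtocolMessage base class):
#
#      req = GetRequest()
#      req.mutable_read_options().set_read_consistency(ReadOptions.EVENTUAL)
#      key = req.add_key()          # populate via entity_v4_pb.Key accessors
#      data = req.Encode()          # serialize to the wire format
#      copy = GetRequest(data)      # the constructor calls MergeFromString
#      assert copy.Equals(req)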
_SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiJhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5HZXRSZXF1ZXN0ExoMcmVhZF9vcHRpb25zIAEoAjALOAFKI2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlJlYWRPcHRpb25zowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaA2tleSADKAIwCzgDShthcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I=")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class GetResponse(ProtocolBuffer.ProtocolMessage): + + def __init__(self, contents=None): + self.found_ = [] + self.missing_ = [] + self.deferred_ = [] + if contents is not None: self.MergeFromString(contents) + + def found_size(self): return len(self.found_) + def found_list(self): return self.found_ + + def found(self, i): + return self.found_[i] + + def mutable_found(self, i): + return self.found_[i] + + def add_found(self): + x = EntityResult() + self.found_.append(x) + return x + + def clear_found(self): + self.found_ = [] + def missing_size(self): return len(self.missing_) + def missing_list(self): return self.missing_ + + def missing(self, i): + return self.missing_[i] + + def mutable_missing(self, i): + return self.missing_[i] + + def add_missing(self): + x = EntityResult() + self.missing_.append(x) + return x + + def clear_missing(self): + self.missing_ = [] + def deferred_size(self): return len(self.deferred_) + def deferred_list(self): return self.deferred_ + + def deferred(self, i): + return self.deferred_[i] + + def mutable_deferred(self, i): + return self.deferred_[i] + + def add_deferred(self): + x = google.appengine.datastore.entity_v4_pb.Key() + self.deferred_.append(x) + return x + + def clear_deferred(self): + self.deferred_ = [] + + def MergeFrom(self, x): + assert x is not self + for i in xrange(x.found_size()): self.add_found().CopyFrom(x.found(i)) + for i in xrange(x.missing_size()): self.add_missing().CopyFrom(x.missing(i)) + for i in xrange(x.deferred_size()): self.add_deferred().CopyFrom(x.deferred(i)) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.GetResponse', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.GetResponse') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.GetResponse') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.GetResponse', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.GetResponse', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.GetResponse', s) + + + def Equals(self, x): + if x is self: return 1 + if len(self.found_) != len(x.found_): return 0 + for e1, e2 in zip(self.found_, x.found_): + if e1 != e2: return 0 + if len(self.missing_) != len(x.missing_): return 0 + for e1, e2 in zip(self.missing_, x.missing_): + if e1 != e2: return 0 + if len(self.deferred_) != len(x.deferred_): return 0 + for e1, e2 in 
zip(self.deferred_, x.deferred_): + if e1 != e2: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + for p in self.found_: + if not p.IsInitialized(debug_strs): initialized=0 + for p in self.missing_: + if not p.IsInitialized(debug_strs): initialized=0 + for p in self.deferred_: + if not p.IsInitialized(debug_strs): initialized=0 + return initialized + + def ByteSize(self): + n = 0 + n += 1 * len(self.found_) + for i in xrange(len(self.found_)): n += self.lengthString(self.found_[i].ByteSize()) + n += 1 * len(self.missing_) + for i in xrange(len(self.missing_)): n += self.lengthString(self.missing_[i].ByteSize()) + n += 1 * len(self.deferred_) + for i in xrange(len(self.deferred_)): n += self.lengthString(self.deferred_[i].ByteSize()) + return n + + def ByteSizePartial(self): + n = 0 + n += 1 * len(self.found_) + for i in xrange(len(self.found_)): n += self.lengthString(self.found_[i].ByteSizePartial()) + n += 1 * len(self.missing_) + for i in xrange(len(self.missing_)): n += self.lengthString(self.missing_[i].ByteSizePartial()) + n += 1 * len(self.deferred_) + for i in xrange(len(self.deferred_)): n += self.lengthString(self.deferred_[i].ByteSizePartial()) + return n + + def Clear(self): + self.clear_found() + self.clear_missing() + self.clear_deferred() + + def OutputUnchecked(self, out): + for i in xrange(len(self.found_)): + out.putVarInt32(10) + out.putVarInt32(self.found_[i].ByteSize()) + self.found_[i].OutputUnchecked(out) + for i in xrange(len(self.missing_)): + out.putVarInt32(18) + out.putVarInt32(self.missing_[i].ByteSize()) + self.missing_[i].OutputUnchecked(out) + for i in xrange(len(self.deferred_)): + out.putVarInt32(26) + out.putVarInt32(self.deferred_[i].ByteSize()) + self.deferred_[i].OutputUnchecked(out) + + def OutputPartial(self, out): + for i in xrange(len(self.found_)): + out.putVarInt32(10) + out.putVarInt32(self.found_[i].ByteSizePartial()) + self.found_[i].OutputPartial(out) + for i in xrange(len(self.missing_)): + out.putVarInt32(18) + out.putVarInt32(self.missing_[i].ByteSizePartial()) + self.missing_[i].OutputPartial(out) + for i in xrange(len(self.deferred_)): + out.putVarInt32(26) + out.putVarInt32(self.deferred_[i].ByteSizePartial()) + self.deferred_[i].OutputPartial(out) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_found().TryMerge(tmp) + continue + if tt == 18: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_missing().TryMerge(tmp) + continue + if tt == 26: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_deferred().TryMerge(tmp) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + cnt=0 + for e in self.found_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("found%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + cnt=0 + for e in self.missing_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("missing%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + cnt=0 + for e in self.deferred_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("deferred%s <\n" % elm) + 
res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kfound = 1 + kmissing = 2 + kdeferred = 3 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "found", + 2: "missing", + 3: "deferred", + }, 3) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + 2: ProtocolBuffer.Encoder.STRING, + 3: ProtocolBuffer.Encoder.STRING, + }, 3, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.GetResponse' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiNhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5HZXRSZXNwb25zZRMaBWZvdW5kIAEoAjALOANKJGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eVJlc3VsdKMBqgEFY3R5cGWyAQZwcm90bzKkARQTGgdtaXNzaW5nIAIoAjALOANKJGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eVJlc3VsdKMBqgEFY3R5cGWyAQZwcm90bzKkARQTGghkZWZlcnJlZCADKAIwCzgDShthcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I=")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class RunQueryRequest(ProtocolBuffer.ProtocolMessage): + has_read_options_ = 0 + read_options_ = None + has_partition_id_ = 0 + partition_id_ = None + has_query_ = 0 + has_min_safe_time_seconds_ = 0 + min_safe_time_seconds_ = 0 + has_suggested_batch_size_ = 0 + suggested_batch_size_ = 0 + + def __init__(self, contents=None): + self.query_ = Query() + self.lazy_init_lock_ = thread.allocate_lock() + if contents is not None: self.MergeFromString(contents) + + def read_options(self): + if self.read_options_ is None: + self.lazy_init_lock_.acquire() + try: + if self.read_options_ is None: self.read_options_ = ReadOptions() + finally: + self.lazy_init_lock_.release() + return self.read_options_ + + def mutable_read_options(self): self.has_read_options_ = 1; return self.read_options() + + def clear_read_options(self): + + if self.has_read_options_: + self.has_read_options_ = 0; + if self.read_options_ is not None: self.read_options_.Clear() + + def has_read_options(self): return self.has_read_options_ + + def partition_id(self): + if self.partition_id_ is None: + self.lazy_init_lock_.acquire() + try: + if self.partition_id_ is None: self.partition_id_ = google.appengine.datastore.entity_v4_pb.PartitionId() + finally: + self.lazy_init_lock_.release() + return self.partition_id_ + + def mutable_partition_id(self): self.has_partition_id_ = 1; return self.partition_id() + + def clear_partition_id(self): + + if self.has_partition_id_: + self.has_partition_id_ = 0; + if self.partition_id_ is not None: self.partition_id_.Clear() + + def has_partition_id(self): return self.has_partition_id_ + + def query(self): return self.query_ + + def mutable_query(self): self.has_query_ = 1; return self.query_ + + def clear_query(self):self.has_query_ = 0; self.query_.Clear() + + def has_query(self): return self.has_query_ + + def min_safe_time_seconds(self): return self.min_safe_time_seconds_ + + def set_min_safe_time_seconds(self, x): + self.has_min_safe_time_seconds_ = 1 + self.min_safe_time_seconds_ = x + + def clear_min_safe_time_seconds(self): + if self.has_min_safe_time_seconds_: + self.has_min_safe_time_seconds_ = 0 + 
self.min_safe_time_seconds_ = 0 + + def has_min_safe_time_seconds(self): return self.has_min_safe_time_seconds_ + + def suggested_batch_size(self): return self.suggested_batch_size_ + + def set_suggested_batch_size(self, x): + self.has_suggested_batch_size_ = 1 + self.suggested_batch_size_ = x + + def clear_suggested_batch_size(self): + if self.has_suggested_batch_size_: + self.has_suggested_batch_size_ = 0 + self.suggested_batch_size_ = 0 + + def has_suggested_batch_size(self): return self.has_suggested_batch_size_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_read_options()): self.mutable_read_options().MergeFrom(x.read_options()) + if (x.has_partition_id()): self.mutable_partition_id().MergeFrom(x.partition_id()) + if (x.has_query()): self.mutable_query().MergeFrom(x.query()) + if (x.has_min_safe_time_seconds()): self.set_min_safe_time_seconds(x.min_safe_time_seconds()) + if (x.has_suggested_batch_size()): self.set_suggested_batch_size(x.suggested_batch_size()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.RunQueryRequest', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.RunQueryRequest') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.RunQueryRequest') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.RunQueryRequest', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.RunQueryRequest', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.RunQueryRequest', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_read_options_ != x.has_read_options_: return 0 + if self.has_read_options_ and self.read_options_ != x.read_options_: return 0 + if self.has_partition_id_ != x.has_partition_id_: return 0 + if self.has_partition_id_ and self.partition_id_ != x.partition_id_: return 0 + if self.has_query_ != x.has_query_: return 0 + if self.has_query_ and self.query_ != x.query_: return 0 + if self.has_min_safe_time_seconds_ != x.has_min_safe_time_seconds_: return 0 + if self.has_min_safe_time_seconds_ and self.min_safe_time_seconds_ != x.min_safe_time_seconds_: return 0 + if self.has_suggested_batch_size_ != x.has_suggested_batch_size_: return 0 + if self.has_suggested_batch_size_ and self.suggested_batch_size_ != x.suggested_batch_size_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (self.has_read_options_ and not self.read_options_.IsInitialized(debug_strs)): initialized = 0 + if (self.has_partition_id_ and not self.partition_id_.IsInitialized(debug_strs)): initialized = 0 + if (not self.has_query_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: query not set.') + elif not self.query_.IsInitialized(debug_strs): initialized = 0 + return initialized + + def ByteSize(self): + n = 0 + if (self.has_read_options_): n += 1 + self.lengthString(self.read_options_.ByteSize()) + if (self.has_partition_id_): n += 1 + 
self.lengthString(self.partition_id_.ByteSize()) + n += self.lengthString(self.query_.ByteSize()) + if (self.has_min_safe_time_seconds_): n += 1 + self.lengthVarInt64(self.min_safe_time_seconds_) + if (self.has_suggested_batch_size_): n += 1 + self.lengthVarInt64(self.suggested_batch_size_) + return n + 1 + + def ByteSizePartial(self): + n = 0 + if (self.has_read_options_): n += 1 + self.lengthString(self.read_options_.ByteSizePartial()) + if (self.has_partition_id_): n += 1 + self.lengthString(self.partition_id_.ByteSizePartial()) + if (self.has_query_): + n += 1 + n += self.lengthString(self.query_.ByteSizePartial()) + if (self.has_min_safe_time_seconds_): n += 1 + self.lengthVarInt64(self.min_safe_time_seconds_) + if (self.has_suggested_batch_size_): n += 1 + self.lengthVarInt64(self.suggested_batch_size_) + return n + + def Clear(self): + self.clear_read_options() + self.clear_partition_id() + self.clear_query() + self.clear_min_safe_time_seconds() + self.clear_suggested_batch_size() + + def OutputUnchecked(self, out): + if (self.has_read_options_): + out.putVarInt32(10) + out.putVarInt32(self.read_options_.ByteSize()) + self.read_options_.OutputUnchecked(out) + if (self.has_partition_id_): + out.putVarInt32(18) + out.putVarInt32(self.partition_id_.ByteSize()) + self.partition_id_.OutputUnchecked(out) + out.putVarInt32(26) + out.putVarInt32(self.query_.ByteSize()) + self.query_.OutputUnchecked(out) + if (self.has_min_safe_time_seconds_): + out.putVarInt32(32) + out.putVarInt64(self.min_safe_time_seconds_) + if (self.has_suggested_batch_size_): + out.putVarInt32(40) + out.putVarInt32(self.suggested_batch_size_) + + def OutputPartial(self, out): + if (self.has_read_options_): + out.putVarInt32(10) + out.putVarInt32(self.read_options_.ByteSizePartial()) + self.read_options_.OutputPartial(out) + if (self.has_partition_id_): + out.putVarInt32(18) + out.putVarInt32(self.partition_id_.ByteSizePartial()) + self.partition_id_.OutputPartial(out) + if (self.has_query_): + out.putVarInt32(26) + out.putVarInt32(self.query_.ByteSizePartial()) + self.query_.OutputPartial(out) + if (self.has_min_safe_time_seconds_): + out.putVarInt32(32) + out.putVarInt64(self.min_safe_time_seconds_) + if (self.has_suggested_batch_size_): + out.putVarInt32(40) + out.putVarInt32(self.suggested_batch_size_) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_read_options().TryMerge(tmp) + continue + if tt == 18: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_partition_id().TryMerge(tmp) + continue + if tt == 26: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_query().TryMerge(tmp) + continue + if tt == 32: + self.set_min_safe_time_seconds(d.getVarInt64()) + continue + if tt == 40: + self.set_suggested_batch_size(d.getVarInt32()) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_read_options_: + res+=prefix+"read_options <\n" + res+=self.read_options_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + if self.has_partition_id_: + res+=prefix+"partition_id <\n" + res+=self.partition_id_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + if self.has_query_: + 
res+=prefix+"query <\n" + res+=self.query_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + if self.has_min_safe_time_seconds_: res+=prefix+("min_safe_time_seconds: %s\n" % self.DebugFormatInt64(self.min_safe_time_seconds_)) + if self.has_suggested_batch_size_: res+=prefix+("suggested_batch_size: %s\n" % self.DebugFormatInt32(self.suggested_batch_size_)) + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kread_options = 1 + kpartition_id = 2 + kquery = 3 + kmin_safe_time_seconds = 4 + ksuggested_batch_size = 5 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "read_options", + 2: "partition_id", + 3: "query", + 4: "min_safe_time_seconds", + 5: "suggested_batch_size", + }, 5) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + 2: ProtocolBuffer.Encoder.STRING, + 3: ProtocolBuffer.Encoder.STRING, + 4: ProtocolBuffer.Encoder.NUMERIC, + 5: ProtocolBuffer.Encoder.NUMERIC, + }, 5, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.RunQueryRequest' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCidhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SdW5RdWVyeVJlcXVlc3QTGgxyZWFkX29wdGlvbnMgASgCMAs4AUojYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUmVhZE9wdGlvbnOjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoMcGFydGl0aW9uX2lkIAIoAjALOAFKI2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlBhcnRpdGlvbklkowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaBXF1ZXJ5IAMoAjALOAJKHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlF1ZXJ5owGqAQVjdHlwZbIBBnByb3RvMqQBFBMaFW1pbl9zYWZlX3RpbWVfc2Vjb25kcyAEKAAwAzgBFBMaFHN1Z2dlc3RlZF9iYXRjaF9zaXplIAUoADAFOAEUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I=")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class RunQueryResponse(ProtocolBuffer.ProtocolMessage): + has_batch_ = 0 + has_query_handle_ = 0 + query_handle_ = "" + + def __init__(self, contents=None): + self.batch_ = QueryResultBatch() + self.index_ = [] + if contents is not None: self.MergeFromString(contents) + + def batch(self): return self.batch_ + + def mutable_batch(self): self.has_batch_ = 1; return self.batch_ + + def clear_batch(self):self.has_batch_ = 0; self.batch_.Clear() + + def has_batch(self): return self.has_batch_ + + def query_handle(self): return self.query_handle_ + + def set_query_handle(self, x): + self.has_query_handle_ = 1 + self.query_handle_ = x + + def clear_query_handle(self): + if self.has_query_handle_: + self.has_query_handle_ = 0 + self.query_handle_ = "" + + def has_query_handle(self): return self.has_query_handle_ + + def index_size(self): return len(self.index_) + def index_list(self): return self.index_ + + def index(self, i): + return self.index_[i] + + def mutable_index(self, i): + return self.index_[i] + + def add_index(self): + x = google.appengine.datastore.entity_pb.CompositeIndex() + self.index_.append(x) + return x + + def clear_index(self): + self.index_ = [] + + def MergeFrom(self, x): + assert x is not self + if (x.has_batch()): self.mutable_batch().MergeFrom(x.batch()) + if (x.has_query_handle()): self.set_query_handle(x.query_handle()) + for i in xrange(x.index_size()): self.add_index().CopyFrom(x.index(i)) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + 
_net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.RunQueryResponse', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.RunQueryResponse') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.RunQueryResponse') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.RunQueryResponse', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.RunQueryResponse', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.RunQueryResponse', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_batch_ != x.has_batch_: return 0 + if self.has_batch_ and self.batch_ != x.batch_: return 0 + if self.has_query_handle_ != x.has_query_handle_: return 0 + if self.has_query_handle_ and self.query_handle_ != x.query_handle_: return 0 + if len(self.index_) != len(x.index_): return 0 + for e1, e2 in zip(self.index_, x.index_): + if e1 != e2: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_batch_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: batch not set.') + elif not self.batch_.IsInitialized(debug_strs): initialized = 0 + for p in self.index_: + if not p.IsInitialized(debug_strs): initialized=0 + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthString(self.batch_.ByteSize()) + if (self.has_query_handle_): n += 1 + self.lengthString(len(self.query_handle_)) + n += 1 * len(self.index_) + for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSize()) + return n + 1 + + def ByteSizePartial(self): + n = 0 + if (self.has_batch_): + n += 1 + n += self.lengthString(self.batch_.ByteSizePartial()) + if (self.has_query_handle_): n += 1 + self.lengthString(len(self.query_handle_)) + n += 1 * len(self.index_) + for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSizePartial()) + return n + + def Clear(self): + self.clear_batch() + self.clear_query_handle() + self.clear_index() + + def OutputUnchecked(self, out): + out.putVarInt32(10) + out.putVarInt32(self.batch_.ByteSize()) + self.batch_.OutputUnchecked(out) + if (self.has_query_handle_): + out.putVarInt32(18) + out.putPrefixedString(self.query_handle_) + for i in xrange(len(self.index_)): + out.putVarInt32(26) + out.putVarInt32(self.index_[i].ByteSize()) + self.index_[i].OutputUnchecked(out) + + def OutputPartial(self, out): + if (self.has_batch_): + out.putVarInt32(10) + out.putVarInt32(self.batch_.ByteSizePartial()) + self.batch_.OutputPartial(out) + if (self.has_query_handle_): + out.putVarInt32(18) + out.putPrefixedString(self.query_handle_) + for i in xrange(len(self.index_)): + out.putVarInt32(26) + out.putVarInt32(self.index_[i].ByteSizePartial()) + self.index_[i].OutputPartial(out) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_batch().TryMerge(tmp) + continue + if tt == 18: + 
self.set_query_handle(d.getPrefixedString()) + continue + if tt == 26: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_index().TryMerge(tmp) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_batch_: + res+=prefix+"batch <\n" + res+=self.batch_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + if self.has_query_handle_: res+=prefix+("query_handle: %s\n" % self.DebugFormatString(self.query_handle_)) + cnt=0 + for e in self.index_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("index%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kbatch = 1 + kquery_handle = 2 + kindex = 3 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "batch", + 2: "query_handle", + 3: "index", + }, 3) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + 2: ProtocolBuffer.Encoder.STRING, + 3: ProtocolBuffer.Encoder.STRING, + }, 3, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.RunQueryResponse' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCihhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SdW5RdWVyeVJlc3BvbnNlExoFYmF0Y2ggASgCMAs4AkooYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUXVlcnlSZXN1bHRCYXRjaKMBqgEFY3R5cGWyAQZwcm90bzKkARQTGgxxdWVyeV9oYW5kbGUgAigCMAk4ARQTGgVpbmRleCADKAIwCzgDSiJzdG9yYWdlX29uZXN0b3JlX3YzLkNvbXBvc2l0ZUluZGV4FMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class ContinueQueryRequest(ProtocolBuffer.ProtocolMessage): + has_query_handle_ = 0 + query_handle_ = "" + + def __init__(self, contents=None): + if contents is not None: self.MergeFromString(contents) + + def query_handle(self): return self.query_handle_ + + def set_query_handle(self, x): + self.has_query_handle_ = 1 + self.query_handle_ = x + + def clear_query_handle(self): + if self.has_query_handle_: + self.has_query_handle_ = 0 + self.query_handle_ = "" + + def has_query_handle(self): return self.has_query_handle_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_query_handle()): self.set_query_handle(x.query_handle()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.ContinueQueryRequest', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.ContinueQueryRequest') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.ContinueQueryRequest') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.ContinueQueryRequest', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 
'apphosting.datastore.v4.ContinueQueryRequest', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.ContinueQueryRequest', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_query_handle_ != x.has_query_handle_: return 0 + if self.has_query_handle_ and self.query_handle_ != x.query_handle_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_query_handle_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: query_handle not set.') + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthString(len(self.query_handle_)) + return n + 1 + + def ByteSizePartial(self): + n = 0 + if (self.has_query_handle_): + n += 1 + n += self.lengthString(len(self.query_handle_)) + return n + + def Clear(self): + self.clear_query_handle() + + def OutputUnchecked(self, out): + out.putVarInt32(10) + out.putPrefixedString(self.query_handle_) + + def OutputPartial(self, out): + if (self.has_query_handle_): + out.putVarInt32(10) + out.putPrefixedString(self.query_handle_) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + self.set_query_handle(d.getPrefixedString()) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_query_handle_: res+=prefix+("query_handle: %s\n" % self.DebugFormatString(self.query_handle_)) + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kquery_handle = 1 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "query_handle", + }, 1) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + }, 1, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.ContinueQueryRequest' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCixhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db250aW51ZVF1ZXJ5UmVxdWVzdBMaDHF1ZXJ5X2hhbmRsZSABKAIwCTgCFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class ContinueQueryResponse(ProtocolBuffer.ProtocolMessage): + has_batch_ = 0 + + def __init__(self, contents=None): + self.batch_ = QueryResultBatch() + if contents is not None: self.MergeFromString(contents) + + def batch(self): return self.batch_ + + def mutable_batch(self): self.has_batch_ = 1; return self.batch_ + + def clear_batch(self):self.has_batch_ = 0; self.batch_.Clear() + + def has_batch(self): return self.has_batch_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_batch()): self.mutable_batch().MergeFrom(x.batch()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.ContinueQueryResponse', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.ContinueQueryResponse') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return 
_net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.ContinueQueryResponse') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.ContinueQueryResponse', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.ContinueQueryResponse', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.ContinueQueryResponse', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_batch_ != x.has_batch_: return 0 + if self.has_batch_ and self.batch_ != x.batch_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_batch_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: batch not set.') + elif not self.batch_.IsInitialized(debug_strs): initialized = 0 + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthString(self.batch_.ByteSize()) + return n + 1 + + def ByteSizePartial(self): + n = 0 + if (self.has_batch_): + n += 1 + n += self.lengthString(self.batch_.ByteSizePartial()) + return n + + def Clear(self): + self.clear_batch() + + def OutputUnchecked(self, out): + out.putVarInt32(10) + out.putVarInt32(self.batch_.ByteSize()) + self.batch_.OutputUnchecked(out) + + def OutputPartial(self, out): + if (self.has_batch_): + out.putVarInt32(10) + out.putVarInt32(self.batch_.ByteSizePartial()) + self.batch_.OutputPartial(out) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_batch().TryMerge(tmp) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_batch_: + res+=prefix+"batch <\n" + res+=self.batch_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kbatch = 1 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "batch", + }, 1) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + }, 1, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.ContinueQueryResponse' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCi1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db250aW51ZVF1ZXJ5UmVzcG9uc2UTGgViYXRjaCABKAIwCzgCSihhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5RdWVyeVJlc3VsdEJhdGNoowGqAQVjdHlwZbIBBnByb3RvMqQBFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class BeginTransactionRequest(ProtocolBuffer.ProtocolMessage): + has_cross_group_ = 0 + cross_group_ = 0 + has_cross_request_ = 0 + cross_request_ = 0 + + def __init__(self, contents=None): + if contents is not None: self.MergeFromString(contents) + + def cross_group(self): return self.cross_group_ + + def 
set_cross_group(self, x): + self.has_cross_group_ = 1 + self.cross_group_ = x + + def clear_cross_group(self): + if self.has_cross_group_: + self.has_cross_group_ = 0 + self.cross_group_ = 0 + + def has_cross_group(self): return self.has_cross_group_ + + def cross_request(self): return self.cross_request_ + + def set_cross_request(self, x): + self.has_cross_request_ = 1 + self.cross_request_ = x + + def clear_cross_request(self): + if self.has_cross_request_: + self.has_cross_request_ = 0 + self.cross_request_ = 0 + + def has_cross_request(self): return self.has_cross_request_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_cross_group()): self.set_cross_group(x.cross_group()) + if (x.has_cross_request()): self.set_cross_request(x.cross_request()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.BeginTransactionRequest', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.BeginTransactionRequest') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.BeginTransactionRequest') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.BeginTransactionRequest', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.BeginTransactionRequest', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.BeginTransactionRequest', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_cross_group_ != x.has_cross_group_: return 0 + if self.has_cross_group_ and self.cross_group_ != x.cross_group_: return 0 + if self.has_cross_request_ != x.has_cross_request_: return 0 + if self.has_cross_request_ and self.cross_request_ != x.cross_request_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + return initialized + + def ByteSize(self): + n = 0 + if (self.has_cross_group_): n += 2 + if (self.has_cross_request_): n += 2 + return n + + def ByteSizePartial(self): + n = 0 + if (self.has_cross_group_): n += 2 + if (self.has_cross_request_): n += 2 + return n + + def Clear(self): + self.clear_cross_group() + self.clear_cross_request() + + def OutputUnchecked(self, out): + if (self.has_cross_group_): + out.putVarInt32(8) + out.putBoolean(self.cross_group_) + if (self.has_cross_request_): + out.putVarInt32(16) + out.putBoolean(self.cross_request_) + + def OutputPartial(self, out): + if (self.has_cross_group_): + out.putVarInt32(8) + out.putBoolean(self.cross_group_) + if (self.has_cross_request_): + out.putVarInt32(16) + out.putBoolean(self.cross_request_) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 8: + self.set_cross_group(d.getBoolean()) + continue + if tt == 16: + self.set_cross_request(d.getBoolean()) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_cross_group_: res+=prefix+("cross_group: %s\n" % self.DebugFormatBool(self.cross_group_)) + if 
self.has_cross_request_: res+=prefix+("cross_request: %s\n" % self.DebugFormatBool(self.cross_request_)) + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kcross_group = 1 + kcross_request = 2 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "cross_group", + 2: "cross_request", + }, 2) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.NUMERIC, + 2: ProtocolBuffer.Encoder.NUMERIC, + }, 2, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.BeginTransactionRequest' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCi9hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5CZWdpblRyYW5zYWN0aW9uUmVxdWVzdBMaC2Nyb3NzX2dyb3VwIAEoADAIOAFCBWZhbHNlowGqAQdkZWZhdWx0sgEFZmFsc2WkARQTGg1jcm9zc19yZXF1ZXN0IAIoADAIOAFCBWZhbHNlowGqAQdkZWZhdWx0sgEFZmFsc2WkARTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg==")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class BeginTransactionResponse(ProtocolBuffer.ProtocolMessage): + has_transaction_ = 0 + transaction_ = "" + + def __init__(self, contents=None): + if contents is not None: self.MergeFromString(contents) + + def transaction(self): return self.transaction_ + + def set_transaction(self, x): + self.has_transaction_ = 1 + self.transaction_ = x + + def clear_transaction(self): + if self.has_transaction_: + self.has_transaction_ = 0 + self.transaction_ = "" + + def has_transaction(self): return self.has_transaction_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_transaction()): self.set_transaction(x.transaction()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.BeginTransactionResponse', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.BeginTransactionResponse') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.BeginTransactionResponse') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.BeginTransactionResponse', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.BeginTransactionResponse', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.BeginTransactionResponse', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_transaction_ != x.has_transaction_: return 0 + if self.has_transaction_ and self.transaction_ != x.transaction_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_transaction_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: transaction not set.') + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthString(len(self.transaction_)) + return n + 1 + + def 
ByteSizePartial(self): + n = 0 + if (self.has_transaction_): + n += 1 + n += self.lengthString(len(self.transaction_)) + return n + + def Clear(self): + self.clear_transaction() + + def OutputUnchecked(self, out): + out.putVarInt32(10) + out.putPrefixedString(self.transaction_) + + def OutputPartial(self, out): + if (self.has_transaction_): + out.putVarInt32(10) + out.putPrefixedString(self.transaction_) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + self.set_transaction(d.getPrefixedString()) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_transaction_: res+=prefix+("transaction: %s\n" % self.DebugFormatString(self.transaction_)) + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + ktransaction = 1 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "transaction", + }, 1) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + }, 1, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.BeginTransactionResponse' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCjBhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5CZWdpblRyYW5zYWN0aW9uUmVzcG9uc2UTGgt0cmFuc2FjdGlvbiABKAIwCTgCFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class RollbackRequest(ProtocolBuffer.ProtocolMessage): + has_transaction_ = 0 + transaction_ = "" + + def __init__(self, contents=None): + if contents is not None: self.MergeFromString(contents) + + def transaction(self): return self.transaction_ + + def set_transaction(self, x): + self.has_transaction_ = 1 + self.transaction_ = x + + def clear_transaction(self): + if self.has_transaction_: + self.has_transaction_ = 0 + self.transaction_ = "" + + def has_transaction(self): return self.has_transaction_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_transaction()): self.set_transaction(x.transaction()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.RollbackRequest', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.RollbackRequest') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.RollbackRequest') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.RollbackRequest', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.RollbackRequest', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.RollbackRequest', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_transaction_ != 
x.has_transaction_: return 0 + if self.has_transaction_ and self.transaction_ != x.transaction_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_transaction_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: transaction not set.') + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthString(len(self.transaction_)) + return n + 1 + + def ByteSizePartial(self): + n = 0 + if (self.has_transaction_): + n += 1 + n += self.lengthString(len(self.transaction_)) + return n + + def Clear(self): + self.clear_transaction() + + def OutputUnchecked(self, out): + out.putVarInt32(10) + out.putPrefixedString(self.transaction_) + + def OutputPartial(self, out): + if (self.has_transaction_): + out.putVarInt32(10) + out.putPrefixedString(self.transaction_) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + self.set_transaction(d.getPrefixedString()) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_transaction_: res+=prefix+("transaction: %s\n" % self.DebugFormatString(self.transaction_)) + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + ktransaction = 1 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "transaction", + }, 1) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + }, 1, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.RollbackRequest' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCidhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Sb2xsYmFja1JlcXVlc3QTGgt0cmFuc2FjdGlvbiABKAIwCTgCFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class RollbackResponse(ProtocolBuffer.ProtocolMessage): + + def __init__(self, contents=None): + pass + if contents is not None: self.MergeFromString(contents) + + + def MergeFrom(self, x): + assert x is not self + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.RollbackResponse', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.RollbackResponse') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.RollbackResponse') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.RollbackResponse', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.RollbackResponse', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.RollbackResponse', s) + + + def Equals(self, x): + if x is self: return 1 + return 1 + + def 
IsInitialized(self, debug_strs=None): + initialized = 1 + return initialized + + def ByteSize(self): + n = 0 + return n + + def ByteSizePartial(self): + n = 0 + return n + + def Clear(self): + pass + + def OutputUnchecked(self, out): + pass + + def OutputPartial(self, out): + pass + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + }, 0) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + }, 0, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.RollbackResponse' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCihhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Sb2xsYmFja1Jlc3BvbnNlwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I=")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class CommitRequest(ProtocolBuffer.ProtocolMessage): + has_transaction_ = 0 + transaction_ = "" + has_mutation_ = 0 + mutation_ = None + + def __init__(self, contents=None): + self.lazy_init_lock_ = thread.allocate_lock() + if contents is not None: self.MergeFromString(contents) + + def transaction(self): return self.transaction_ + + def set_transaction(self, x): + self.has_transaction_ = 1 + self.transaction_ = x + + def clear_transaction(self): + if self.has_transaction_: + self.has_transaction_ = 0 + self.transaction_ = "" + + def has_transaction(self): return self.has_transaction_ + + def mutation(self): + if self.mutation_ is None: + self.lazy_init_lock_.acquire() + try: + if self.mutation_ is None: self.mutation_ = Mutation() + finally: + self.lazy_init_lock_.release() + return self.mutation_ + + def mutable_mutation(self): self.has_mutation_ = 1; return self.mutation() + + def clear_mutation(self): + + if self.has_mutation_: + self.has_mutation_ = 0; + if self.mutation_ is not None: self.mutation_.Clear() + + def has_mutation(self): return self.has_mutation_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_transaction()): self.set_transaction(x.transaction()) + if (x.has_mutation()): self.mutable_mutation().MergeFrom(x.mutation()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.CommitRequest', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.CommitRequest') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.CommitRequest') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.CommitRequest', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.CommitRequest', s) + + + if _net_proto___parse__python is not None: + def 
ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.CommitRequest', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_transaction_ != x.has_transaction_: return 0 + if self.has_transaction_ and self.transaction_ != x.transaction_: return 0 + if self.has_mutation_ != x.has_mutation_: return 0 + if self.has_mutation_ and self.mutation_ != x.mutation_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_transaction_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: transaction not set.') + if (self.has_mutation_ and not self.mutation_.IsInitialized(debug_strs)): initialized = 0 + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthString(len(self.transaction_)) + if (self.has_mutation_): n += 1 + self.lengthString(self.mutation_.ByteSize()) + return n + 1 + + def ByteSizePartial(self): + n = 0 + if (self.has_transaction_): + n += 1 + n += self.lengthString(len(self.transaction_)) + if (self.has_mutation_): n += 1 + self.lengthString(self.mutation_.ByteSizePartial()) + return n + + def Clear(self): + self.clear_transaction() + self.clear_mutation() + + def OutputUnchecked(self, out): + out.putVarInt32(10) + out.putPrefixedString(self.transaction_) + if (self.has_mutation_): + out.putVarInt32(18) + out.putVarInt32(self.mutation_.ByteSize()) + self.mutation_.OutputUnchecked(out) + + def OutputPartial(self, out): + if (self.has_transaction_): + out.putVarInt32(10) + out.putPrefixedString(self.transaction_) + if (self.has_mutation_): + out.putVarInt32(18) + out.putVarInt32(self.mutation_.ByteSizePartial()) + self.mutation_.OutputPartial(out) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + self.set_transaction(d.getPrefixedString()) + continue + if tt == 18: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_mutation().TryMerge(tmp) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_transaction_: res+=prefix+("transaction: %s\n" % self.DebugFormatString(self.transaction_)) + if self.has_mutation_: + res+=prefix+"mutation <\n" + res+=self.mutation_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + ktransaction = 1 + kmutation = 2 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "transaction", + 2: "mutation", + }, 2) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + 2: ProtocolBuffer.Encoder.STRING, + }, 2, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.CommitRequest' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiVhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db21taXRSZXF1ZXN0ExoLdHJhbnNhY3Rpb24gASgCMAk4AhQTGghtdXRhdGlvbiACKAIwCzgBSiBhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvbqMBqgEFY3R5cGWyAQZwcm90bzKkARTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg==")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + 
_SERIALIZED_DESCRIPTOR.tostring()) + +class CommitResponse(ProtocolBuffer.ProtocolMessage): + has_mutation_result_ = 0 + mutation_result_ = None + + def __init__(self, contents=None): + self.lazy_init_lock_ = thread.allocate_lock() + if contents is not None: self.MergeFromString(contents) + + def mutation_result(self): + if self.mutation_result_ is None: + self.lazy_init_lock_.acquire() + try: + if self.mutation_result_ is None: self.mutation_result_ = MutationResult() + finally: + self.lazy_init_lock_.release() + return self.mutation_result_ + + def mutable_mutation_result(self): self.has_mutation_result_ = 1; return self.mutation_result() + + def clear_mutation_result(self): + + if self.has_mutation_result_: + self.has_mutation_result_ = 0; + if self.mutation_result_ is not None: self.mutation_result_.Clear() + + def has_mutation_result(self): return self.has_mutation_result_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_mutation_result()): self.mutable_mutation_result().MergeFrom(x.mutation_result()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.CommitResponse', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.CommitResponse') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.CommitResponse') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.CommitResponse', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.CommitResponse', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.CommitResponse', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_mutation_result_ != x.has_mutation_result_: return 0 + if self.has_mutation_result_ and self.mutation_result_ != x.mutation_result_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (self.has_mutation_result_ and not self.mutation_result_.IsInitialized(debug_strs)): initialized = 0 + return initialized + + def ByteSize(self): + n = 0 + if (self.has_mutation_result_): n += 1 + self.lengthString(self.mutation_result_.ByteSize()) + return n + + def ByteSizePartial(self): + n = 0 + if (self.has_mutation_result_): n += 1 + self.lengthString(self.mutation_result_.ByteSizePartial()) + return n + + def Clear(self): + self.clear_mutation_result() + + def OutputUnchecked(self, out): + if (self.has_mutation_result_): + out.putVarInt32(10) + out.putVarInt32(self.mutation_result_.ByteSize()) + self.mutation_result_.OutputUnchecked(out) + + def OutputPartial(self, out): + if (self.has_mutation_result_): + out.putVarInt32(10) + out.putVarInt32(self.mutation_result_.ByteSizePartial()) + self.mutation_result_.OutputPartial(out) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_mutation_result().TryMerge(tmp) + continue + + + if (tt == 0): raise 
ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_mutation_result_: + res+=prefix+"mutation_result <\n" + res+=self.mutation_result_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kmutation_result = 1 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "mutation_result", + }, 1) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + }, 1, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.CommitResponse' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiZhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db21taXRSZXNwb25zZRMaD211dGF0aW9uX3Jlc3VsdCABKAIwCzgBSiZhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvblJlc3VsdKMBqgEFY3R5cGWyAQZwcm90bzKkARTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg==")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class WriteRequest(ProtocolBuffer.ProtocolMessage): + has_mutation_ = 0 + + def __init__(self, contents=None): + self.mutation_ = Mutation() + if contents is not None: self.MergeFromString(contents) + + def mutation(self): return self.mutation_ + + def mutable_mutation(self): self.has_mutation_ = 1; return self.mutation_ + + def clear_mutation(self):self.has_mutation_ = 0; self.mutation_.Clear() + + def has_mutation(self): return self.has_mutation_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_mutation()): self.mutable_mutation().MergeFrom(x.mutation()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.WriteRequest', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.WriteRequest') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.WriteRequest') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.WriteRequest', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.WriteRequest', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.WriteRequest', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_mutation_ != x.has_mutation_: return 0 + if self.has_mutation_ and self.mutation_ != x.mutation_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_mutation_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: mutation not set.') + elif not self.mutation_.IsInitialized(debug_strs): initialized = 0 + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthString(self.mutation_.ByteSize()) + return n + 1 + + def ByteSizePartial(self): + n = 0 + if 
(self.has_mutation_): + n += 1 + n += self.lengthString(self.mutation_.ByteSizePartial()) + return n + + def Clear(self): + self.clear_mutation() + + def OutputUnchecked(self, out): + out.putVarInt32(10) + out.putVarInt32(self.mutation_.ByteSize()) + self.mutation_.OutputUnchecked(out) + + def OutputPartial(self, out): + if (self.has_mutation_): + out.putVarInt32(10) + out.putVarInt32(self.mutation_.ByteSizePartial()) + self.mutation_.OutputPartial(out) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_mutation().TryMerge(tmp) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_mutation_: + res+=prefix+"mutation <\n" + res+=self.mutation_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kmutation = 1 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "mutation", + }, 1) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + }, 1, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.WriteRequest' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiRhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Xcml0ZVJlcXVlc3QTGghtdXRhdGlvbiABKAIwCzgCSiBhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvbqMBqgEFY3R5cGWyAQZwcm90bzKkARTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg==")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class WriteResponse(ProtocolBuffer.ProtocolMessage): + has_mutation_result_ = 0 + + def __init__(self, contents=None): + self.mutation_result_ = MutationResult() + if contents is not None: self.MergeFromString(contents) + + def mutation_result(self): return self.mutation_result_ + + def mutable_mutation_result(self): self.has_mutation_result_ = 1; return self.mutation_result_ + + def clear_mutation_result(self):self.has_mutation_result_ = 0; self.mutation_result_.Clear() + + def has_mutation_result(self): return self.has_mutation_result_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_mutation_result()): self.mutable_mutation_result().MergeFrom(x.mutation_result()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.WriteResponse', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.WriteResponse') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.WriteResponse') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.WriteResponse', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.WriteResponse', s) + + + if 
_net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.WriteResponse', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_mutation_result_ != x.has_mutation_result_: return 0 + if self.has_mutation_result_ and self.mutation_result_ != x.mutation_result_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_mutation_result_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: mutation_result not set.') + elif not self.mutation_result_.IsInitialized(debug_strs): initialized = 0 + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthString(self.mutation_result_.ByteSize()) + return n + 1 + + def ByteSizePartial(self): + n = 0 + if (self.has_mutation_result_): + n += 1 + n += self.lengthString(self.mutation_result_.ByteSizePartial()) + return n + + def Clear(self): + self.clear_mutation_result() + + def OutputUnchecked(self, out): + out.putVarInt32(10) + out.putVarInt32(self.mutation_result_.ByteSize()) + self.mutation_result_.OutputUnchecked(out) + + def OutputPartial(self, out): + if (self.has_mutation_result_): + out.putVarInt32(10) + out.putVarInt32(self.mutation_result_.ByteSizePartial()) + self.mutation_result_.OutputPartial(out) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_mutation_result().TryMerge(tmp) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_mutation_result_: + res+=prefix+"mutation_result <\n" + res+=self.mutation_result_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kmutation_result = 1 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "mutation_result", + }, 1) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + }, 1, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.WriteResponse' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiVhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Xcml0ZVJlc3BvbnNlExoPbXV0YXRpb25fcmVzdWx0IAEoAjALOAJKJmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uUmVzdWx0owGqAQVjdHlwZbIBBnByb3RvMqQBFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class AllocateIdsRequest(ProtocolBuffer.ProtocolMessage): + + def __init__(self, contents=None): + self.allocate_ = [] + self.reserve_ = [] + if contents is not None: self.MergeFromString(contents) + + def allocate_size(self): return len(self.allocate_) + def allocate_list(self): return self.allocate_ + + def allocate(self, i): + return self.allocate_[i] + + def mutable_allocate(self, i): + return self.allocate_[i] + + def add_allocate(self): + x = google.appengine.datastore.entity_v4_pb.Key() + self.allocate_.append(x) + return x + + def clear_allocate(self): + self.allocate_ = [] 
+ def reserve_size(self): return len(self.reserve_) + def reserve_list(self): return self.reserve_ + + def reserve(self, i): + return self.reserve_[i] + + def mutable_reserve(self, i): + return self.reserve_[i] + + def add_reserve(self): + x = google.appengine.datastore.entity_v4_pb.Key() + self.reserve_.append(x) + return x + + def clear_reserve(self): + self.reserve_ = [] + + def MergeFrom(self, x): + assert x is not self + for i in xrange(x.allocate_size()): self.add_allocate().CopyFrom(x.allocate(i)) + for i in xrange(x.reserve_size()): self.add_reserve().CopyFrom(x.reserve(i)) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.AllocateIdsRequest', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.AllocateIdsRequest') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.AllocateIdsRequest') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.AllocateIdsRequest', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.AllocateIdsRequest', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.AllocateIdsRequest', s) + + + def Equals(self, x): + if x is self: return 1 + if len(self.allocate_) != len(x.allocate_): return 0 + for e1, e2 in zip(self.allocate_, x.allocate_): + if e1 != e2: return 0 + if len(self.reserve_) != len(x.reserve_): return 0 + for e1, e2 in zip(self.reserve_, x.reserve_): + if e1 != e2: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + for p in self.allocate_: + if not p.IsInitialized(debug_strs): initialized=0 + for p in self.reserve_: + if not p.IsInitialized(debug_strs): initialized=0 + return initialized + + def ByteSize(self): + n = 0 + n += 1 * len(self.allocate_) + for i in xrange(len(self.allocate_)): n += self.lengthString(self.allocate_[i].ByteSize()) + n += 1 * len(self.reserve_) + for i in xrange(len(self.reserve_)): n += self.lengthString(self.reserve_[i].ByteSize()) + return n + + def ByteSizePartial(self): + n = 0 + n += 1 * len(self.allocate_) + for i in xrange(len(self.allocate_)): n += self.lengthString(self.allocate_[i].ByteSizePartial()) + n += 1 * len(self.reserve_) + for i in xrange(len(self.reserve_)): n += self.lengthString(self.reserve_[i].ByteSizePartial()) + return n + + def Clear(self): + self.clear_allocate() + self.clear_reserve() + + def OutputUnchecked(self, out): + for i in xrange(len(self.allocate_)): + out.putVarInt32(10) + out.putVarInt32(self.allocate_[i].ByteSize()) + self.allocate_[i].OutputUnchecked(out) + for i in xrange(len(self.reserve_)): + out.putVarInt32(18) + out.putVarInt32(self.reserve_[i].ByteSize()) + self.reserve_[i].OutputUnchecked(out) + + def OutputPartial(self, out): + for i in xrange(len(self.allocate_)): + out.putVarInt32(10) + out.putVarInt32(self.allocate_[i].ByteSizePartial()) + self.allocate_[i].OutputPartial(out) + for i in xrange(len(self.reserve_)): + out.putVarInt32(18) + out.putVarInt32(self.reserve_[i].ByteSizePartial()) + 
self.reserve_[i].OutputPartial(out) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_allocate().TryMerge(tmp) + continue + if tt == 18: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_reserve().TryMerge(tmp) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + cnt=0 + for e in self.allocate_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("allocate%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + cnt=0 + for e in self.reserve_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("reserve%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kallocate = 1 + kreserve = 2 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "allocate", + 2: "reserve", + }, 2) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + 2: ProtocolBuffer.Encoder.STRING, + }, 2, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.AllocateIdsRequest' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiphcHBob3N0aW5nLmRhdGFzdG9yZS52NC5BbGxvY2F0ZUlkc1JlcXVlc3QTGghhbGxvY2F0ZSABKAIwCzgDShthcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoHcmVzZXJ2ZSACKAIwCzgDShthcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I=")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class AllocateIdsResponse(ProtocolBuffer.ProtocolMessage): + + def __init__(self, contents=None): + self.allocated_ = [] + if contents is not None: self.MergeFromString(contents) + + def allocated_size(self): return len(self.allocated_) + def allocated_list(self): return self.allocated_ + + def allocated(self, i): + return self.allocated_[i] + + def mutable_allocated(self, i): + return self.allocated_[i] + + def add_allocated(self): + x = google.appengine.datastore.entity_v4_pb.Key() + self.allocated_.append(x) + return x + + def clear_allocated(self): + self.allocated_ = [] + + def MergeFrom(self, x): + assert x is not self + for i in xrange(x.allocated_size()): self.add_allocated().CopyFrom(x.allocated(i)) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.AllocateIdsResponse', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.AllocateIdsResponse') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.AllocateIdsResponse') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 
'apphosting.datastore.v4.AllocateIdsResponse', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.AllocateIdsResponse', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.AllocateIdsResponse', s) + + + def Equals(self, x): + if x is self: return 1 + if len(self.allocated_) != len(x.allocated_): return 0 + for e1, e2 in zip(self.allocated_, x.allocated_): + if e1 != e2: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + for p in self.allocated_: + if not p.IsInitialized(debug_strs): initialized=0 + return initialized + + def ByteSize(self): + n = 0 + n += 1 * len(self.allocated_) + for i in xrange(len(self.allocated_)): n += self.lengthString(self.allocated_[i].ByteSize()) + return n + + def ByteSizePartial(self): + n = 0 + n += 1 * len(self.allocated_) + for i in xrange(len(self.allocated_)): n += self.lengthString(self.allocated_[i].ByteSizePartial()) + return n + + def Clear(self): + self.clear_allocated() + + def OutputUnchecked(self, out): + for i in xrange(len(self.allocated_)): + out.putVarInt32(10) + out.putVarInt32(self.allocated_[i].ByteSize()) + self.allocated_[i].OutputUnchecked(out) + + def OutputPartial(self, out): + for i in xrange(len(self.allocated_)): + out.putVarInt32(10) + out.putVarInt32(self.allocated_[i].ByteSizePartial()) + self.allocated_[i].OutputPartial(out) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_allocated().TryMerge(tmp) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + cnt=0 + for e in self.allocated_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("allocated%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kallocated = 1 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "allocated", + }, 1) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + }, 1, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.AllocateIdsResponse' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCithcHBob3N0aW5nLmRhdGFzdG9yZS52NC5BbGxvY2F0ZUlkc1Jlc3BvbnNlExoJYWxsb2NhdGVkIAEoAjALOANKG2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleaMBqgEFY3R5cGWyAQZwcm90bzKkARTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg==")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + + + +class _DatastoreV4Service_ClientBaseStub(_client_stub_base_class): + """Makes Stubby RPC calls to a DatastoreV4Service server.""" + + __slots__ = ( + '_protorpc_BeginTransaction', '_full_name_BeginTransaction', + '_protorpc_Rollback', '_full_name_Rollback', + '_protorpc_Commit', '_full_name_Commit', + '_protorpc_Write', '_full_name_Write', + '_protorpc_RunQuery', 
'_full_name_RunQuery', + '_protorpc_ContinueQuery', '_full_name_ContinueQuery', + '_protorpc_Get', '_full_name_Get', + '_protorpc_AllocateIds', '_full_name_AllocateIds', + ) + + def __init__(self, rpc_stub): + self._stub = rpc_stub + + self._protorpc_BeginTransaction = pywraprpc.RPC() + self._full_name_BeginTransaction = self._stub.GetFullMethodName( + 'BeginTransaction') + + self._protorpc_Rollback = pywraprpc.RPC() + self._full_name_Rollback = self._stub.GetFullMethodName( + 'Rollback') + + self._protorpc_Commit = pywraprpc.RPC() + self._full_name_Commit = self._stub.GetFullMethodName( + 'Commit') + + self._protorpc_Write = pywraprpc.RPC() + self._full_name_Write = self._stub.GetFullMethodName( + 'Write') + + self._protorpc_RunQuery = pywraprpc.RPC() + self._full_name_RunQuery = self._stub.GetFullMethodName( + 'RunQuery') + + self._protorpc_ContinueQuery = pywraprpc.RPC() + self._full_name_ContinueQuery = self._stub.GetFullMethodName( + 'ContinueQuery') + + self._protorpc_Get = pywraprpc.RPC() + self._full_name_Get = self._stub.GetFullMethodName( + 'Get') + + self._protorpc_AllocateIds = pywraprpc.RPC() + self._full_name_AllocateIds = self._stub.GetFullMethodName( + 'AllocateIds') + + def BeginTransaction(self, request, rpc=None, callback=None, response=None): + """Make a BeginTransaction RPC call. + + Args: + request: a BeginTransactionRequest instance. + rpc: Optional RPC instance to use for the call. + callback: Optional final callback. Will be called as + callback(rpc, result) when the rpc completes. If None, the + call is synchronous. + response: Optional ProtocolMessage to be filled in with response. + + Returns: + The BeginTransactionResponse if callback is None. Otherwise, returns None. + """ + + if response is None: + response = BeginTransactionResponse + return self._MakeCall(rpc, + self._full_name_BeginTransaction, + 'BeginTransaction', + request, + response, + callback, + self._protorpc_BeginTransaction) + + def Rollback(self, request, rpc=None, callback=None, response=None): + """Make a Rollback RPC call. + + Args: + request: a RollbackRequest instance. + rpc: Optional RPC instance to use for the call. + callback: Optional final callback. Will be called as + callback(rpc, result) when the rpc completes. If None, the + call is synchronous. + response: Optional ProtocolMessage to be filled in with response. + + Returns: + The RollbackResponse if callback is None. Otherwise, returns None. + """ + + if response is None: + response = RollbackResponse + return self._MakeCall(rpc, + self._full_name_Rollback, + 'Rollback', + request, + response, + callback, + self._protorpc_Rollback) + + def Commit(self, request, rpc=None, callback=None, response=None): + """Make a Commit RPC call. + + Args: + request: a CommitRequest instance. + rpc: Optional RPC instance to use for the call. + callback: Optional final callback. Will be called as + callback(rpc, result) when the rpc completes. If None, the + call is synchronous. + response: Optional ProtocolMessage to be filled in with response. + + Returns: + The CommitResponse if callback is None. Otherwise, returns None. + """ + + if response is None: + response = CommitResponse + return self._MakeCall(rpc, + self._full_name_Commit, + 'Commit', + request, + response, + callback, + self._protorpc_Commit) + + def Write(self, request, rpc=None, callback=None, response=None): + """Make a Write RPC call. + + Args: + request: a WriteRequest instance. + rpc: Optional RPC instance to use for the call. + callback: Optional final callback. 
Will be called as + callback(rpc, result) when the rpc completes. If None, the + call is synchronous. + response: Optional ProtocolMessage to be filled in with response. + + Returns: + The WriteResponse if callback is None. Otherwise, returns None. + """ + + if response is None: + response = WriteResponse + return self._MakeCall(rpc, + self._full_name_Write, + 'Write', + request, + response, + callback, + self._protorpc_Write) + + def RunQuery(self, request, rpc=None, callback=None, response=None): + """Make a RunQuery RPC call. + + Args: + request: a RunQueryRequest instance. + rpc: Optional RPC instance to use for the call. + callback: Optional final callback. Will be called as + callback(rpc, result) when the rpc completes. If None, the + call is synchronous. + response: Optional ProtocolMessage to be filled in with response. + + Returns: + The RunQueryResponse if callback is None. Otherwise, returns None. + """ + + if response is None: + response = RunQueryResponse + return self._MakeCall(rpc, + self._full_name_RunQuery, + 'RunQuery', + request, + response, + callback, + self._protorpc_RunQuery) + + def ContinueQuery(self, request, rpc=None, callback=None, response=None): + """Make a ContinueQuery RPC call. + + Args: + request: a ContinueQueryRequest instance. + rpc: Optional RPC instance to use for the call. + callback: Optional final callback. Will be called as + callback(rpc, result) when the rpc completes. If None, the + call is synchronous. + response: Optional ProtocolMessage to be filled in with response. + + Returns: + The ContinueQueryResponse if callback is None. Otherwise, returns None. + """ + + if response is None: + response = ContinueQueryResponse + return self._MakeCall(rpc, + self._full_name_ContinueQuery, + 'ContinueQuery', + request, + response, + callback, + self._protorpc_ContinueQuery) + + def Get(self, request, rpc=None, callback=None, response=None): + """Make a Get RPC call. + + Args: + request: a GetRequest instance. + rpc: Optional RPC instance to use for the call. + callback: Optional final callback. Will be called as + callback(rpc, result) when the rpc completes. If None, the + call is synchronous. + response: Optional ProtocolMessage to be filled in with response. + + Returns: + The GetResponse if callback is None. Otherwise, returns None. + """ + + if response is None: + response = GetResponse + return self._MakeCall(rpc, + self._full_name_Get, + 'Get', + request, + response, + callback, + self._protorpc_Get) + + def AllocateIds(self, request, rpc=None, callback=None, response=None): + """Make a AllocateIds RPC call. + + Args: + request: a AllocateIdsRequest instance. + rpc: Optional RPC instance to use for the call. + callback: Optional final callback. Will be called as + callback(rpc, result) when the rpc completes. If None, the + call is synchronous. + response: Optional ProtocolMessage to be filled in with response. + + Returns: + The AllocateIdsResponse if callback is None. Otherwise, returns None. 
+ """ + + if response is None: + response = AllocateIdsResponse + return self._MakeCall(rpc, + self._full_name_AllocateIds, + 'AllocateIds', + request, + response, + callback, + self._protorpc_AllocateIds) + + +class _DatastoreV4Service_ClientStub(_DatastoreV4Service_ClientBaseStub): + __slots__ = ('_params',) + def __init__(self, rpc_stub_parameters, service_name): + if service_name is None: + service_name = 'DatastoreV4Service' + _DatastoreV4Service_ClientBaseStub.__init__(self, pywraprpc.RPC_GenericStub(service_name, rpc_stub_parameters)) + self._params = rpc_stub_parameters + + +class _DatastoreV4Service_RPC2ClientStub(_DatastoreV4Service_ClientBaseStub): + __slots__ = () + def __init__(self, server, channel, service_name): + if service_name is None: + service_name = 'DatastoreV4Service' + if channel is not None: + if channel.version() == 1: + raise RuntimeError('Expecting an RPC2 channel to create the stub') + _DatastoreV4Service_ClientBaseStub.__init__(self, pywraprpc.RPC_GenericStub(service_name, channel)) + elif server is not None: + _DatastoreV4Service_ClientBaseStub.__init__(self, pywraprpc.RPC_GenericStub(service_name, pywraprpc.NewClientChannel(server))) + else: + raise RuntimeError('Invalid argument combination to create a stub') + + +class DatastoreV4Service(_server_stub_base_class): + """Base class for DatastoreV4Service Stubby servers.""" + + @classmethod + def _MethodSignatures(cls): + return { + 'BeginTransaction': (BeginTransactionRequest, BeginTransactionResponse), + 'Rollback': (RollbackRequest, RollbackResponse), + 'Commit': (CommitRequest, CommitResponse), + 'Write': (WriteRequest, WriteResponse), + 'RunQuery': (RunQueryRequest, RunQueryResponse), + 'ContinueQuery': (ContinueQueryRequest, ContinueQueryResponse), + 'Get': (GetRequest, GetResponse), + 'AllocateIds': (AllocateIdsRequest, AllocateIdsResponse), + } + + def __init__(self, *args, **kwargs): + """Creates a Stubby RPC server. + + See BaseRpcServer.__init__ in rpcserver.py for detail on arguments. + """ + if _server_stub_base_class is object: + raise NotImplementedError('Add //net/rpc/python:rpcserver as a ' + 'dependency for Stubby server support.') + _server_stub_base_class.__init__(self, 'apphosting.datastore.v4.DatastoreV4Service', *args, **kwargs) + + @staticmethod + def NewStub(rpc_stub_parameters, service_name=None): + """Creates a new DatastoreV4Service Stubby client stub. + + Args: + rpc_stub_parameters: an RPC_StubParameter instance. + service_name: the service name used by the Stubby server. + """ + + if _client_stub_base_class is object: + raise RuntimeError('Add //net/rpc/python as a dependency to use Stubby') + return _DatastoreV4Service_ClientStub(rpc_stub_parameters, service_name) + + @staticmethod + def NewRPC2Stub(server=None, channel=None, service_name=None): + """Creates a new DatastoreV4Service Stubby2 client stub. + + Args: + server: host:port or bns address. + channel: directly use a channel to create a stub. Will ignore server + argument if this is specified. + service_name: the service name used by the Stubby server. + """ + + if _client_stub_base_class is object: + raise RuntimeError('Add //net/rpc/python as a dependency to use Stubby') + return _DatastoreV4Service_RPC2ClientStub(server, channel, service_name) + + def BeginTransaction(self, rpc, request, response): + """Handles a BeginTransaction RPC call. You should override this. 
+ + Args: + rpc: a Stubby RPC object + request: a BeginTransactionRequest that contains the client request + response: a BeginTransactionResponse that should be modified to send the response + """ + raise NotImplementedError + + + def Rollback(self, rpc, request, response): + """Handles a Rollback RPC call. You should override this. + + Args: + rpc: a Stubby RPC object + request: a RollbackRequest that contains the client request + response: a RollbackResponse that should be modified to send the response + """ + raise NotImplementedError + + + def Commit(self, rpc, request, response): + """Handles a Commit RPC call. You should override this. + + Args: + rpc: a Stubby RPC object + request: a CommitRequest that contains the client request + response: a CommitResponse that should be modified to send the response + """ + raise NotImplementedError + + + def Write(self, rpc, request, response): + """Handles a Write RPC call. You should override this. + + Args: + rpc: a Stubby RPC object + request: a WriteRequest that contains the client request + response: a WriteResponse that should be modified to send the response + """ + raise NotImplementedError + + + def RunQuery(self, rpc, request, response): + """Handles a RunQuery RPC call. You should override this. + + Args: + rpc: a Stubby RPC object + request: a RunQueryRequest that contains the client request + response: a RunQueryResponse that should be modified to send the response + """ + raise NotImplementedError + + + def ContinueQuery(self, rpc, request, response): + """Handles a ContinueQuery RPC call. You should override this. + + Args: + rpc: a Stubby RPC object + request: a ContinueQueryRequest that contains the client request + response: a ContinueQueryResponse that should be modified to send the response + """ + raise NotImplementedError + + + def Get(self, rpc, request, response): + """Handles a Get RPC call. You should override this. + + Args: + rpc: a Stubby RPC object + request: a GetRequest that contains the client request + response: a GetResponse that should be modified to send the response + """ + raise NotImplementedError + + + def AllocateIds(self, rpc, request, response): + """Handles a AllocateIds RPC call. You should override this. + + Args: + rpc: a Stubby RPC object + request: a AllocateIdsRequest that contains the client request + response: a AllocateIdsResponse that should be modified to send the response + """ + raise NotImplementedError + + def _AddMethodAttributes(self): + """Sets attributes on Python RPC handlers. + + See BaseRpcServer in rpcserver.py for details. 
+ """ + rpcserver._GetHandlerDecorator( + self.BeginTransaction.im_func, + BeginTransactionRequest, + BeginTransactionResponse, + None, + 'none') + rpcserver._GetHandlerDecorator( + self.Rollback.im_func, + RollbackRequest, + RollbackResponse, + None, + 'none') + rpcserver._GetHandlerDecorator( + self.Commit.im_func, + CommitRequest, + CommitResponse, + None, + 'none') + rpcserver._GetHandlerDecorator( + self.Write.im_func, + WriteRequest, + WriteResponse, + None, + 'none') + rpcserver._GetHandlerDecorator( + self.RunQuery.im_func, + RunQueryRequest, + RunQueryResponse, + None, + 'none') + rpcserver._GetHandlerDecorator( + self.ContinueQuery.im_func, + ContinueQueryRequest, + ContinueQueryResponse, + None, + 'none') + rpcserver._GetHandlerDecorator( + self.Get.im_func, + GetRequest, + GetResponse, + None, + 'none') + rpcserver._GetHandlerDecorator( + self.AllocateIds.im_func, + AllocateIdsRequest, + AllocateIdsResponse, + None, + 'none') + +if _extension_runtime: + pass + +__all__ = ['Error','Mutation','MutationResult','EntityResult','Query','KindExpression','PropertyReference','PropertyExpression','PropertyOrder','Filter','CompositeFilter','PropertyFilter','GqlQuery','GqlQueryArg','QueryResultBatch','ReadOptions','GetRequest','GetResponse','RunQueryRequest','RunQueryResponse','ContinueQueryRequest','ContinueQueryResponse','BeginTransactionRequest','BeginTransactionResponse','RollbackRequest','RollbackResponse','CommitRequest','CommitResponse','WriteRequest','WriteResponse','AllocateIdsRequest','AllocateIdsResponse','DatastoreV4Service'] diff --git a/python/google/appengine/datastore/entity_v4_pb.py b/python/google/appengine/datastore/entity_v4_pb.py new file mode 100644 index 00000000..ce2972a6 --- /dev/null +++ b/python/google/appengine/datastore/entity_v4_pb.py @@ -0,0 +1,1529 @@ +#!/usr/bin/env python +# +# Copyright 2007 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + + + +from google.net.proto import ProtocolBuffer +import array +import base64 +import dummy_thread as thread +try: + from google3.net.proto import _net_proto___parse__python +except ImportError: + _net_proto___parse__python = None + +__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit + unusednames=printElemNumber,debug_strs no-special""" + +if hasattr(ProtocolBuffer, 'ExtendableProtocolMessage'): + _extension_runtime = True + _ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage +else: + _extension_runtime = False + _ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage + +class PartitionId(ProtocolBuffer.ProtocolMessage): + + + MAX_DIMENSION_TAG = 100 + + _Constants_NAMES = { + 100: "MAX_DIMENSION_TAG", + } + + def Constants_Name(cls, x): return cls._Constants_NAMES.get(x, "") + Constants_Name = classmethod(Constants_Name) + + has_dataset_id_ = 0 + dataset_id_ = "" + has_namespace_ = 0 + namespace_ = "" + + def __init__(self, contents=None): + if contents is not None: self.MergeFromString(contents) + + def dataset_id(self): return self.dataset_id_ + + def set_dataset_id(self, x): + self.has_dataset_id_ = 1 + self.dataset_id_ = x + + def clear_dataset_id(self): + if self.has_dataset_id_: + self.has_dataset_id_ = 0 + self.dataset_id_ = "" + + def has_dataset_id(self): return self.has_dataset_id_ + + def namespace(self): return self.namespace_ + + def set_namespace(self, x): + self.has_namespace_ = 1 + self.namespace_ = x + + def clear_namespace(self): + if self.has_namespace_: + self.has_namespace_ = 0 + self.namespace_ = "" + + def has_namespace(self): return self.has_namespace_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_dataset_id()): self.set_dataset_id(x.dataset_id()) + if (x.has_namespace()): self.set_namespace(x.namespace()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.PartitionId', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.PartitionId') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.PartitionId') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.PartitionId', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.PartitionId', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.PartitionId', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_dataset_id_ != x.has_dataset_id_: return 0 + if self.has_dataset_id_ and self.dataset_id_ != x.dataset_id_: return 0 + if self.has_namespace_ != x.has_namespace_: return 0 + if self.has_namespace_ and self.namespace_ != x.namespace_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + return initialized + + def ByteSize(self): + n = 0 + if (self.has_dataset_id_): n += 1 + self.lengthString(len(self.dataset_id_)) + if (self.has_namespace_): n += 1 + self.lengthString(len(self.namespace_)) + return n + + def ByteSizePartial(self): + n = 0 + if (self.has_dataset_id_): n += 1 + 
self.lengthString(len(self.dataset_id_)) + if (self.has_namespace_): n += 1 + self.lengthString(len(self.namespace_)) + return n + + def Clear(self): + self.clear_dataset_id() + self.clear_namespace() + + def OutputUnchecked(self, out): + if (self.has_dataset_id_): + out.putVarInt32(26) + out.putPrefixedString(self.dataset_id_) + if (self.has_namespace_): + out.putVarInt32(34) + out.putPrefixedString(self.namespace_) + + def OutputPartial(self, out): + if (self.has_dataset_id_): + out.putVarInt32(26) + out.putPrefixedString(self.dataset_id_) + if (self.has_namespace_): + out.putVarInt32(34) + out.putPrefixedString(self.namespace_) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 26: + self.set_dataset_id(d.getPrefixedString()) + continue + if tt == 34: + self.set_namespace(d.getPrefixedString()) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_dataset_id_: res+=prefix+("dataset_id: %s\n" % self.DebugFormatString(self.dataset_id_)) + if self.has_namespace_: res+=prefix+("namespace: %s\n" % self.DebugFormatString(self.namespace_)) + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kdataset_id = 3 + knamespace = 4 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 3: "dataset_id", + 4: "namespace", + }, 4) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 3: ProtocolBuffer.Encoder.STRING, + 4: ProtocolBuffer.Encoder.STRING, + }, 4, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.PartitionId' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WiRhcHBob3N0aW5nL2RhdGFzdG9yZS9lbnRpdHlfdjQucHJvdG8KI2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlBhcnRpdGlvbklkExoKZGF0YXNldF9pZCADKAIwCTgBFBMaCW5hbWVzcGFjZSAEKAIwCTgBFHN6CUNvbnN0YW50c4sBkgERTUFYX0RJTUVOU0lPTl9UQUeYAWSMAXS6AYYHCiRhcHBob3N0aW5nL2RhdGFzdG9yZS9lbnRpdHlfdjQucHJvdG8SF2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0IlgKC1BhcnRpdGlvbklkEhIKCmRhdGFzZXRfaWQYAyABKAkSEQoJbmFtZXNwYWNlGAQgASgJIiIKCUNvbnN0YW50cxIVChFNQVhfRElNRU5TSU9OX1RBRxBkIrgBCgNLZXkSOgoMcGFydGl0aW9uX2lkGAEgASgLMiQuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUGFydGl0aW9uSWQSPgoMcGF0aF9lbGVtZW50GAIgAygLMiguYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5LlBhdGhFbGVtZW50GjUKC1BhdGhFbGVtZW50EgwKBGtpbmQYASACKAkSCgoCaWQYAiABKAMSDAoEbmFtZRgDIAEoCSLDAgoFVmFsdWUSFQoNYm9vbGVhbl92YWx1ZRgBIAEoCBIVCg1pbnRlZ2VyX3ZhbHVlGAIgASgDEhQKDGRvdWJsZV92YWx1ZRgDIAEoARIkChx0aW1lc3RhbXBfbWljcm9zZWNvbmRzX3ZhbHVlGAQgASgDEi8KCWtleV92YWx1ZRgFIAEoCzIcLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleRIWCg5ibG9iX2tleV92YWx1ZRgQIAEoCRIUCgxzdHJpbmdfdmFsdWUYESABKAkSEgoKYmxvYl92YWx1ZRgSIAEoDBI1CgxlbnRpdHlfdmFsdWUYBiABKAsyHy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHkSDwoHbWVhbmluZxgOIAEoBRIVCgdpbmRleGVkGA8gASgIOgR0cnVlIl0KCFByb3BlcnR5EgwKBG5hbWUYASACKAkSFAoFbXVsdGkYAiABKAg6BWZhbHNlEi0KBXZhbHVlGAMgAygLMh4uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuVmFsdWUiaAoGRW50aXR5EikKA2tleRgBIAEoCzIcLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleRIzCghwcm9wZXJ0eRgCIAMoCzIhLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5QiEKH2NvbS5nb29nbGUuYXBwaG9zdGluZy5kYXRhc3RvcmU=")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class Key_PathElement(ProtocolBuffer.ProtocolMessage): + has_kind_ = 0 + kind_ = "" + has_id_ = 0 + id_ = 0 + 
has_name_ = 0 + name_ = "" + + def __init__(self, contents=None): + if contents is not None: self.MergeFromString(contents) + + def kind(self): return self.kind_ + + def set_kind(self, x): + self.has_kind_ = 1 + self.kind_ = x + + def clear_kind(self): + if self.has_kind_: + self.has_kind_ = 0 + self.kind_ = "" + + def has_kind(self): return self.has_kind_ + + def id(self): return self.id_ + + def set_id(self, x): + self.has_id_ = 1 + self.id_ = x + + def clear_id(self): + if self.has_id_: + self.has_id_ = 0 + self.id_ = 0 + + def has_id(self): return self.has_id_ + + def name(self): return self.name_ + + def set_name(self, x): + self.has_name_ = 1 + self.name_ = x + + def clear_name(self): + if self.has_name_: + self.has_name_ = 0 + self.name_ = "" + + def has_name(self): return self.has_name_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_kind()): self.set_kind(x.kind()) + if (x.has_id()): self.set_id(x.id()) + if (x.has_name()): self.set_name(x.name()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.Key_PathElement', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.Key_PathElement') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.Key_PathElement') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.Key_PathElement', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.Key_PathElement', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.Key_PathElement', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_kind_ != x.has_kind_: return 0 + if self.has_kind_ and self.kind_ != x.kind_: return 0 + if self.has_id_ != x.has_id_: return 0 + if self.has_id_ and self.id_ != x.id_: return 0 + if self.has_name_ != x.has_name_: return 0 + if self.has_name_ and self.name_ != x.name_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_kind_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: kind not set.') + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthString(len(self.kind_)) + if (self.has_id_): n += 1 + self.lengthVarInt64(self.id_) + if (self.has_name_): n += 1 + self.lengthString(len(self.name_)) + return n + 1 + + def ByteSizePartial(self): + n = 0 + if (self.has_kind_): + n += 1 + n += self.lengthString(len(self.kind_)) + if (self.has_id_): n += 1 + self.lengthVarInt64(self.id_) + if (self.has_name_): n += 1 + self.lengthString(len(self.name_)) + return n + + def Clear(self): + self.clear_kind() + self.clear_id() + self.clear_name() + + def OutputUnchecked(self, out): + out.putVarInt32(10) + out.putPrefixedString(self.kind_) + if (self.has_id_): + out.putVarInt32(16) + out.putVarInt64(self.id_) + if (self.has_name_): + out.putVarInt32(26) + out.putPrefixedString(self.name_) + + def OutputPartial(self, out): + if (self.has_kind_): + out.putVarInt32(10) + out.putPrefixedString(self.kind_) + if (self.has_id_): + 
out.putVarInt32(16) + out.putVarInt64(self.id_) + if (self.has_name_): + out.putVarInt32(26) + out.putPrefixedString(self.name_) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + self.set_kind(d.getPrefixedString()) + continue + if tt == 16: + self.set_id(d.getVarInt64()) + continue + if tt == 26: + self.set_name(d.getPrefixedString()) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatString(self.kind_)) + if self.has_id_: res+=prefix+("id: %s\n" % self.DebugFormatInt64(self.id_)) + if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_)) + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kkind = 1 + kid = 2 + kname = 3 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "kind", + 2: "id", + 3: "name", + }, 3) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + 2: ProtocolBuffer.Encoder.NUMERIC, + 3: ProtocolBuffer.Encoder.STRING, + }, 3, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Key_PathElement' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WiRhcHBob3N0aW5nL2RhdGFzdG9yZS9lbnRpdHlfdjQucHJvdG8KJ2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleV9QYXRoRWxlbWVudBMaBGtpbmQgASgCMAk4AhQTGgJpZCACKAAwAzgBFBMaBG5hbWUgAygCMAk4ARTCASNhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5QYXJ0aXRpb25JZMoBJ2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleS5QYXRoRWxlbWVudA==")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class Key(ProtocolBuffer.ProtocolMessage): + has_partition_id_ = 0 + partition_id_ = None + + def __init__(self, contents=None): + self.path_element_ = [] + self.lazy_init_lock_ = thread.allocate_lock() + if contents is not None: self.MergeFromString(contents) + + def partition_id(self): + if self.partition_id_ is None: + self.lazy_init_lock_.acquire() + try: + if self.partition_id_ is None: self.partition_id_ = PartitionId() + finally: + self.lazy_init_lock_.release() + return self.partition_id_ + + def mutable_partition_id(self): self.has_partition_id_ = 1; return self.partition_id() + + def clear_partition_id(self): + + if self.has_partition_id_: + self.has_partition_id_ = 0; + if self.partition_id_ is not None: self.partition_id_.Clear() + + def has_partition_id(self): return self.has_partition_id_ + + def path_element_size(self): return len(self.path_element_) + def path_element_list(self): return self.path_element_ + + def path_element(self, i): + return self.path_element_[i] + + def mutable_path_element(self, i): + return self.path_element_[i] + + def add_path_element(self): + x = Key_PathElement() + self.path_element_.append(x) + return x + + def clear_path_element(self): + self.path_element_ = [] + + def MergeFrom(self, x): + assert x is not self + if (x.has_partition_id()): self.mutable_partition_id().MergeFrom(x.partition_id()) + for i in xrange(x.path_element_size()): self.add_path_element().CopyFrom(x.path_element(i)) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.Key', s) + + if 
_net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.Key') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.Key') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.Key', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.Key', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.Key', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_partition_id_ != x.has_partition_id_: return 0 + if self.has_partition_id_ and self.partition_id_ != x.partition_id_: return 0 + if len(self.path_element_) != len(x.path_element_): return 0 + for e1, e2 in zip(self.path_element_, x.path_element_): + if e1 != e2: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (self.has_partition_id_ and not self.partition_id_.IsInitialized(debug_strs)): initialized = 0 + for p in self.path_element_: + if not p.IsInitialized(debug_strs): initialized=0 + return initialized + + def ByteSize(self): + n = 0 + if (self.has_partition_id_): n += 1 + self.lengthString(self.partition_id_.ByteSize()) + n += 1 * len(self.path_element_) + for i in xrange(len(self.path_element_)): n += self.lengthString(self.path_element_[i].ByteSize()) + return n + + def ByteSizePartial(self): + n = 0 + if (self.has_partition_id_): n += 1 + self.lengthString(self.partition_id_.ByteSizePartial()) + n += 1 * len(self.path_element_) + for i in xrange(len(self.path_element_)): n += self.lengthString(self.path_element_[i].ByteSizePartial()) + return n + + def Clear(self): + self.clear_partition_id() + self.clear_path_element() + + def OutputUnchecked(self, out): + if (self.has_partition_id_): + out.putVarInt32(10) + out.putVarInt32(self.partition_id_.ByteSize()) + self.partition_id_.OutputUnchecked(out) + for i in xrange(len(self.path_element_)): + out.putVarInt32(18) + out.putVarInt32(self.path_element_[i].ByteSize()) + self.path_element_[i].OutputUnchecked(out) + + def OutputPartial(self, out): + if (self.has_partition_id_): + out.putVarInt32(10) + out.putVarInt32(self.partition_id_.ByteSizePartial()) + self.partition_id_.OutputPartial(out) + for i in xrange(len(self.path_element_)): + out.putVarInt32(18) + out.putVarInt32(self.path_element_[i].ByteSizePartial()) + self.path_element_[i].OutputPartial(out) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_partition_id().TryMerge(tmp) + continue + if tt == 18: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_path_element().TryMerge(tmp) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_partition_id_: + res+=prefix+"partition_id <\n" + res+=self.partition_id_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt=0 + for e in self.path_element_: + elm="" + if 
printElemNumber: elm="(%d)" % cnt + res+=prefix+("path_element%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kpartition_id = 1 + kpath_element = 2 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "partition_id", + 2: "path_element", + }, 2) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + 2: ProtocolBuffer.Encoder.STRING, + }, 2, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Key' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WiRhcHBob3N0aW5nL2RhdGFzdG9yZS9lbnRpdHlfdjQucHJvdG8KG2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleRMaDHBhcnRpdGlvbl9pZCABKAIwCzgBSiNhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5QYXJ0aXRpb25JZKMBqgEFY3R5cGWyAQZwcm90bzKkARQTGgxwYXRoX2VsZW1lbnQgAigCMAs4A0onYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5X1BhdGhFbGVtZW50owGqAQVjdHlwZbIBBnByb3RvMqQBFMIBI2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlBhcnRpdGlvbklk")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class Value(ProtocolBuffer.ProtocolMessage): + has_boolean_value_ = 0 + boolean_value_ = 0 + has_integer_value_ = 0 + integer_value_ = 0 + has_double_value_ = 0 + double_value_ = 0.0 + has_timestamp_microseconds_value_ = 0 + timestamp_microseconds_value_ = 0 + has_key_value_ = 0 + key_value_ = None + has_blob_key_value_ = 0 + blob_key_value_ = "" + has_string_value_ = 0 + string_value_ = "" + has_blob_value_ = 0 + blob_value_ = "" + has_entity_value_ = 0 + entity_value_ = None + has_meaning_ = 0 + meaning_ = 0 + has_indexed_ = 0 + indexed_ = 1 + + def __init__(self, contents=None): + self.lazy_init_lock_ = thread.allocate_lock() + if contents is not None: self.MergeFromString(contents) + + def boolean_value(self): return self.boolean_value_ + + def set_boolean_value(self, x): + self.has_boolean_value_ = 1 + self.boolean_value_ = x + + def clear_boolean_value(self): + if self.has_boolean_value_: + self.has_boolean_value_ = 0 + self.boolean_value_ = 0 + + def has_boolean_value(self): return self.has_boolean_value_ + + def integer_value(self): return self.integer_value_ + + def set_integer_value(self, x): + self.has_integer_value_ = 1 + self.integer_value_ = x + + def clear_integer_value(self): + if self.has_integer_value_: + self.has_integer_value_ = 0 + self.integer_value_ = 0 + + def has_integer_value(self): return self.has_integer_value_ + + def double_value(self): return self.double_value_ + + def set_double_value(self, x): + self.has_double_value_ = 1 + self.double_value_ = x + + def clear_double_value(self): + if self.has_double_value_: + self.has_double_value_ = 0 + self.double_value_ = 0.0 + + def has_double_value(self): return self.has_double_value_ + + def timestamp_microseconds_value(self): return self.timestamp_microseconds_value_ + + def set_timestamp_microseconds_value(self, x): + self.has_timestamp_microseconds_value_ = 1 + self.timestamp_microseconds_value_ = x + + def clear_timestamp_microseconds_value(self): + if self.has_timestamp_microseconds_value_: + self.has_timestamp_microseconds_value_ = 0 + self.timestamp_microseconds_value_ = 0 + + def has_timestamp_microseconds_value(self): return self.has_timestamp_microseconds_value_ + + def key_value(self): + if 
self.key_value_ is None: + self.lazy_init_lock_.acquire() + try: + if self.key_value_ is None: self.key_value_ = Key() + finally: + self.lazy_init_lock_.release() + return self.key_value_ + + def mutable_key_value(self): self.has_key_value_ = 1; return self.key_value() + + def clear_key_value(self): + + if self.has_key_value_: + self.has_key_value_ = 0; + if self.key_value_ is not None: self.key_value_.Clear() + + def has_key_value(self): return self.has_key_value_ + + def blob_key_value(self): return self.blob_key_value_ + + def set_blob_key_value(self, x): + self.has_blob_key_value_ = 1 + self.blob_key_value_ = x + + def clear_blob_key_value(self): + if self.has_blob_key_value_: + self.has_blob_key_value_ = 0 + self.blob_key_value_ = "" + + def has_blob_key_value(self): return self.has_blob_key_value_ + + def string_value(self): return self.string_value_ + + def set_string_value(self, x): + self.has_string_value_ = 1 + self.string_value_ = x + + def clear_string_value(self): + if self.has_string_value_: + self.has_string_value_ = 0 + self.string_value_ = "" + + def has_string_value(self): return self.has_string_value_ + + def blob_value(self): return self.blob_value_ + + def set_blob_value(self, x): + self.has_blob_value_ = 1 + self.blob_value_ = x + + def clear_blob_value(self): + if self.has_blob_value_: + self.has_blob_value_ = 0 + self.blob_value_ = "" + + def has_blob_value(self): return self.has_blob_value_ + + def entity_value(self): + if self.entity_value_ is None: + self.lazy_init_lock_.acquire() + try: + if self.entity_value_ is None: self.entity_value_ = Entity() + finally: + self.lazy_init_lock_.release() + return self.entity_value_ + + def mutable_entity_value(self): self.has_entity_value_ = 1; return self.entity_value() + + def clear_entity_value(self): + + if self.has_entity_value_: + self.has_entity_value_ = 0; + if self.entity_value_ is not None: self.entity_value_.Clear() + + def has_entity_value(self): return self.has_entity_value_ + + def meaning(self): return self.meaning_ + + def set_meaning(self, x): + self.has_meaning_ = 1 + self.meaning_ = x + + def clear_meaning(self): + if self.has_meaning_: + self.has_meaning_ = 0 + self.meaning_ = 0 + + def has_meaning(self): return self.has_meaning_ + + def indexed(self): return self.indexed_ + + def set_indexed(self, x): + self.has_indexed_ = 1 + self.indexed_ = x + + def clear_indexed(self): + if self.has_indexed_: + self.has_indexed_ = 0 + self.indexed_ = 1 + + def has_indexed(self): return self.has_indexed_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_boolean_value()): self.set_boolean_value(x.boolean_value()) + if (x.has_integer_value()): self.set_integer_value(x.integer_value()) + if (x.has_double_value()): self.set_double_value(x.double_value()) + if (x.has_timestamp_microseconds_value()): self.set_timestamp_microseconds_value(x.timestamp_microseconds_value()) + if (x.has_key_value()): self.mutable_key_value().MergeFrom(x.key_value()) + if (x.has_blob_key_value()): self.set_blob_key_value(x.blob_key_value()) + if (x.has_string_value()): self.set_string_value(x.string_value()) + if (x.has_blob_value()): self.set_blob_value(x.blob_value()) + if (x.has_entity_value()): self.mutable_entity_value().MergeFrom(x.entity_value()) + if (x.has_meaning()): self.set_meaning(x.meaning()) + if (x.has_indexed()): self.set_indexed(x.indexed()) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.Value', s) + + if 
_net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.Value') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.Value') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.Value', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.Value', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.Value', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_boolean_value_ != x.has_boolean_value_: return 0 + if self.has_boolean_value_ and self.boolean_value_ != x.boolean_value_: return 0 + if self.has_integer_value_ != x.has_integer_value_: return 0 + if self.has_integer_value_ and self.integer_value_ != x.integer_value_: return 0 + if self.has_double_value_ != x.has_double_value_: return 0 + if self.has_double_value_ and self.double_value_ != x.double_value_: return 0 + if self.has_timestamp_microseconds_value_ != x.has_timestamp_microseconds_value_: return 0 + if self.has_timestamp_microseconds_value_ and self.timestamp_microseconds_value_ != x.timestamp_microseconds_value_: return 0 + if self.has_key_value_ != x.has_key_value_: return 0 + if self.has_key_value_ and self.key_value_ != x.key_value_: return 0 + if self.has_blob_key_value_ != x.has_blob_key_value_: return 0 + if self.has_blob_key_value_ and self.blob_key_value_ != x.blob_key_value_: return 0 + if self.has_string_value_ != x.has_string_value_: return 0 + if self.has_string_value_ and self.string_value_ != x.string_value_: return 0 + if self.has_blob_value_ != x.has_blob_value_: return 0 + if self.has_blob_value_ and self.blob_value_ != x.blob_value_: return 0 + if self.has_entity_value_ != x.has_entity_value_: return 0 + if self.has_entity_value_ and self.entity_value_ != x.entity_value_: return 0 + if self.has_meaning_ != x.has_meaning_: return 0 + if self.has_meaning_ and self.meaning_ != x.meaning_: return 0 + if self.has_indexed_ != x.has_indexed_: return 0 + if self.has_indexed_ and self.indexed_ != x.indexed_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (self.has_key_value_ and not self.key_value_.IsInitialized(debug_strs)): initialized = 0 + if (self.has_entity_value_ and not self.entity_value_.IsInitialized(debug_strs)): initialized = 0 + return initialized + + def ByteSize(self): + n = 0 + if (self.has_boolean_value_): n += 2 + if (self.has_integer_value_): n += 1 + self.lengthVarInt64(self.integer_value_) + if (self.has_double_value_): n += 9 + if (self.has_timestamp_microseconds_value_): n += 1 + self.lengthVarInt64(self.timestamp_microseconds_value_) + if (self.has_key_value_): n += 1 + self.lengthString(self.key_value_.ByteSize()) + if (self.has_blob_key_value_): n += 2 + self.lengthString(len(self.blob_key_value_)) + if (self.has_string_value_): n += 2 + self.lengthString(len(self.string_value_)) + if (self.has_blob_value_): n += 2 + self.lengthString(len(self.blob_value_)) + if (self.has_entity_value_): n += 1 + self.lengthString(self.entity_value_.ByteSize()) + if (self.has_meaning_): n += 1 + self.lengthVarInt64(self.meaning_) + if 
(self.has_indexed_): n += 2 + return n + + def ByteSizePartial(self): + n = 0 + if (self.has_boolean_value_): n += 2 + if (self.has_integer_value_): n += 1 + self.lengthVarInt64(self.integer_value_) + if (self.has_double_value_): n += 9 + if (self.has_timestamp_microseconds_value_): n += 1 + self.lengthVarInt64(self.timestamp_microseconds_value_) + if (self.has_key_value_): n += 1 + self.lengthString(self.key_value_.ByteSizePartial()) + if (self.has_blob_key_value_): n += 2 + self.lengthString(len(self.blob_key_value_)) + if (self.has_string_value_): n += 2 + self.lengthString(len(self.string_value_)) + if (self.has_blob_value_): n += 2 + self.lengthString(len(self.blob_value_)) + if (self.has_entity_value_): n += 1 + self.lengthString(self.entity_value_.ByteSizePartial()) + if (self.has_meaning_): n += 1 + self.lengthVarInt64(self.meaning_) + if (self.has_indexed_): n += 2 + return n + + def Clear(self): + self.clear_boolean_value() + self.clear_integer_value() + self.clear_double_value() + self.clear_timestamp_microseconds_value() + self.clear_key_value() + self.clear_blob_key_value() + self.clear_string_value() + self.clear_blob_value() + self.clear_entity_value() + self.clear_meaning() + self.clear_indexed() + + def OutputUnchecked(self, out): + if (self.has_boolean_value_): + out.putVarInt32(8) + out.putBoolean(self.boolean_value_) + if (self.has_integer_value_): + out.putVarInt32(16) + out.putVarInt64(self.integer_value_) + if (self.has_double_value_): + out.putVarInt32(25) + out.putDouble(self.double_value_) + if (self.has_timestamp_microseconds_value_): + out.putVarInt32(32) + out.putVarInt64(self.timestamp_microseconds_value_) + if (self.has_key_value_): + out.putVarInt32(42) + out.putVarInt32(self.key_value_.ByteSize()) + self.key_value_.OutputUnchecked(out) + if (self.has_entity_value_): + out.putVarInt32(50) + out.putVarInt32(self.entity_value_.ByteSize()) + self.entity_value_.OutputUnchecked(out) + if (self.has_meaning_): + out.putVarInt32(112) + out.putVarInt32(self.meaning_) + if (self.has_indexed_): + out.putVarInt32(120) + out.putBoolean(self.indexed_) + if (self.has_blob_key_value_): + out.putVarInt32(130) + out.putPrefixedString(self.blob_key_value_) + if (self.has_string_value_): + out.putVarInt32(138) + out.putPrefixedString(self.string_value_) + if (self.has_blob_value_): + out.putVarInt32(146) + out.putPrefixedString(self.blob_value_) + + def OutputPartial(self, out): + if (self.has_boolean_value_): + out.putVarInt32(8) + out.putBoolean(self.boolean_value_) + if (self.has_integer_value_): + out.putVarInt32(16) + out.putVarInt64(self.integer_value_) + if (self.has_double_value_): + out.putVarInt32(25) + out.putDouble(self.double_value_) + if (self.has_timestamp_microseconds_value_): + out.putVarInt32(32) + out.putVarInt64(self.timestamp_microseconds_value_) + if (self.has_key_value_): + out.putVarInt32(42) + out.putVarInt32(self.key_value_.ByteSizePartial()) + self.key_value_.OutputPartial(out) + if (self.has_entity_value_): + out.putVarInt32(50) + out.putVarInt32(self.entity_value_.ByteSizePartial()) + self.entity_value_.OutputPartial(out) + if (self.has_meaning_): + out.putVarInt32(112) + out.putVarInt32(self.meaning_) + if (self.has_indexed_): + out.putVarInt32(120) + out.putBoolean(self.indexed_) + if (self.has_blob_key_value_): + out.putVarInt32(130) + out.putPrefixedString(self.blob_key_value_) + if (self.has_string_value_): + out.putVarInt32(138) + out.putPrefixedString(self.string_value_) + if (self.has_blob_value_): + out.putVarInt32(146) + 
out.putPrefixedString(self.blob_value_) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 8: + self.set_boolean_value(d.getBoolean()) + continue + if tt == 16: + self.set_integer_value(d.getVarInt64()) + continue + if tt == 25: + self.set_double_value(d.getDouble()) + continue + if tt == 32: + self.set_timestamp_microseconds_value(d.getVarInt64()) + continue + if tt == 42: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_key_value().TryMerge(tmp) + continue + if tt == 50: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_entity_value().TryMerge(tmp) + continue + if tt == 112: + self.set_meaning(d.getVarInt32()) + continue + if tt == 120: + self.set_indexed(d.getBoolean()) + continue + if tt == 130: + self.set_blob_key_value(d.getPrefixedString()) + continue + if tt == 138: + self.set_string_value(d.getPrefixedString()) + continue + if tt == 146: + self.set_blob_value(d.getPrefixedString()) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_boolean_value_: res+=prefix+("boolean_value: %s\n" % self.DebugFormatBool(self.boolean_value_)) + if self.has_integer_value_: res+=prefix+("integer_value: %s\n" % self.DebugFormatInt64(self.integer_value_)) + if self.has_double_value_: res+=prefix+("double_value: %s\n" % self.DebugFormat(self.double_value_)) + if self.has_timestamp_microseconds_value_: res+=prefix+("timestamp_microseconds_value: %s\n" % self.DebugFormatInt64(self.timestamp_microseconds_value_)) + if self.has_key_value_: + res+=prefix+"key_value <\n" + res+=self.key_value_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + if self.has_blob_key_value_: res+=prefix+("blob_key_value: %s\n" % self.DebugFormatString(self.blob_key_value_)) + if self.has_string_value_: res+=prefix+("string_value: %s\n" % self.DebugFormatString(self.string_value_)) + if self.has_blob_value_: res+=prefix+("blob_value: %s\n" % self.DebugFormatString(self.blob_value_)) + if self.has_entity_value_: + res+=prefix+"entity_value <\n" + res+=self.entity_value_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + if self.has_meaning_: res+=prefix+("meaning: %s\n" % self.DebugFormatInt32(self.meaning_)) + if self.has_indexed_: res+=prefix+("indexed: %s\n" % self.DebugFormatBool(self.indexed_)) + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kboolean_value = 1 + kinteger_value = 2 + kdouble_value = 3 + ktimestamp_microseconds_value = 4 + kkey_value = 5 + kblob_key_value = 16 + kstring_value = 17 + kblob_value = 18 + kentity_value = 6 + kmeaning = 14 + kindexed = 15 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "boolean_value", + 2: "integer_value", + 3: "double_value", + 4: "timestamp_microseconds_value", + 5: "key_value", + 6: "entity_value", + 14: "meaning", + 15: "indexed", + 16: "blob_key_value", + 17: "string_value", + 18: "blob_value", + }, 18) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.NUMERIC, + 2: ProtocolBuffer.Encoder.NUMERIC, + 3: ProtocolBuffer.Encoder.DOUBLE, + 4: ProtocolBuffer.Encoder.NUMERIC, + 5: ProtocolBuffer.Encoder.STRING, + 6: ProtocolBuffer.Encoder.STRING, + 14: ProtocolBuffer.Encoder.NUMERIC, + 15: 
ProtocolBuffer.Encoder.NUMERIC, + 16: ProtocolBuffer.Encoder.STRING, + 17: ProtocolBuffer.Encoder.STRING, + 18: ProtocolBuffer.Encoder.STRING, + }, 18, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Value' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WiRhcHBob3N0aW5nL2RhdGFzdG9yZS9lbnRpdHlfdjQucHJvdG8KHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlZhbHVlExoNYm9vbGVhbl92YWx1ZSABKAAwCDgBFBMaDWludGVnZXJfdmFsdWUgAigAMAM4ARQTGgxkb3VibGVfdmFsdWUgAygBMAE4ARQTGhx0aW1lc3RhbXBfbWljcm9zZWNvbmRzX3ZhbHVlIAQoADADOAEUExoJa2V5X3ZhbHVlIAUoAjALOAFKG2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleaMBqgEFY3R5cGWyAQZwcm90bzKkARQTGg5ibG9iX2tleV92YWx1ZSAQKAIwCTgBFBMaDHN0cmluZ192YWx1ZSARKAIwCTgBFBMaCmJsb2JfdmFsdWUgEigCMAk4ARQTGgxlbnRpdHlfdmFsdWUgBigCMAs4AUoeYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5owGqAQVjdHlwZbIBBnByb3RvMqQBFBMaB21lYW5pbmcgDigAMAU4ARQTGgdpbmRleGVkIA8oADAIOAFCBHRydWWjAaoBB2RlZmF1bHSyAQR0cnVlpAEUwgEjYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUGFydGl0aW9uSWQ=")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class Property(ProtocolBuffer.ProtocolMessage): + has_name_ = 0 + name_ = "" + has_multi_ = 0 + multi_ = 0 + + def __init__(self, contents=None): + self.value_ = [] + if contents is not None: self.MergeFromString(contents) + + def name(self): return self.name_ + + def set_name(self, x): + self.has_name_ = 1 + self.name_ = x + + def clear_name(self): + if self.has_name_: + self.has_name_ = 0 + self.name_ = "" + + def has_name(self): return self.has_name_ + + def multi(self): return self.multi_ + + def set_multi(self, x): + self.has_multi_ = 1 + self.multi_ = x + + def clear_multi(self): + if self.has_multi_: + self.has_multi_ = 0 + self.multi_ = 0 + + def has_multi(self): return self.has_multi_ + + def value_size(self): return len(self.value_) + def value_list(self): return self.value_ + + def value(self, i): + return self.value_[i] + + def mutable_value(self, i): + return self.value_[i] + + def add_value(self): + x = Value() + self.value_.append(x) + return x + + def clear_value(self): + self.value_ = [] + + def MergeFrom(self, x): + assert x is not self + if (x.has_name()): self.set_name(x.name()) + if (x.has_multi()): self.set_multi(x.multi()) + for i in xrange(x.value_size()): self.add_value().CopyFrom(x.value(i)) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.Property', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.Property') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.Property') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.Property', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.Property', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.Property', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_name_ != 
x.has_name_: return 0 + if self.has_name_ and self.name_ != x.name_: return 0 + if self.has_multi_ != x.has_multi_: return 0 + if self.has_multi_ and self.multi_ != x.multi_: return 0 + if len(self.value_) != len(x.value_): return 0 + for e1, e2 in zip(self.value_, x.value_): + if e1 != e2: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_name_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: name not set.') + for p in self.value_: + if not p.IsInitialized(debug_strs): initialized=0 + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthString(len(self.name_)) + if (self.has_multi_): n += 2 + n += 1 * len(self.value_) + for i in xrange(len(self.value_)): n += self.lengthString(self.value_[i].ByteSize()) + return n + 1 + + def ByteSizePartial(self): + n = 0 + if (self.has_name_): + n += 1 + n += self.lengthString(len(self.name_)) + if (self.has_multi_): n += 2 + n += 1 * len(self.value_) + for i in xrange(len(self.value_)): n += self.lengthString(self.value_[i].ByteSizePartial()) + return n + + def Clear(self): + self.clear_name() + self.clear_multi() + self.clear_value() + + def OutputUnchecked(self, out): + out.putVarInt32(10) + out.putPrefixedString(self.name_) + if (self.has_multi_): + out.putVarInt32(16) + out.putBoolean(self.multi_) + for i in xrange(len(self.value_)): + out.putVarInt32(26) + out.putVarInt32(self.value_[i].ByteSize()) + self.value_[i].OutputUnchecked(out) + + def OutputPartial(self, out): + if (self.has_name_): + out.putVarInt32(10) + out.putPrefixedString(self.name_) + if (self.has_multi_): + out.putVarInt32(16) + out.putBoolean(self.multi_) + for i in xrange(len(self.value_)): + out.putVarInt32(26) + out.putVarInt32(self.value_[i].ByteSizePartial()) + self.value_[i].OutputPartial(out) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + self.set_name(d.getPrefixedString()) + continue + if tt == 16: + self.set_multi(d.getBoolean()) + continue + if tt == 26: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_value().TryMerge(tmp) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_)) + if self.has_multi_: res+=prefix+("multi: %s\n" % self.DebugFormatBool(self.multi_)) + cnt=0 + for e in self.value_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("value%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kname = 1 + kmulti = 2 + kvalue = 3 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "name", + 2: "multi", + 3: "value", + }, 3) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + 2: ProtocolBuffer.Encoder.NUMERIC, + 3: ProtocolBuffer.Encoder.STRING, + }, 3, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Property' + _SERIALIZED_DESCRIPTOR = array.array('B') + 
_SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WiRhcHBob3N0aW5nL2RhdGFzdG9yZS9lbnRpdHlfdjQucHJvdG8KIGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5ExoEbmFtZSABKAIwCTgCFBMaBW11bHRpIAIoADAIOAFCBWZhbHNlowGqAQdkZWZhdWx0sgEFZmFsc2WkARQTGgV2YWx1ZSADKAIwCzgDSh1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5WYWx1ZaMBqgEFY3R5cGWyAQZwcm90bzKkARTCASNhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5QYXJ0aXRpb25JZA==")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +class Entity(ProtocolBuffer.ProtocolMessage): + has_key_ = 0 + key_ = None + + def __init__(self, contents=None): + self.property_ = [] + self.lazy_init_lock_ = thread.allocate_lock() + if contents is not None: self.MergeFromString(contents) + + def key(self): + if self.key_ is None: + self.lazy_init_lock_.acquire() + try: + if self.key_ is None: self.key_ = Key() + finally: + self.lazy_init_lock_.release() + return self.key_ + + def mutable_key(self): self.has_key_ = 1; return self.key() + + def clear_key(self): + + if self.has_key_: + self.has_key_ = 0; + if self.key_ is not None: self.key_.Clear() + + def has_key(self): return self.has_key_ + + def property_size(self): return len(self.property_) + def property_list(self): return self.property_ + + def property(self, i): + return self.property_[i] + + def mutable_property(self, i): + return self.property_[i] + + def add_property(self): + x = Property() + self.property_.append(x) + return x + + def clear_property(self): + self.property_ = [] + + def MergeFrom(self, x): + assert x is not self + if (x.has_key()): self.mutable_key().MergeFrom(x.key()) + for i in xrange(x.property_size()): self.add_property().CopyFrom(x.property(i)) + + if _net_proto___parse__python is not None: + def _CMergeFromString(self, s): + _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.Entity', s) + + if _net_proto___parse__python is not None: + def _CEncode(self): + return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.Entity') + + if _net_proto___parse__python is not None: + def _CEncodePartial(self): + return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.Entity') + + if _net_proto___parse__python is not None: + def _CToASCII(self, output_format): + return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.Entity', output_format) + + + if _net_proto___parse__python is not None: + def ParseASCII(self, s): + _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.Entity', s) + + + if _net_proto___parse__python is not None: + def ParseASCIIIgnoreUnknown(self, s): + _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.Entity', s) + + + def Equals(self, x): + if x is self: return 1 + if self.has_key_ != x.has_key_: return 0 + if self.has_key_ and self.key_ != x.key_: return 0 + if len(self.property_) != len(x.property_): return 0 + for e1, e2 in zip(self.property_, x.property_): + if e1 != e2: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (self.has_key_ and not self.key_.IsInitialized(debug_strs)): initialized = 0 + for p in self.property_: + if not p.IsInitialized(debug_strs): initialized=0 + return initialized + + def ByteSize(self): + n = 0 + if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSize()) + n += 1 * len(self.property_) + for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSize()) + return n + + def ByteSizePartial(self): + n = 0 + if 
(self.has_key_): n += 1 + self.lengthString(self.key_.ByteSizePartial()) + n += 1 * len(self.property_) + for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSizePartial()) + return n + + def Clear(self): + self.clear_key() + self.clear_property() + + def OutputUnchecked(self, out): + if (self.has_key_): + out.putVarInt32(10) + out.putVarInt32(self.key_.ByteSize()) + self.key_.OutputUnchecked(out) + for i in xrange(len(self.property_)): + out.putVarInt32(18) + out.putVarInt32(self.property_[i].ByteSize()) + self.property_[i].OutputUnchecked(out) + + def OutputPartial(self, out): + if (self.has_key_): + out.putVarInt32(10) + out.putVarInt32(self.key_.ByteSizePartial()) + self.key_.OutputPartial(out) + for i in xrange(len(self.property_)): + out.putVarInt32(18) + out.putVarInt32(self.property_[i].ByteSizePartial()) + self.property_[i].OutputPartial(out) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_key().TryMerge(tmp) + continue + if tt == 18: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_property().TryMerge(tmp) + continue + + + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_key_: + res+=prefix+"key <\n" + res+=self.key_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt=0 + for e in self.property_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("property%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kkey = 1 + kproperty = 2 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "key", + 2: "property", + }, 2) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + 2: ProtocolBuffer.Encoder.STRING, + }, 2, ProtocolBuffer.Encoder.MAX_TYPE) + + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" + _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Entity' + _SERIALIZED_DESCRIPTOR = array.array('B') + _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WiRhcHBob3N0aW5nL2RhdGFzdG9yZS9lbnRpdHlfdjQucHJvdG8KHmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eRMaA2tleSABKAIwCzgBShthcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoIcHJvcGVydHkgAigCMAs4A0ogYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUwgEjYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUGFydGl0aW9uSWQ=")) + if _net_proto___parse__python is not None: + _net_proto___parse__python.RegisterType( + _SERIALIZED_DESCRIPTOR.tostring()) + +if _extension_runtime: + pass + +__all__ = ['PartitionId','Key_PathElement','Key','Value','Property','Entity'] diff --git a/python/google/appengine/ext/analytics/static/analytics_js.js b/python/google/appengine/ext/analytics/static/analytics_js.js dissimilarity index 83% index 3e20eaab..b311480c 100644 --- a/python/google/appengine/ext/analytics/static/analytics_js.js +++ b/python/google/appengine/ext/analytics/static/analytics_js.js @@ -1,25 +1,24 @@ -/* Copyright 2008-9 Google Inc. All Rights Reserved. 
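A minimal usage sketch for the generated apphosting.datastore.v4 message classes above (Key_PathElement, Key, Value, Property, Entity). It assumes the module is importable as google.appengine.datastore.entity_v4_pb, matching the file path added by this release, and uses only accessors defined in the generated code:

    # Sketch only: build and round-trip an Entity with the generated accessors.
    from google.appengine.datastore.entity_v4_pb import Entity

    entity = Entity()

    # Key_PathElement.kind is a required field, so it must be set before
    # Encode()/IsInitialized() will succeed; id and name are optional.
    elem = entity.mutable_key().add_path_element()
    elem.set_kind('Greeting')
    elem.set_id(42)

    # Property.name is required; values live in the repeated 'value' field.
    prop = entity.add_property()
    prop.set_name('content')
    prop.add_value().set_string_value('hello')

    # Encode() serializes to the wire format; the constructor merges it back.
    data = entity.Encode()
    copy = Entity(data)
    assert copy.key().path_element(0).kind() == 'Greeting'
    assert copy.property(0).value(0).string_value() == 'hello'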
*/ (function(){var m=void 0,n=!0,p=null,q=!1,s,u=this,v=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof Array)return"array";if(a instanceof Object)return b;var c=Object.prototype.toString.call(a);if("[object Window]"==c)return"object";if("[object Array]"==c||"number"==typeof a.length&&"undefined"!=typeof a.splice&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("splice"))return"array";if("[object Function]"==c||"undefined"!=typeof a.call&&"undefined"!=typeof a.propertyIsEnumerable&& -!a.propertyIsEnumerable("call"))return"function"}else return"null";else if("function"==b&&"undefined"==typeof a.call)return"object";return b},w=function(a){return"string"==typeof a},x="closure_uid_"+(1E9*Math.random()>>>0),y=0,aa=function(a,b){var c=Array.prototype.slice.call(arguments,1);return function(){var b=Array.prototype.slice.call(arguments);b.unshift.apply(b,c);return a.apply(this,b)}},A=function(a,b){var c=a.split("."),d=u;!(c[0]in d)&&d.execScript&&d.execScript("var "+c[0]);for(var e;c.length&& -(e=c.shift());)!c.length&&b!==m?d[e]=b:d=d[e]?d[e]:d[e]={}},B=function(a,b){function c(){}c.prototype=b.prototype;a.o=b.prototype;a.prototype=new c};var C=function(a){Error.captureStackTrace?Error.captureStackTrace(this,C):this.stack=Error().stack||"";a&&(this.message=String(a))};B(C,Error);var ba=function(a,b){for(var c=1;cc?Math.max(0,a.length+c):c;if(w(a))return!w(b)||1!=b.length?-1:a.indexOf(b,c);for(;c=arguments.length?E.slice.call(a,b):E.slice.call(a,b,c)};var G,H,I,J,ia=function(){return u.navigator?u.navigator.userAgent:p};J=I=H=G=q;var K;if(K=ia()){var ja=u.navigator;G=0==K.indexOf("Opera");H=!G&&-1!=K.indexOf("MSIE");I=!G&&-1!=K.indexOf("WebKit");J=!G&&!I&&"Gecko"==ja.product}var ka=G,L=H,M=J,N=I,la=function(){var a=u.document;return a?a.documentMode:m},O; -a:{var P="",Q;if(ka&&u.opera)var R=u.opera.version,P="function"==typeof R?R():R;else if(M?Q=/rv\:([^\);]+)(\)|;)/:L?Q=/MSIE\s+([^\);]+)(\)|;)/:N&&(Q=/WebKit\/(\S+)/),Q)var ma=Q.exec(ia()),P=ma?ma[1]:"";if(L){var na=la();if(na>parseFloat(P)){O=String(na);break a}}O=P} -var oa=O,pa={},S=function(a){var b;if(!(b=pa[a])){b=0;for(var c=String(oa).replace(/^[\s\xa0]+|[\s\xa0]+$/g,"").split("."),d=String(a).replace(/^[\s\xa0]+|[\s\xa0]+$/g,"").split("."),e=Math.max(c.length,d.length),f=0;0==b&&f(0==t[1].length?0:parseInt(t[1],10))?1:0)||((0==k[2].length)<(0==t[2].length)?-1:(0==k[2].length)>(0==t[2].length)?1:0)||(k[2]t[2]?1:0)}while(0==b)}b=pa[a]=0<=b}return b},qa=u.document,ra=!qa||!L?m:la()||("CSS1Compat"==qa.compatMode?parseInt(oa,10):5);!M&&!L||L&&L&&9<=ra||M&&S("1.9.1");L&&S("9");var sa=function(a){a=a.className;return w(a)&&a.match(/\S+/g)||[]},ta=function(a,b){for(var c=sa(a),d=ha(arguments,1),e=c,f=0;f=a.keyCode)a.keyCode=-1}catch(b){}};var za="closure_listenable_"+(1E6*Math.random()|0),Aa=0;var Ba=function(){};s=Ba.prototype;s.key=0;s.c=q;s.f=q;s.e=function(a,b,c,d,e,f){if("function"==v(a))this.m=n;else if(a&&a.handleEvent&&"function"==v(a.handleEvent))this.m=q;else throw Error("Invalid listener argument");this.d=a;this.k=b;this.src=c;this.type=d;this.capture=!!e;this.i=f;this.f=q;this.key=++Aa;this.c=q};s.handleEvent=function(a){return this.m?this.d.call(this.i||this.src,a):this.d.handleEvent.call(this.d,a)};var W={},X={},Y={},Z={},Da=function(){var a=Ca,b=xa?function(c){return a.call(b.src,b.d,c)}:function(c){c=a.call(b.src,b.d,c);if(!c)return c};return b},Ea=function(a,b,c,d,e){if("array"==v(b))for(var f=0;fe.keyCode||e.returnValue!=m)return n;a:{var l=q;if(0==e.keyCode)try{e.keyCode=-1;break 
a}catch(r){l=n}if(l||e.returnValue==m)e.returnValue=n}}l=new V;l.e(e,this);e=n;try{if(g){for(var k=[],t=l.currentTarget;t;t=t.parentNode)k.push(t);f=d[n];f.b=f.a;for(var z=k.length-1;!l.j&&0<=z&&f.b;z--)l.currentTarget=k[z],e&=Ja(f,k[z],c,n,l);if(h){f=d[q];f.b=f.a;for(z=0;!l.j&&z>>0),Fa=function(a){return"function"==v(a)?a:a[Ka]||(a[Ka]=function(b){return a.handleEvent(b)})};var Ga=function(a){D(a.g,"Event target is not initialized. Did you call superclass (goog.events.EventTarget) constructor?")};var $=function(){};$.p=function(){$.q||($.q=new $)};$.p();L||N&&S("525");A("ae.init",function(){La();Ma();Ea(window,"load",function(){});Na()}); -var La=function(){var a;if(a=w("ae-content")?document.getElementById("ae-content"):"ae-content"){a=T("table","ae-table-striped",a);for(var b=0,c;c=a[b];b++){c=T("tbody",p,c);for(var d=0,e;e=c[d];d++){e=T("tr",p,e);for(var f=0,g;g=e[f];f++)f%2&&ta(g,"ae-even")}}}},Ma=function(){var a=T(p,"ae-noscript",m);da(ga(a),function(a){va(a,"ae-noscript")})},Na=function(){u._gaq=u._gaq||[];u._gaq.push(function(){u._gaq._createAsyncTracker("UA-3739047-3","ae")._trackPageview()});(function(){var a=document.createElement("script"); -a.src=("https:"==document.location.protocol?"https://ssl":"http://www")+".google-analytics.com/ga.js";a.setAttribute("async","true");document.documentElement.firstChild.appendChild(a)})()};A("ae.trackPageView",function(){u._gaq&&u._gaq._getAsyncTracker("ae")._trackPageview()});var Pa=function(a){if(a==m||a==p||0==a.length)return 0;a=Math.max.apply(Math,a);return Oa(a)},Oa=function(a){var b=5;2>b&&(b=2);b-=1;return Math.ceil(a/b)*b},Qa=function(a,b,c){a=a.getSelection();1==a.length&&(a=a[0],a.row!=p&&(b.starttime!=p&&(c+="&starttime="+b.starttime),b.endtime!=p&&(c+="&endtime="+b.endtime),b.latency_lower!=p&&(c+="&latency_lower="+b.latency_lower),b.latency_upper!=p&&(c+="&latency_upper="+b.latency_upper),b=c+"&detail="+a.row,window.location.href=b))},Ra=function(a,b,c,d, -e){var f=new google.visualization.DataTable;f.addColumn("string","");f.addColumn("number","");f.addColumn({type:"string",role:"tooltip"});for(var g=0;gb.length&&(f=b.length);for(var g=0;g>>0),x=0,ba=function(a,b){var c=Array.prototype.slice.call(arguments,1);return function(){var b=Array.prototype.slice.call(arguments);b.unshift.apply(b,c);return a.apply(this,b)}},y=function(a,b){var c=a.split("."),f=u;!(c[0]in f)&&f.execScript&&f.execScript("var "+c[0]);for(var d;c.length&& +(d=c.shift());)!c.length&&b!==m?f[d]=b:f=f[d]?f[d]:f[d]={}},A=function(a,b){function c(){}c.prototype=b.prototype;a.m=b.prototype;a.prototype=new c};var B=function(a){Error.captureStackTrace?Error.captureStackTrace(this,B):this.stack=Error().stack||"";a&&(this.message=String(a))};A(B,Error);var ca=function(a,b){for(var c=1;cc?Math.max(0,a.length+c):c;if(v(a))return!v(b)||1!=b.length?-1:a.indexOf(b,c);for(;c=arguments.length?D.slice.call(a,b):D.slice.call(a,b,c)};var F,G,H,I,ja=function(){return u.navigator?u.navigator.userAgent:q};I=H=G=F=r;var J;if(J=ja()){var ka=u.navigator;F=0==J.indexOf("Opera");G=!F&&-1!=J.indexOf("MSIE");H=!F&&-1!=J.indexOf("WebKit");I=!F&&!H&&"Gecko"==ka.product}var la=F,K=G,L=I,M=H,ma=function(){var a=u.document;return a?a.documentMode:m},N; +e:{var O="",P;if(la&&u.opera)var Q=u.opera.version,O="function"==typeof Q?Q():Q;else if(L?P=/rv\:([^\);]+)(\)|;)/:K?P=/MSIE\s+([^\);]+)(\)|;)/:M&&(P=/WebKit\/(\S+)/),P)var na=P.exec(ja()),O=na?na[1]:"";if(K){var oa=ma();if(oa>parseFloat(O)){N=String(oa);break e}}N=O} +var pa=N,qa={},R=function(a){var b;if(!(b=qa[a])){b=0;for(var 
c=String(pa).replace(/^[\s\xa0]+|[\s\xa0]+$/g,"").split("."),f=String(a).replace(/^[\s\xa0]+|[\s\xa0]+$/g,"").split("."),d=Math.max(c.length,f.length),e=0;0==b&&e(0==t[1].length?0:parseInt(t[1],10))?1:0)||((0==k[2].length)<(0==t[2].length)?-1:(0==k[2].length)>(0==t[2].length)?1:0)||(k[2]t[2]?1:0)}while(0==b)}b=qa[a]=0<=b}return b},ra=u.document,sa=!ra||!K?m:ma()||("CSS1Compat"==ra.compatMode?parseInt(pa,10):5);!L&&!K||K&&K&&9<=sa||L&&R("1.9.1");K&&R("9");var ta=function(a){a=a.className;return v(a)&&a.match(/\S+/g)||[]},ua=function(a,b){for(var c=ta(a),f=ia(arguments,1),d=c,e=0;e=a.keyCode)a.keyCode=-1}catch(b){}};var Aa="closure_listenable_"+(1E6*Math.random()|0),Ba=0;var Ca=function(a,b,c,f,d,e){this.d=a;this.k=b;this.src=c;this.type=f;this.capture=!!d;this.e=e;this.key=++Ba;this.c=this.h=r};var W={},X={},Y={},Z={},Ea=function(){var a=Da,b=xa?function(c){return a.call(b.src,b.d,c)}:function(c){c=a.call(b.src,b.d,c);if(!c)return c};return b},Fa=function(a,b,c,f,d){if("array"==aa(b))for(var e=0;ed.keyCode||d.returnValue!=m)return n;e:{var l=r;if(0==d.keyCode)try{d.keyCode=-1;break e}catch(p){l=n}if(l||d.returnValue==m)d.returnValue=n}}l=new V;za(l,d,this);d=n;try{if(g){for(var k=[],t=l.currentTarget;t;t=t.parentNode)k.push(t);e=f[n];e.b=e.a;for(var z=k.length-1;!l.i&&0<=z&&e.b;z--)l.currentTarget=k[z],d&=Ja(e,k[z],c,n,l);if(h){e=f[r];e.b=e.a;for(z=0;!l.i&&z>>0),Ga=function(a){C(a,"Listener can not be null.");if("function"==aa(a))return a;C(a.handleEvent,"An object listener must have handleEvent method.");return a[Ka]||(a[Ka]=function(b){return a.handleEvent(b)})};var $=function(){};$.n=function(){$.o||($.o=new $)};$.n();K||M&&R("525");y("ae.init",function(){La();Ma();Fa(window,"load",function(){});Na()}); +var La=function(){var a;if(a=v("ae-content")?document.getElementById("ae-content"):"ae-content"){a=S("table","ae-table-striped",a);for(var b=0,c;c=a[b];b++){c=S("tbody",q,c);for(var f=0,d;d=c[f];f++){d=S("tr",q,d);for(var e=0,g;g=d[e];e++)e%2&&ua(g,"ae-even")}}}},Ma=function(){var a=S(q,"ae-noscript",m);ea(ha(a),function(a){wa(a,"ae-noscript")})},Na=function(){u._gaq=u._gaq||[];u._gaq.push(function(){u._gaq._createAsyncTracker("UA-3739047-3","ae")._trackPageview()});(function(){var a=document.createElement("script"); +a.src=("https:"==document.location.protocol?"https://ssl":"http://www")+".google-analytics.com/ga.js";a.setAttribute("async","true");document.documentElement.firstChild.appendChild(a)})()};y("ae.trackPageView",function(){u._gaq&&u._gaq._getAsyncTracker("ae")._trackPageview()});var Pa=function(a){if(a==m||a==q||0==a.length)return 0;a=Math.max.apply(Math,a);return Oa(a)},Oa=function(a){var b=5;2>b&&(b=2);b-=1;return Math.ceil(a/b)*b},Qa=function(a,b,c){a=a.getSelection();1==a.length&&(a=a[0],a.row!=q&&(b.starttime!=q&&(c+="&starttime="+b.starttime),b.endtime!=q&&(c+="&endtime="+b.endtime),b.latency_lower!=q&&(c+="&latency_lower="+b.latency_lower),b.latency_upper!=q&&(c+="&latency_upper="+b.latency_upper),b=c+"&detail="+a.row,window.location.href=b))},Ra=function(a,b,c,f, +d){var e=new google.visualization.DataTable;e.addColumn("string","");e.addColumn("number","");e.addColumn({type:"string",role:"tooltip"});for(var g=0;gb.length&&(e=b.length);for(var g=0;g>>0),ha=0,ia=function(a,b){var c=Array.prototype.slice.call(arguments,1);return function(){var b=Array.prototype.slice.call(arguments);b.unshift.apply(b,c);return a.apply(this,b)}},ja=function(a,b){var c=a.split("."),d=t;!(c[0]in d)&&d.execScript&&d.execScript("var "+c[0]);for(var 
e;c.length&&(e=c.shift());)!c.length&&b!==h?d[e]=b:d=d[e]?d[e]:d[e]={}},x=function(a,b){function c(){}c.prototype=b.prototype;a.f=b.prototype;a.prototype=new c;a.prototype.constructor= -a};var ka=function(a){Error.captureStackTrace?Error.captureStackTrace(this,ka):this.stack=Error().stack||"";a&&(this.message=String(a))};x(ka,Error);ka.prototype.name="CustomError";var la=function(a,b){for(var c=1;c")&&(a=a.replace(qa,">"));-1!=a.indexOf('"')&&(a=a.replace(ra,"""));return a},oa=/&/g,pa=//g,ra=/\"/g,na=/[&<>\"]/;var ta=function(a,b){b.unshift(a);ka.call(this,la.apply(m,b));b.shift()};x(ta,ka);ta.prototype.name="AssertionError";var ua=function(a,b,c){var d="Assertion failed";if(b)var d=d+(": "+b),e=c;else a&&(d+=": "+a,e=m);f(new ta(""+d,e||[]))},y=function(a,b,c){a||ua("",b,Array.prototype.slice.call(arguments,2))},va=function(a,b,c,d){a instanceof b||ua("instanceof check failed.",c,Array.prototype.slice.call(arguments,3))};var z=Array.prototype,wa=z.indexOf?function(a,b,c){y(a.length!=m);return z.indexOf.call(a,b,c)}:function(a,b,c){c=c==m?0:0>c?Math.max(0,a.length+c):c;if(u(a))return!u(b)||1!=b.length?-1:a.indexOf(b,c);for(;c=arguments.length?z.slice.call(a,b):z.slice.call(a,b,c)};var Ga=function(a,b){for(var c in a)b.call(h,a[c],c,a)},Ha=function(a,b,c){b in a&&f(Error('The object already contains the key "'+b+'"'));a[b]=c},Ia=function(a){var b={},c;for(c in a)b[a[c]]=c;return b},Ja="constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" "),Ka=function(a,b){for(var c,d,e=1;eparseFloat(Wa)){Va=String($a);break a}}Va=Wa} -var ab=Va,bb={},F=function(a){var b;if(!(b=bb[a])){b=0;for(var c=ma(String(ab)).split("."),d=ma(String(a)).split("."),e=Math.max(c.length,d.length),g=0;0==b&&g(0==A[1].length?0:parseInt(A[1],10))?1: -0)||((0==r[2].length)<(0==A[2].length)?-1:(0==r[2].length)>(0==A[2].length)?1:0)||(r[2]A[2]?1:0)}while(0==b)}b=bb[a]=0<=b}return b},cb=t.document,db=!cb||!B?h:Ua()||("CSS1Compat"==cb.compatMode?parseInt(ab,10):5);var eb,fb=!B||B&&9<=db;!C&&!B||B&&B&&9<=db||C&&F("1.9.1");var gb=B&&!F("9");var hb=function(a){a=a.className;return u(a)&&a.match(/\S+/g)||[]},G=function(a,b){for(var c=hb(a),d=Ea(arguments,1),e=c.length+d.length,g=c,k=0;k");c=c.join("")}c=a.createElement(c);d&&(u(d)?c.className=d:da(d)?G.apply(m,[c].concat(d)):qb(c,d));2a):q},zb=function(a,b,c){if(!(a.nodeName in wb))if(3==a.nodeType)c?b.push(String(a.nodeValue).replace(/(\r\n|\r|\n)/g,"")):b.push(a.nodeValue);else if(a.nodeName in xb)b.push(xb[a.nodeName]);else for(a=a.firstChild;a;)zb(a,b,c),a=a.nextSibling},kb=function(a){this.D=a||t.document||document};s=kb.prototype;s.jb=mb;s.a=function(a){return u(a)?this.D.getElementById(a):a};s.q=function(a,b,c){return sb(this.D,arguments)};s.createElement=function(a){return this.D.createElement(a)}; -s.createTextNode=function(a){return this.D.createTextNode(String(a))};s.appendChild=function(a,b){a.appendChild(b)};s.contains=ub;var Ab=function(a){Ab[" "](a);return a};Ab[" "]=aa;var Bb=!B||B&&9<=db,Cb=!B||B&&9<=db,Db=B&&!F("9");!D||F("528");C&&F("1.9b")||B&&F("8")||Sa&&F("9.5")||D&&F("528");C&&!F("8")||B&&F("9");var Eb=function(){};Eb.prototype.Ub=q;var H=function(a,b){this.type=a;this.currentTarget=this.target=b};s=H.prototype;s.U=q;s.defaultPrevented=q;s.Db=l;s.stopPropagation=function(){this.U=l};s.preventDefault=function(){this.defaultPrevented=l;this.Db=q};var I=function(a,b){a&&this.ja(a,b)};x(I,H);var 
Fb=[1,4,2];s=I.prototype;s.target=m;s.relatedTarget=m;s.offsetX=0;s.offsetY=0;s.clientX=0;s.clientY=0;s.screenX=0;s.screenY=0;s.button=0;s.keyCode=0;s.charCode=0;s.ctrlKey=q;s.altKey=q;s.shiftKey=q;s.metaKey=q;s.ab=q;s.T=m; -s.ja=function(a,b){var c=this.type=a.type;H.call(this,c);this.target=a.target||a.srcElement;this.currentTarget=b;var d=a.relatedTarget;if(d){if(C){var e;a:{try{Ab(d.nodeName);e=l;break a}catch(g){}e=q}e||(d=m)}}else"mouseover"==c?d=a.fromElement:"mouseout"==c&&(d=a.toElement);this.relatedTarget=d;this.offsetX=D||a.offsetX!==h?a.offsetX:a.layerX;this.offsetY=D||a.offsetY!==h?a.offsetY:a.layerY;this.clientX=a.clientX!==h?a.clientX:a.pageX;this.clientY=a.clientY!==h?a.clientY:a.pageY;this.screenX=a.screenX|| -0;this.screenY=a.screenY||0;this.button=a.button;this.keyCode=a.keyCode||0;this.charCode=a.charCode||("keypress"==c?a.keyCode:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=a.metaKey;this.ab=E?a.metaKey:a.ctrlKey;this.state=a.state;this.T=a;a.defaultPrevented&&this.preventDefault();delete this.U};var Gb=function(a){return Bb?0==a.T.button:"click"==a.type?l:!!(a.T.button&Fb[0])}; -I.prototype.stopPropagation=function(){I.f.stopPropagation.call(this);this.T.stopPropagation?this.T.stopPropagation():this.T.cancelBubble=l};I.prototype.preventDefault=function(){I.f.preventDefault.call(this);var a=this.T;if(a.preventDefault)a.preventDefault();else if(a.returnValue=q,Db)try{if(a.ctrlKey||112<=a.keyCode&&123>=a.keyCode)a.keyCode=-1}catch(b){}};var Hb="closure_listenable_"+(1E6*Math.random()|0),Ib=0;var Jb=function(){};s=Jb.prototype;s.key=0;s.F=q;s.ca=q;s.ja=function(a,b,c,d,e,g){v(a)?this.zb=l:a&&a.handleEvent&&v(a.handleEvent)?this.zb=q:f(Error("Invalid listener argument"));this.H=a;this.ub=b;this.src=c;this.type=d;this.capture=!!e;this.ra=g;this.ca=q;this.key=++Ib;this.F=q};s.handleEvent=function(a){return this.zb?this.H.call(this.ra||this.src,a):this.H.handleEvent.call(this.H,a)};var Kb={},J={},Lb={},Mb={},K=function(a,b,c,d,e){if(da(b)){for(var g=0;ge.keyCode||e.returnValue!=h)return l;a:{var p=q;if(0==e.keyCode)try{e.keyCode=-1;break a}catch(N){p=l}if(p||e.returnValue==h)e.returnValue=l}}p=new I;p.ja(e,this);e=l;try{if(k){for(var r=[],A=p.currentTarget;A;A=A.parentNode)r.push(A);g=d[l];g.M=g.L;for(var S=r.length-1;!p.U&&0<=S&&g.M;S--)p.currentTarget= -r[S],e&=Xb(g,r[S],c,l,p);if(n){g=d[q];g.M=g.L;for(S=0;!p.U&&S>>0),Nb=function(a){return v(a)?a:a[Yb]||(a[Yb]=function(b){return a.handleEvent(b)})};var Zb=function(a){this.wb=a;this.Ia=[]};x(Zb,Eb);var $b=[];Zb.prototype.d=function(a,b,c,d,e){da(b)||($b[0]=b,b=$b);for(var g=0;g++d,"infinite loop")}c=this.dc;d=a.type||a;if(u(a))a=new H(a,c);else if(a instanceof H)a.target=a.target||c;else{var e=a;a=new H(d,c);Ka(a,e)}var e=l,g;if(b)for(var k=b.length-1;!a.U&&0<=k;k--)g=a.currentTarget=b[k],e=cc(g,d,l,a)&&e;a.U||(g=a.currentTarget=c,e=cc(g,d,l,a)&&e,a.U||(e=cc(g,d,q,a)&&e));if(b)for(k=0;!a.U&&kb||b>oc(this))&&f(Error("Child component index out of bounds"));if(!this.i||!this.r)this.i={},this.r=[];if(a.getParent()==this){var d=jc(a);this.i[d]=a;Ca(this.r,a)}else Ha(this.i,jc(a),a);mc(a,this);Fa(this.r,b,0,a);a.e&&this.e&&a.getParent()==this?(c=this.B(),c.insertBefore(a.a(),c.childNodes[b]||m)):c?(this.c||this.q(),c=O(this,b+1),b=this.B(),c=c?c.c:m,a.e&&f(Error("Component already rendered")),a.c||a.q(),b?b.insertBefore(a.c, -c||m):a.n.D.body.appendChild(a.c),(!a.o||a.o.e)&&a.G()):this.e&&(!a.e&&a.c&&a.c.parentNode&&1==a.c.parentNode.nodeType)&&a.G()};s.B=function(){return this.c}; -var 
pc=function(a){if(a.wa==m){var b;a:{b=a.e?a.c:a.n.D.body;var c=lb(b);if(c.defaultView&&c.defaultView.getComputedStyle&&(b=c.defaultView.getComputedStyle(b,m))){b=b.direction||b.getPropertyValue("direction")||"";break a}b=""}a.wa="rtl"==(b||((a.e?a.c:a.n.D.body).currentStyle?(a.e?a.c:a.n.D.body).currentStyle.direction:m)||(a.e?a.c:a.n.D.body).style&&(a.e?a.c:a.n.D.body).style.direction)}return a.wa};M.prototype.ua=function(a){this.e&&f(Error("Component already rendered"));this.wa=a}; -var oc=function(a){return a.r?a.r.length:0},O=function(a,b){return a.r?a.r[b]||m:m},nc=function(a,b,c){a.r&&xa(a.r,b,c)},qc=function(a,b){return a.r&&b?wa(a.r,b):-1};M.prototype.removeChild=function(a,b){if(a){var c=u(a)?a:jc(a);a=this.i&&c?(c in this.i?this.i[c]:h)||m:m;if(c&&a){var d=this.i;c in d&&delete d[c];Ca(this.r,a);b&&(a.ga(),a.c&&(c=a.c)&&c.parentNode&&c.parentNode.removeChild(c));mc(a,m)}}a||f(Error("Child is not in parent component"));return a};var rc=function(a,b,c){a.setAttribute("aria-"+b,c)};var tc=function(a,b,c,d,e){if(!B&&(!D||!F("525")))return l;if(E&&e)return sc(a);if(e&&!d||!c&&(17==b||18==b||E&&91==b))return q;if(D&&d&&c)switch(a){case 220:case 219:case 221:case 192:case 186:case 189:case 187:case 188:case 190:case 191:case 192:case 222:return q}if(B&&d&&b==a)return q;switch(a){case 13:return!(B&&B&&9<=db);case 27:return!D}return sc(a)},sc=function(a){if(48<=a&&57>=a||96<=a&&106>=a||65<=a&&90>=a||D&&0==a)return l;switch(a){case 32:case 63:case 107:case 109:case 110:case 111:case 186:case 59:case 189:case 187:case 61:case 188:case 190:case 191:case 192:case 222:case 219:case 220:case 221:return l; -default:return q}},uc=function(a){switch(a){case 61:return 187;case 59:return 186;case 224:return 91;case 0:return 224;default:return a}};var P=function(a,b){L.call(this);a&&vc(this,a,b)};x(P,L);s=P.prototype;s.c=m;s.Fa=m;s.Wa=m;s.Ga=m;s.s=-1;s.R=-1;s.hb=q; -var wc={3:13,12:144,63232:38,63233:40,63234:37,63235:39,63236:112,63237:113,63238:114,63239:115,63240:116,63241:117,63242:118,63243:119,63244:120,63245:121,63246:122,63247:123,63248:44,63272:46,63273:36,63275:35,63276:33,63277:34,63289:144,63302:45},xc={Up:38,Down:40,Left:37,Right:39,Enter:13,F1:112,F2:113,F3:114,F4:115,F5:116,F6:117,F7:118,F8:119,F9:120,F10:121,F11:122,F12:123,"U+007F":46,Home:36,End:35,PageUp:33,PageDown:34,Insert:45},yc=B||D&&F("525"),zc=E&&C; -P.prototype.Sb=function(a){if(D&&(17==this.s&&!a.ctrlKey||18==this.s&&!a.altKey||E&&91==this.s&&!a.metaKey))this.R=this.s=-1;-1==this.s&&(a.ctrlKey&&17!=a.keyCode?this.s=17:a.altKey&&18!=a.keyCode?this.s=18:a.metaKey&&91!=a.keyCode&&(this.s=91));yc&&!tc(a.keyCode,this.s,a.shiftKey,a.ctrlKey,a.altKey)?this.handleEvent(a):(this.R=C?uc(a.keyCode):a.keyCode,zc&&(this.hb=a.altKey))};P.prototype.Tb=function(a){this.R=this.s=-1;this.hb=a.altKey}; -P.prototype.handleEvent=function(a){var b=a.T,c,d,e=b.altKey;B&&"keypress"==a.type?(c=this.R,d=13!=c&&27!=c?b.keyCode:0):D&&"keypress"==a.type?(c=this.R,d=0<=b.charCode&&63232>b.charCode&&sc(c)?b.charCode:0):Sa?(c=this.R,d=sc(c)?b.keyCode:0):(c=b.keyCode||this.R,d=b.charCode||0,zc&&(e=this.hb),E&&(63==d&&224==c)&&(c=191));var g=c,k=b.keyIdentifier;c?63232<=c&&c in wc?g=wc[c]:25==c&&a.shiftKey&&(g=9):k&&k in xc&&(g=xc[k]);a=g==this.s;this.s=g;b=new Ac(g,d,a,b);b.altKey=e;this.dispatchEvent(b)}; -P.prototype.a=function(){return this.c};var 
vc=function(a,b,c){a.Ga&&a.detach();a.c=b;a.Fa=K(a.c,"keypress",a,c);a.Wa=K(a.c,"keydown",a.Sb,c,a);a.Ga=K(a.c,"keyup",a.Tb,c,a)};P.prototype.detach=function(){this.Fa&&(Tb(this.Fa),Tb(this.Wa),Tb(this.Ga),this.Ga=this.Wa=this.Fa=m);this.c=m;this.R=this.s=-1};var Ac=function(a,b,c,d){d&&this.ja(d,h);this.type="key";this.keyCode=a;this.charCode=b;this.repeat=c};x(Ac,I);var Cc=function(a,b){a||f(Error("Invalid class name "+a));v(b)||f(Error("Invalid decorator function "+b));Bc[a]=b},Dc={},Bc={};var Q=function(){};ba(Q);Q.prototype.Z=function(){};var Ec=function(a,b){a&&(a.tabIndex=b?0:-1)};s=Q.prototype;s.q=function(a){return a.jb().q("div",this.xa(a).join(" "))};s.B=function(a){return a};s.da=function(a){return"DIV"==a.tagName};s.O=function(a,b){b.id&&kc(a,b.id);var c=this.w(),d=q,e=hb(b);e&&xa(e,function(b){b==c?d=l:b&&this.$a(a,b,c)},this);d||G(b,c);Fc(a,this.B(b));return b};s.$a=function(a,b,c){b==c+"-disabled"?a.ta(q):b==c+"-horizontal"?Gc(a,"horizontal"):b==c+"-vertical"&&Gc(a,"vertical")}; -var Fc=function(a,b){if(b)for(var c=b.firstChild,d;c&&c.parentNode==b;){d=c.nextSibling;if(1==c.nodeType){var e;a:{e=h;for(var g=hb(c),k=0,n=g.length;ka?c-1:a},0)},ad=function(a){bd(a,function(a,c){return(a+1)%c},a.j)},$c=function(a){bd(a,function(a,c){a--;return 0>a?c-1:a},a.j)},bd=function(a,b,c){c=0>c?qc(a,a.h):c;var d=oc(a);c=b.call(a,c,d);for(var e=0;e<=d;){var g=O(a,c);if(g&&g.t()&&g.isEnabled()&&g.m&2){a.Ua(c);break}e++;c=b.call(a,c,d)}};W.prototype.Ua=function(a){Xc(this,a)};var cd=function(){};x(cd,R);ba(cd);s=cd.prototype;s.w=function(){return"goog-tab"};s.Z=function(){return"tab"};s.q=function(a){var b=cd.f.q.call(this,a);(a=a.Sa())&&this.Va(b,a);return b};s.O=function(a,b){b=cd.f.O.call(this,a,b);var c=this.Sa(b);c&&(a.qb=c);if(a.g&8&&(c=a.getParent())&&v(c.ba))a.v(8,q),c.ba(a);return b};s.Sa=function(a){return a.title||""};s.Va=function(a,b){a&&(a.title=b||"")};var dd=function(a,b,c){T.call(this,a,b||cd.ea(),c);Sc(this,8,l);this.$|=9};x(dd,T);dd.prototype.Sa=function(){return this.qb};dd.prototype.Va=function(a){this.Ab().Va(this.a(),a);this.qb=a};Cc("goog-tab",function(){return new dd(m)});var X=function(){};x(X,Q);ba(X);X.prototype.w=function(){return"goog-tab-bar"};X.prototype.Z=function(){return"tablist"};X.prototype.$a=function(a,b,c){this.Bb||(this.Ja||ed(this),this.Bb=Ia(this.Ja));var d=this.Bb[b];d?(Gc(a,fd(d)),a.xb=d):X.f.$a.call(this,a,b,c)};X.prototype.xa=function(a){var b=X.f.xa.call(this,a);this.Ja||ed(this);b.push(this.Ja[a.xb]);return b};var ed=function(a){var b=a.w();a.Ja={top:b+"-top",bottom:b+"-bottom",start:b+"-start",end:b+"-end"}};var Y=function(a,b,c){a=a||"top";Gc(this,fd(a));this.xb=a;W.call(this,this.P,b||X.ea(),c);gd(this)};x(Y,W);s=Y.prototype;s.ac=l;s.K=m;s.G=function(){Y.f.G.call(this);gd(this)};s.removeChild=function(a,b){hd(this,a);return Y.f.removeChild.call(this,a,b)};s.Ua=function(a){Y.f.Ua.call(this,a);this.ac&&this.ba(O(this,a))};s.ba=function(a){a?Qc(a,l):this.K&&Qc(this.K,q)}; -var hd=function(a,b){if(b&&b==a.K){for(var c=qc(a,b),d=c-1;b=O(a,d);d--)if(b.t()&&b.isEnabled()){a.ba(b);return}for(c+=1;b=O(a,c);c++)if(b.t()&&b.isEnabled()){a.ba(b);return}a.ba(m)}};s=Y.prototype;s.Zb=function(a){this.K&&this.K!=a.target&&Qc(this.K,q);this.K=a.target};s.$b=function(a){a.target==this.K&&(this.K=m)};s.Xb=function(a){hd(this,a.target)};s.Yb=function(a){hd(this,a.target)};s.pa=function(){O(this,this.j)||this.C(this.K||O(this,0))}; -var 
gd=function(a){lc(a).d(a,"select",a.Zb).d(a,"unselect",a.$b).d(a,"disable",a.Xb).d(a,"hide",a.Yb)},fd=function(a){return"start"==a||"end"==a?"vertical":"horizontal"};Cc("goog-tab-bar",function(){return new Y});var Z=function(a,b,c,d,e){function g(a){a&&(a.tabIndex=0,a.setAttribute("role",k.Z()),G(a,"goog-zippy-header"),id(k,a),a&&k.Ob.d(a,"keydown",k.Pb))}L.call(this);this.n=e||mb();this.X=this.n.a(a)||m;this.Ca=this.n.a(d||m);this.ia=(this.Oa=v(b)?b:m)||!b?m:this.n.a(b);this.l=c==l;this.Ob=new Zb(this);this.ob=new Zb(this);var k=this;g(this.X);g(this.Ca);this.Y(this.l)};x(Z,L);s=Z.prototype;s.fa=l;s.Z=function(){return"tab"};s.B=function(){return this.ia};s.toggle=function(){this.Y(!this.l)}; -s.Y=function(a){this.ia?dc(this.ia,a):a&&this.Oa&&(this.ia=this.Oa());this.ia&&G(this.ia,"goog-zippy-content");if(this.Ca)dc(this.X,!a),dc(this.Ca,a);else if(this.X){var b=this.X;a?G(b,"goog-zippy-expanded"):jb(b,"goog-zippy-expanded");b=this.X;!a?G(b,"goog-zippy-collapsed"):jb(b,"goog-zippy-collapsed");rc(this.X,"expanded",a)}this.l=a;this.dispatchEvent(new jd("toggle",this))};s.nb=function(){return this.fa};s.Na=function(a){this.fa!=a&&((this.fa=a)?(id(this,this.X),id(this,this.Ca)):bc(this.ob))}; -var id=function(a,b){b&&a.ob.d(b,"click",a.bc)};Z.prototype.Pb=function(a){if(13==a.keyCode||32==a.keyCode)this.toggle(),this.dispatchEvent(new H("action",this)),a.preventDefault(),a.stopPropagation()};Z.prototype.bc=function(){this.toggle();this.dispatchEvent(new H("action",this))};var jd=function(a,b){H.call(this,a,b)};x(jd,H);var ld=function(a,b){this.mb=[];for(var c=nb(a),c=ob("span","ae-zippy",c),d=0,e;e=c[d];d++){var g;if(e.parentNode.parentNode.parentNode.nextElementSibling!=h)g=e.parentNode.parentNode.parentNode.nextElementSibling;else for(g=e.parentNode.parentNode.parentNode.nextSibling;g&&1!=g.nodeType;)g=g.nextSibling;e=new Z(e,g,q);this.mb.push(e)}this.hc=new kd(this.mb,nb(b))};ld.prototype.kc=function(){return this.hc};ld.prototype.lc=function(){return this.mb}; -var kd=function(a,b){this.ya=a;if(this.ya.length)for(var c=0,d;d=this.ya[c];c++)K(d,"toggle",this.Wb,q,this);this.La=0;this.l=q;c="ae-toggle ae-plus ae-action";this.ya.length||(c+=" ae-disabled");this.V=tb("span",{className:c},"Expand All");K(this.V,"click",this.Vb,q,this);b&&b.appendChild(this.V)};kd.prototype.Vb=function(){this.ya.length&&this.Y(!this.l)}; -kd.prototype.Wb=function(a){a=a.currentTarget;this.La=a.l?this.La+1:this.La-1;a.l!=this.l&&(a.l?(this.l=l,md(this,l)):0==this.La&&(this.l=q,md(this,q)))};kd.prototype.Y=function(a){this.l=a;a=0;for(var b;b=this.ya[a];a++)b.l!=this.l&&b.Y(this.l);md(this)}; -var md=function(a,b){(b!==h?b:a.l)?(jb(a.V,"ae-plus"),G(a.V,"ae-minus"),vb(a.V,"Collapse All")):(jb(a.V,"ae-minus"),G(a.V,"ae-plus"),vb(a.V,"Expand All"))},nd=function(a){this.cc=a;this.Fb={};var b,c=tb("div",{},b=tb("div",{id:"ae-stats-details-tabs",className:"goog-tab-bar goog-tab-bar-top"}),tb("div",{className:"goog-tab-bar-clear"}),a=tb("div",{id:"ae-stats-details-tabs-content",className:"goog-tab-content"})),d=new Y;d.O(b);K(d,"select",this.Eb,q,this);K(d,"unselect",this.Eb,q,this);b=0;for(var e;e= -this.cc[b];b++)if(e=nb("ae-stats-details-"+e)){var g=ob("h2",m,e)[0],k;k=g;var n=h;gb&&"innerText"in k?n=k.innerText.replace(/(\r\n|\r|\n)/g,"\n"):(n=[],zb(k,n,l),n=n.join(""));n=n.replace(/ \xAD /g," ").replace(/\xAD/g,"");n=n.replace(/\u200B/g,"");gb||(n=n.replace(/ +/g," "));" "!=n&&(n=n.replace(/^\s*/,""));k=n;g&&g.parentNode&&g.parentNode.removeChild(g);g=new 
dd(k);this.Fb[w(g)]=e;d.Ea(g,l);a.appendChild(e);0==b?d.ba(g):dc(e,q)}nb("bd").appendChild(c)}; -nd.prototype.Eb=function(a){var b=this.Fb[w(a.target)];dc(b,"select"==a.type)};ja("ae.Stats.Details.Tabs",nd);ja("goog.ui.Zippy",Z);Z.prototype.setExpanded=Z.prototype.Y;ja("ae.Stats.MakeZippys",ld);ld.prototype.getExpandCollapse=ld.prototype.kc;ld.prototype.getZippys=ld.prototype.lc;kd.prototype.setExpanded=kd.prototype.Y;var $=function(){this.bb=[];this.gb=[]},od=[[5,0.2,1],[6,0.2,1.2],[5,0.25,1.25],[6,0.25,1.5],[4,0.5,2],[5,0.5,2.5],[6,0.5,3],[4,1,4],[5,1,5],[6,1,6],[4,2,8],[5,2,10]],pd=function(a){if(0>=a)return[2,0.5,1];for(var b=1;1>a;)a*=10,b/=10;for(;10<=a;)a/=10,b*=10;for(var c=0;c');a.write('
');for(var e=0;e<=b;e++)a.write(''),a.write(''),a.write(" "+e*c+"");a.write("
\n")}; -$.prototype.jc=function(){this.gb=[];var a=pd(this.eb),b=a[0],c=a[1],a=100/a[2];this.write('\n');qd(this,b,c,a);for(var d=0;d\n\n")}qd(this,b,c,a);this.write("
');0'),this.write(e.label),0"));this.write("");this.write('
');0');this.write('');0'));0 '),this.write(e.yb),this.write(""));0");this.write("
\n");return this.gb.join("")};$.prototype.ic=function(a,b,c,d,e,g){this.eb=Math.max(this.eb,Math.max(b+c,b+d));this.bb.push({label:a,start:b,duration:c,cb:d,yb:e,la:g})};ja("Gantt",$);$.prototype.add_bar=$.prototype.ic;$.prototype.draw=$.prototype.jc;})(); +/* Copyright 2008-10 Google Inc. All Rights Reserved. */ (function(){function f(a){throw a;} +var h=void 0,l=!0,m=null,q=!1,s,t=this,aa=function(){},ba=function(a){a.ea=function(){return a.Eb?a.Eb:a.Eb=new a}},ca=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof Array)return"array";if(a instanceof Object)return b;var c=Object.prototype.toString.call(a);if("[object Window]"==c)return"object";if("[object Array]"==c||"number"==typeof a.length&&"undefined"!=typeof a.splice&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("splice"))return"array";if("[object Function]"== +c||"undefined"!=typeof a.call&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("call"))return"function"}else return"null";else if("function"==b&&"undefined"==typeof a.call)return"object";return b},da=function(a){return"array"==ca(a)},ea=function(a){var b=ca(a);return"array"==b||"object"==b&&"number"==typeof a.length},u=function(a){return"string"==typeof a},v=function(a){return"function"==ca(a)},fa=function(a){var b=typeof a;return"object"==b&&a!=m||"function"==b},w=function(a){return a[ga]|| +(a[ga]=++ha)},ga="closure_uid_"+(1E9*Math.random()>>>0),ha=0,ia=function(a,b){var c=Array.prototype.slice.call(arguments,1);return function(){var b=Array.prototype.slice.call(arguments);b.unshift.apply(b,c);return a.apply(this,b)}},ja=function(a,b){var c=a.split("."),d=t;!(c[0]in d)&&d.execScript&&d.execScript("var "+c[0]);for(var e;c.length&&(e=c.shift());)!c.length&&b!==h?d[e]=b:d=d[e]?d[e]:d[e]={}},x=function(a,b){function c(){}c.prototype=b.prototype;a.f=b.prototype;a.prototype=new c;a.prototype.constructor= +a};var ka=function(a){Error.captureStackTrace?Error.captureStackTrace(this,ka):this.stack=Error().stack||"";a&&(this.message=String(a))};x(ka,Error);ka.prototype.name="CustomError";var la=function(a,b){for(var c=1;c")&&(a=a.replace(qa,">"));-1!=a.indexOf('"')&&(a=a.replace(ra,"""));return a},oa=/&/g,pa=//g,ra=/\"/g,na=/[&<>\"]/;var ta=function(a,b){b.unshift(a);ka.call(this,la.apply(m,b));b.shift()};x(ta,ka);ta.prototype.name="AssertionError";var ua=function(a,b,c){var d="Assertion failed";if(b)var d=d+(": "+b),e=c;else a&&(d+=": "+a,e=m);f(new ta(""+d,e||[]))},y=function(a,b,c){a||ua("",b,Array.prototype.slice.call(arguments,2))},va=function(a,b,c,d){a instanceof b||ua("instanceof check failed.",c,Array.prototype.slice.call(arguments,3))};var z=Array.prototype,wa=z.indexOf?function(a,b,c){y(a.length!=m);return z.indexOf.call(a,b,c)}:function(a,b,c){c=c==m?0:0>c?Math.max(0,a.length+c):c;if(u(a))return!u(b)||1!=b.length?-1:a.indexOf(b,c);for(;c=arguments.length?z.slice.call(a,b):z.slice.call(a,b,c)};var Ga=function(a,b){for(var c in a)b.call(h,a[c],c,a)},Ha=function(a,b){for(var c in a)if(a[c]==b)return l;return q},Ia=function(a,b,c){b in a&&f(Error('The object already contains the key "'+b+'"'));a[b]=c},Ja=function(a){var b={},c;for(c in a)b[a[c]]=c;return b},Ka="constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" "),La=function(a,b){for(var c,d,e=1;eparseFloat(Xa)){Wa=String(ab);break t}}Wa=Xa} +var bb=Wa,cb={},F=function(a){var b;if(!(b=cb[a])){b=0;for(var 
c=ma(String(bb)).split("."),d=ma(String(a)).split("."),e=Math.max(c.length,d.length),g=0;0==b&&g(0==A[1].length?0:parseInt(A[1],10))?1: +0)||((0==r[2].length)<(0==A[2].length)?-1:(0==r[2].length)>(0==A[2].length)?1:0)||(r[2]A[2]?1:0)}while(0==b)}b=cb[a]=0<=b}return b},db=t.document,eb=!db||!B?h:Va()||("CSS1Compat"==db.compatMode?parseInt(bb,10):5);var fb,gb=!B||B&&9<=eb;!C&&!B||B&&B&&9<=eb||C&&F("1.9.1");var hb=B&&!F("9");var ib=function(a){a=a.className;return u(a)&&a.match(/\S+/g)||[]},G=function(a,b){for(var c=ib(a),d=Ea(arguments,1),e=c.length+d.length,g=c,k=0;k");c=c.join("")}c=a.createElement(c);d&&(u(d)?c.className=d:da(d)?G.apply(m,[c].concat(d)):rb(c,d));2a):q},Ab=function(a,b,c){if(!(a.nodeName in xb))if(3==a.nodeType)c?b.push(String(a.nodeValue).replace(/(\r\n|\r|\n)/g,"")):b.push(a.nodeValue);else if(a.nodeName in yb)b.push(yb[a.nodeName]);else for(a=a.firstChild;a;)Ab(a,b,c),a=a.nextSibling},lb=function(a){this.D=a||t.document||document};s=lb.prototype;s.ib=nb;s.a=function(a){return u(a)?this.D.getElementById(a):a};s.q=function(a,b,c){return sb(this.D,arguments)};s.createElement=function(a){return this.D.createElement(a)}; +s.createTextNode=function(a){return this.D.createTextNode(String(a))};s.appendChild=function(a,b){a.appendChild(b)};s.contains=vb;var Bb=function(a){Bb[" "](a);return a};Bb[" "]=aa;var Cb=!B||B&&9<=eb,Db=!B||B&&9<=eb,Eb=B&&!F("9");!D||F("528");C&&F("1.9b")||B&&F("8")||Ta&&F("9.5")||D&&F("528");C&&!F("8")||B&&F("9");var Fb=function(){};Fb.prototype.Sb=q;var H=function(a,b){this.type=a;this.currentTarget=this.target=b};s=H.prototype;s.U=q;s.defaultPrevented=q;s.xb=l;s.stopPropagation=function(){this.U=l};s.preventDefault=function(){this.defaultPrevented=l;this.xb=q};var I=function(a,b){a&&Gb(this,a,b)};x(I,H);var Hb=[1,4,2];s=I.prototype;s.target=m;s.relatedTarget=m;s.offsetX=0;s.offsetY=0;s.clientX=0;s.clientY=0;s.screenX=0;s.screenY=0;s.button=0;s.keyCode=0;s.charCode=0;s.ctrlKey=q;s.altKey=q;s.shiftKey=q;s.metaKey=q;s.$a=q;s.T=m; +var Gb=function(a,b,c){var d=a.type=b.type;H.call(a,d);a.target=b.target||b.srcElement;a.currentTarget=c;if(c=b.relatedTarget){if(C){var e;t:{try{Bb(c.nodeName);e=l;break t}catch(g){}e=q}e||(c=m)}}else"mouseover"==d?c=b.fromElement:"mouseout"==d&&(c=b.toElement);a.relatedTarget=c;a.offsetX=D||b.offsetX!==h?b.offsetX:b.layerX;a.offsetY=D||b.offsetY!==h?b.offsetY:b.layerY;a.clientX=b.clientX!==h?b.clientX:b.pageX;a.clientY=b.clientY!==h?b.clientY:b.pageY;a.screenX=b.screenX||0;a.screenY=b.screenY|| +0;a.button=b.button;a.keyCode=b.keyCode||0;a.charCode=b.charCode||("keypress"==d?b.keyCode:0);a.ctrlKey=b.ctrlKey;a.altKey=b.altKey;a.shiftKey=b.shiftKey;a.metaKey=b.metaKey;a.$a=E?b.metaKey:b.ctrlKey;a.state=b.state;a.T=b;b.defaultPrevented&&a.preventDefault();delete a.U},Ib=function(a){return Cb?0==a.T.button:"click"==a.type?l:!!(a.T.button&Hb[0])};I.prototype.stopPropagation=function(){I.f.stopPropagation.call(this);this.T.stopPropagation?this.T.stopPropagation():this.T.cancelBubble=l}; +I.prototype.preventDefault=function(){I.f.preventDefault.call(this);var a=this.T;if(a.preventDefault)a.preventDefault();else if(a.returnValue=q,Eb)try{if(a.ctrlKey||112<=a.keyCode&&123>=a.keyCode)a.keyCode=-1}catch(b){}};var Jb="closure_listenable_"+(1E6*Math.random()|0),Kb=0;var Lb=function(a,b,c,d,e,g){this.H=a;this.zb=b;this.src=c;this.type=d;this.capture=!!e;this.Z=g;this.key=++Kb;this.G=this.ja=q};var Mb={},J={},Nb={},Ob={},K=function(a,b,c,d,e){if(da(b)){for(var g=0;ge.keyCode||e.returnValue!=h)return l;t:{var 
p=q;if(0==e.keyCode)try{e.keyCode=-1;break t}catch(M){p=l}if(p||e.returnValue==h)e.returnValue=l}}p=new I;Gb(p,e,this);e=l;try{if(k){for(var r=[],A=p.currentTarget;A;A=A.parentNode)r.push(A);g=d[l];g.M=g.L;for(var R=r.length-1;!p.U&&0<=R&&g.M;R--)p.currentTarget=r[R],e&=Yb(g, +r[R],c,l,p);if(n){g=d[q];g.M=g.L;for(R=0;!p.U&&R>>0),Pb=function(a){y(a,"Listener can not be null.");if(v(a))return a;y(a.handleEvent,"An object listener must have handleEvent method.");return a[Zb]||(a[Zb]=function(b){return a.handleEvent(b)})};var $b=function(a){this.tb=a;this.Ha={}};x($b,Fb);var ac=[];$b.prototype.d=function(a,b,c,d,e){da(b)||(ac[0]=b,b=ac);for(var g=0;g++d,"infinite loop")}c=this.bc;d=a.type||a;if(u(a))a=new H(a,c);else if(a instanceof H)a.target=a.target||c;else{var e=a;a=new H(d,c);La(a,e)}var e=l,g;if(b)for(var k=b.length-1;!a.U&&0<=k;k--)g=a.currentTarget=b[k],e=ec(g,d,l,a)&&e;a.U||(g=a.currentTarget=c,e=ec(g,d,l,a)&&e,a.U||(e=ec(g,d,q,a)&&e));if(b)for(k=0;!a.U&&kb||b>qc(this))&&f(Error("Child component index out of bounds"));if(!this.i||!this.r)this.i={},this.r=[];if(a.getParent()==this){var d=lc(a);this.i[d]=a;Ca(this.r,a)}else Ia(this.i,lc(a),a);oc(a,this);Fa(this.r,b,0,a);a.e&&this.e&&a.getParent()==this?(c=this.B(),c.insertBefore(a.a(),c.childNodes[b]||m)):c?(this.c||this.q(),c=O(this,b+1),b=this.B(),c=c?c.c:m,a.e&&f(Error("Component already rendered")),a.c||a.q(),b?b.insertBefore(a.c, +c||m):a.n.D.body.appendChild(a.c),(!a.o||a.o.e)&&a.F()):this.e&&(!a.e&&a.c&&a.c.parentNode&&1==a.c.parentNode.nodeType)&&a.F()};s.B=function(){return this.c}; +var rc=function(a){if(a.va==m){var b;t:{b=a.e?a.c:a.n.D.body;var c=mb(b);if(c.defaultView&&c.defaultView.getComputedStyle&&(b=c.defaultView.getComputedStyle(b,m))){b=b.direction||b.getPropertyValue("direction")||"";break t}b=""}a.va="rtl"==(b||((a.e?a.c:a.n.D.body).currentStyle?(a.e?a.c:a.n.D.body).currentStyle.direction:m)||(a.e?a.c:a.n.D.body).style&&(a.e?a.c:a.n.D.body).style.direction)}return a.va};N.prototype.ta=function(a){this.e&&f(Error("Component already rendered"));this.va=a}; +var qc=function(a){return a.r?a.r.length:0},O=function(a,b){return a.r?a.r[b]||m:m},pc=function(a,b,c){a.r&&xa(a.r,b,c)},sc=function(a,b){return a.r&&b?wa(a.r,b):-1};N.prototype.removeChild=function(a,b){if(a){var c=u(a)?a:lc(a);a=this.i&&c?(c in this.i?this.i[c]:h)||m:m;if(c&&a){var d=this.i;c in d&&delete d[c];Ca(this.r,a);b&&(a.ga(),a.c&&(c=a.c)&&c.parentNode&&c.parentNode.removeChild(c));oc(a,m)}}a||f(Error("Child is not in parent component"));return a};var tc,uc={kc:"activedescendant",pc:"atomic",qc:"autocomplete",sc:"busy",vc:"checked",Ac:"controls",Cc:"describedby",Fc:"disabled",Hc:"dropeffect",Ic:"expanded",Jc:"flowto",Lc:"grabbed",Pc:"haspopup",Rc:"hidden",Tc:"invalid",Uc:"label",Vc:"labelledby",Wc:"level",ad:"live",ld:"multiline",md:"multiselectable",qd:"orientation",rd:"owns",sd:"posinset",ud:"pressed",yd:"readonly",Ad:"relevant",Bd:"required",Hd:"selected",Jd:"setsize",Ld:"sort",Yd:"valuemax",Zd:"valuemin",$d:"valuenow",ae:"valuetext"};var vc={lc:"alert",mc:"alertdialog",nc:"application",oc:"article",rc:"banner",tc:"button",uc:"checkbox",wc:"columnheader",xc:"combobox",yc:"complementary",zc:"contentinfo",Bc:"definition",Dc:"dialog",Ec:"directory",Gc:"document",Kc:"form",Mc:"grid",Nc:"gridcell",Oc:"group",Qc:"heading",Sc:"img",Xc:"link",Yc:"list",Zc:"listbox",$c:"listitem",bd:"log",cd:"main",dd:"marquee",ed:"math",fd:"menu",gd:"menubar",hd:"menuitem",jd:"menuitemcheckbox",kd:"menuitemradio",nd:"navigation",od:"note",pd:"option", 
+td:"presentation",vd:"progressbar",wd:"radio",xd:"radiogroup",zd:"region",Cd:"row",Dd:"rowgroup",Ed:"rowheader",Fd:"scrollbar",Gd:"search",Id:"separator",Kd:"slider",Md:"spinbutton",Nd:"status",Od:"tab",Pd:"tablist",Qd:"tabpanel",Rd:"textbox",Sd:"timer",Td:"toolbar",Ud:"tooltip",Vd:"tree",Wd:"treegrid",Xd:"treeitem"};var wc=function(a,b){b?(y(Ha(vc,b),"No such ARIA role "+b),a.setAttribute("role",b)):a.removeAttribute("role")},yc=function(a,b,c){ea(c)&&(c=c.join(" "));var d=xc(b);""===c||c==h?(tc||(tc={atomic:q,autocomplete:"none",dropeffect:"none",haspopup:q,live:"off",multiline:q,multiselectable:q,orientation:"vertical",readonly:q,relevant:"additions text",required:q,sort:"none",busy:q,disabled:q,hidden:q,invalid:"false"}),c=tc,b in c?a.setAttribute(d,c[b]):a.removeAttribute(d)):a.setAttribute(d,c)},xc=function(a){y(a, +"ARIA attribute cannot be empty.");y(Ha(uc,a),"No such ARIA attribute "+a);return"aria-"+a};var Ac=function(a,b,c,d,e){if(!B&&(!D||!F("525")))return l;if(E&&e)return zc(a);if(e&&!d||!c&&(17==b||18==b||E&&91==b))return q;if(D&&d&&c)switch(a){case 220:case 219:case 221:case 192:case 186:case 189:case 187:case 188:case 190:case 191:case 192:case 222:return q}if(B&&d&&b==a)return q;switch(a){case 13:return!(B&&B&&9<=eb);case 27:return!D}return zc(a)},zc=function(a){if(48<=a&&57>=a||96<=a&&106>=a||65<=a&&90>=a||D&&0==a)return l;switch(a){case 32:case 63:case 107:case 109:case 110:case 111:case 186:case 59:case 189:case 187:case 61:case 188:case 190:case 191:case 192:case 222:case 219:case 220:case 221:return l; +default:return q}},Bc=function(a){switch(a){case 61:return 187;case 59:return 186;case 224:return 91;case 0:return 224;default:return a}};var P=function(a,b){L.call(this);a&&Cc(this,a,b)};x(P,L);s=P.prototype;s.c=m;s.Ea=m;s.Va=m;s.Fa=m;s.s=-1;s.R=-1;s.gb=q; +var Dc={3:13,12:144,63232:38,63233:40,63234:37,63235:39,63236:112,63237:113,63238:114,63239:115,63240:116,63241:117,63242:118,63243:119,63244:120,63245:121,63246:122,63247:123,63248:44,63272:46,63273:36,63275:35,63276:33,63277:34,63289:144,63302:45},Ec={Up:38,Down:40,Left:37,Right:39,Enter:13,F1:112,F2:113,F3:114,F4:115,F5:116,F6:117,F7:118,F8:119,F9:120,F10:121,F11:122,F12:123,"U+007F":46,Home:36,End:35,PageUp:33,PageDown:34,Insert:45},Fc=B||D&&F("525"),Gc=E&&C; +P.prototype.Qb=function(a){if(D&&(17==this.s&&!a.ctrlKey||18==this.s&&!a.altKey||E&&91==this.s&&!a.metaKey))this.R=this.s=-1;-1==this.s&&(a.ctrlKey&&17!=a.keyCode?this.s=17:a.altKey&&18!=a.keyCode?this.s=18:a.metaKey&&91!=a.keyCode&&(this.s=91));Fc&&!Ac(a.keyCode,this.s,a.shiftKey,a.ctrlKey,a.altKey)?this.handleEvent(a):(this.R=C?Bc(a.keyCode):a.keyCode,Gc&&(this.gb=a.altKey))};P.prototype.Rb=function(a){this.R=this.s=-1;this.gb=a.altKey}; +P.prototype.handleEvent=function(a){var b=a.T,c,d,e=b.altKey;B&&"keypress"==a.type?(c=this.R,d=13!=c&&27!=c?b.keyCode:0):D&&"keypress"==a.type?(c=this.R,d=0<=b.charCode&&63232>b.charCode&&zc(c)?b.charCode:0):Ta?(c=this.R,d=zc(c)?b.keyCode:0):(c=b.keyCode||this.R,d=b.charCode||0,Gc&&(e=this.gb),E&&(63==d&&224==c)&&(c=191));var g=c,k=b.keyIdentifier;c?63232<=c&&c in Dc?g=Dc[c]:25==c&&a.shiftKey&&(g=9):k&&k in Ec&&(g=Ec[k]);a=g==this.s;this.s=g;b=new Hc(g,d,a,b);b.altKey=e;this.dispatchEvent(b)}; +P.prototype.a=function(){return this.c};var Cc=function(a,b,c){a.Fa&&a.detach();a.c=b;a.Ea=K(a.c,"keypress",a,c);a.Va=K(a.c,"keydown",a.Qb,c,a);a.Fa=K(a.c,"keyup",a.Rb,c,a)};P.prototype.detach=function(){this.Ea&&(Ub(this.Ea),Ub(this.Va),Ub(this.Fa),this.Fa=this.Va=this.Ea=m);this.c=m;this.R=this.s=-1};var 
Hc=function(a,b,c,d){d&&Gb(this,d,h);this.type="key";this.keyCode=a;this.charCode=b;this.repeat=c};x(Hc,I);var Jc=function(a,b){a||f(Error("Invalid class name "+a));v(b)||f(Error("Invalid decorator function "+b));Ic[a]=b},Kc={},Ic={};var Q=function(){};ba(Q);Q.prototype.$=function(){};var Lc=function(a,b){a&&(a.tabIndex=b?0:-1)};s=Q.prototype;s.q=function(a){return a.ib().q("div",this.wa(a).join(" "))};s.B=function(a){return a};s.da=function(a){return"DIV"==a.tagName};s.O=function(a,b){b.id&&mc(a,b.id);var c=this.w(),d=q,e=ib(b);e&&xa(e,function(b){b==c?d=l:b&&this.Za(a,b,c)},this);d||G(b,c);Mc(a,this.B(b));return b};s.Za=function(a,b,c){b==c+"-disabled"?a.sa(q):b==c+"-horizontal"?Nc(a,"horizontal"):b==c+"-vertical"&&Nc(a,"vertical")}; +var Mc=function(a,b){if(b)for(var c=b.firstChild,d;c&&c.parentNode==b;){d=c.nextSibling;if(1==c.nodeType){var e;t:{e=h;for(var g=ib(c),k=0,n=g.length;ka?c-1:a},0)},hd=function(a){id(a,function(a,c){return(a+1)%c},a.j)},gd=function(a){id(a,function(a,c){a--;return 0>a?c-1:a},a.j)},id=function(a,b,c){c=0>c?sc(a,a.h):c;var d=qc(a);c=b.call(a,c,d);for(var e=0;e<=d;){var g=O(a,c);if(g&&g.t()&&g.isEnabled()&&g.m&2){a.Ta(c);break}e++;c=b.call(a,c,d)}};W.prototype.Ta=function(a){dd(this,a)};var jd=function(){};x(jd,S);ba(jd);s=jd.prototype;s.w=function(){return"goog-tab"};s.$=function(){return"tab"};s.q=function(a){var b=jd.f.q.call(this,a);(a=a.Ra())&&this.Ua(b,a);return b};s.O=function(a,b){b=jd.f.O.call(this,a,b);var c=this.Ra(b);c&&(a.pb=c);if(a.g&8&&(c=a.getParent())&&v(c.ca))a.v(8,q),c.ca(a);return b};s.Ra=function(a){return a.title||""};s.Ua=function(a,b){a&&(a.title=b||"")};var kd=function(a,b,c){T.call(this,a,b||jd.ea(),c);Zc(this,8,l);this.aa|=9};x(kd,T);kd.prototype.Ra=function(){return this.pb};kd.prototype.Ua=function(a){this.Ab().Ua(this.a(),a);this.pb=a};Jc("goog-tab",function(){return new kd(m)});var X=function(){};x(X,Q);ba(X);X.prototype.w=function(){return"goog-tab-bar"};X.prototype.$=function(){return"tablist"};X.prototype.Za=function(a,b,c){this.Bb||(this.Ia||ld(this),this.Bb=Ja(this.Ia));var d=this.Bb[b];d?(Nc(a,md(d)),a.vb=d):X.f.Za.call(this,a,b,c)};X.prototype.wa=function(a){var b=X.f.wa.call(this,a);this.Ia||ld(this);b.push(this.Ia[a.vb]);return b};var ld=function(a){var b=a.w();a.Ia={top:b+"-top",bottom:b+"-bottom",start:b+"-start",end:b+"-end"}};var Y=function(a,b,c){a=a||"top";Nc(this,md(a));this.vb=a;W.call(this,this.P,b||X.ea(),c);nd(this)};x(Y,W);s=Y.prototype;s.Zb=l;s.K=m;s.F=function(){Y.f.F.call(this);nd(this)};s.removeChild=function(a,b){od(this,a);return Y.f.removeChild.call(this,a,b)};s.Ta=function(a){Y.f.Ta.call(this,a);this.Zb&&this.ca(O(this,a))};s.ca=function(a){a?Xc(a,l):this.K&&Xc(this.K,q)}; +var od=function(a,b){if(b&&b==a.K){for(var c=sc(a,b),d=c-1;b=O(a,d);d--)if(b.t()&&b.isEnabled()){a.ca(b);return}for(c+=1;b=O(a,c);c++)if(b.t()&&b.isEnabled()){a.ca(b);return}a.ca(m)}};s=Y.prototype;s.Xb=function(a){this.K&&this.K!=a.target&&Xc(this.K,q);this.K=a.target};s.Yb=function(a){a.target==this.K&&(this.K=m)};s.Vb=function(a){od(this,a.target)};s.Wb=function(a){od(this,a.target)};s.pa=function(){O(this,this.j)||this.C(this.K||O(this,0))}; +var nd=function(a){nc(a).d(a,"select",a.Xb).d(a,"unselect",a.Yb).d(a,"disable",a.Vb).d(a,"hide",a.Wb)},md=function(a){return"start"==a||"end"==a?"vertical":"horizontal"};Jc("goog-tab-bar",function(){return new Y});var Z=function(a,b,c,d,e){function 
g(a){a&&(a.tabIndex=0,wc(a,k.$()),G(a,"goog-zippy-header"),pd(k,a),a&&k.Mb.d(a,"keydown",k.Nb))}L.call(this);this.n=e||nb();this.X=this.n.a(a)||m;this.Ba=this.n.a(d||m);this.ia=(this.Na=v(b)?b:m)||!b?m:this.n.a(b);this.l=c==l;this.Mb=new $b(this);this.nb=new $b(this);var k=this;g(this.X);g(this.Ba);this.Y(this.l)};x(Z,L);s=Z.prototype;s.fa=l;s.$=function(){return"tab"};s.B=function(){return this.ia};s.toggle=function(){this.Y(!this.l)}; +s.Y=function(a){this.ia?fc(this.ia,a):a&&this.Na&&(this.ia=this.Na());this.ia&&G(this.ia,"goog-zippy-content");if(this.Ba)fc(this.X,!a),fc(this.Ba,a);else if(this.X){var b=this.X;a?G(b,"goog-zippy-expanded"):kb(b,"goog-zippy-expanded");b=this.X;!a?G(b,"goog-zippy-collapsed"):kb(b,"goog-zippy-collapsed");yc(this.X,"expanded",a)}this.l=a;this.dispatchEvent(new qd("toggle",this))};s.mb=function(){return this.fa};s.Ma=function(a){this.fa!=a&&((this.fa=a)?(pd(this,this.X),pd(this,this.Ba)):cc(this.nb))}; +var pd=function(a,b){b&&a.nb.d(b,"click",a.$b)};Z.prototype.Nb=function(a){if(13==a.keyCode||32==a.keyCode)this.toggle(),this.dispatchEvent(new H("action",this)),a.preventDefault(),a.stopPropagation()};Z.prototype.$b=function(){this.toggle();this.dispatchEvent(new H("action",this))};var qd=function(a,b){H.call(this,a,b)};x(qd,H);var sd=function(a,b){this.lb=[];for(var c=ob(a),c=pb("span","ae-zippy",c),d=0,e;e=c[d];d++){var g;if(e.parentNode.parentNode.parentNode.nextElementSibling!=h)g=e.parentNode.parentNode.parentNode.nextElementSibling;else for(g=e.parentNode.parentNode.parentNode.nextSibling;g&&1!=g.nodeType;)g=g.nextSibling;e=new Z(e,g,q);this.lb.push(e)}this.fc=new rd(this.lb,ob(b))};sd.prototype.ic=function(){return this.fc};sd.prototype.jc=function(){return this.lb}; +var rd=function(a,b){this.xa=a;if(this.xa.length)for(var c=0,d;d=this.xa[c];c++)K(d,"toggle",this.Ub,q,this);this.Ja=0;this.l=q;c="ae-toggle ae-plus ae-action";this.xa.length||(c+=" ae-disabled");this.V=tb("span",{className:c},"Expand All");K(this.V,"click",this.Tb,q,this);b&&b.appendChild(this.V)};rd.prototype.Tb=function(){this.xa.length&&this.Y(!this.l)}; +rd.prototype.Ub=function(a){a=a.currentTarget;this.Ja=a.l?this.Ja+1:this.Ja-1;a.l!=this.l&&(a.l?(this.l=l,td(this,l)):0==this.Ja&&(this.l=q,td(this,q)))};rd.prototype.Y=function(a){this.l=a;a=0;for(var b;b=this.xa[a];a++)b.l!=this.l&&b.Y(this.l);td(this)}; +var td=function(a,b){(b!==h?b:a.l)?(kb(a.V,"ae-plus"),G(a.V,"ae-minus"),wb(a.V,"Collapse All")):(kb(a.V,"ae-minus"),G(a.V,"ae-plus"),wb(a.V,"Expand All"))},ud=function(a){this.ac=a;this.Db={};var b,c=tb("div",{},b=tb("div",{id:"ae-stats-details-tabs",className:"goog-tab-bar goog-tab-bar-top"}),tb("div",{className:"goog-tab-bar-clear"}),a=tb("div",{id:"ae-stats-details-tabs-content",className:"goog-tab-content"})),d=new Y;d.O(b);K(d,"select",this.Cb,q,this);K(d,"unselect",this.Cb,q,this);b=0;for(var e;e= +this.ac[b];b++)if(e=ob("ae-stats-details-"+e)){var g=pb("h2",m,e)[0],k;k=g;var n=h;hb&&"innerText"in k?n=k.innerText.replace(/(\r\n|\r|\n)/g,"\n"):(n=[],Ab(k,n,l),n=n.join(""));n=n.replace(/ \xAD /g," ").replace(/\xAD/g,"");n=n.replace(/\u200B/g,"");hb||(n=n.replace(/ +/g," "));" "!=n&&(n=n.replace(/^\s*/,""));k=n;g&&g.parentNode&&g.parentNode.removeChild(g);g=new kd(k);this.Db[w(g)]=e;d.Da(g,l);a.appendChild(e);0==b?d.ca(g):fc(e,q)}ob("bd").appendChild(c)}; +ud.prototype.Cb=function(a){var 
b=this.Db[w(a.target)];fc(b,"select"==a.type)};ja("ae.Stats.Details.Tabs",ud);ja("goog.ui.Zippy",Z);Z.prototype.setExpanded=Z.prototype.Y;ja("ae.Stats.MakeZippys",sd);sd.prototype.getExpandCollapse=sd.prototype.ic;sd.prototype.getZippys=sd.prototype.jc;rd.prototype.setExpanded=rd.prototype.Y;var $=function(){this.ab=[];this.fb=[]},vd=[[5,0.2,1],[6,0.2,1.2],[5,0.25,1.25],[6,0.25,1.5],[4,0.5,2],[5,0.5,2.5],[6,0.5,3],[4,1,4],[5,1,5],[6,1,6],[4,2,8],[5,2,10]],wd=function(a){if(0>=a)return[2,0.5,1];for(var b=1;1>a;)a*=10,b/=10;for(;10<=a;)a/=10,b*=10;for(var c=0;c');a.write('
');for(var e=0;e<=b;e++)a.write(''),a.write(''),a.write(" "+e*c+"");a.write("
\n")}; +$.prototype.hc=function(){this.fb=[];var a=wd(this.cb),b=a[0],c=a[1],a=100/a[2];this.write('\n');xd(this,b,c,a);for(var d=0;d\n\n")}xd(this,b,c,a);this.write("
');0'),this.write(e.label),0"));this.write("");this.write('
');0');this.write('');0'));0 '),this.write(e.yb),this.write(""));0");this.write("
\n");return this.fb.join("")};$.prototype.gc=function(a,b,c,d,e,g){this.cb=Math.max(this.cb,Math.max(b+c,b+d));this.ab.push({label:a,start:b,duration:c,bb:d,yb:e,la:g})};ja("Gantt",$);$.prototype.add_bar=$.prototype.gc;$.prototype.draw=$.prototype.hc;})(); diff --git a/python/google/appengine/ext/blobstore/blobstore.py b/python/google/appengine/ext/blobstore/blobstore.py index 70f60b74..777fca95 100644 --- a/python/google/appengine/ext/blobstore/blobstore.py +++ b/python/google/appengine/ext/blobstore/blobstore.py @@ -73,6 +73,7 @@ __all__ = ['BLOB_INFO_KIND', 'fetch_data_async', 'create_gs_key', 'create_gs_key_async', + 'GS_PREFIX', 'get', 'parse_blob_info', 'parse_file_info'] @@ -102,6 +103,7 @@ BLOB_RANGE_HEADER = blobstore.BLOB_RANGE_HEADER MAX_BLOB_FETCH_SIZE = blobstore.MAX_BLOB_FETCH_SIZE UPLOAD_INFO_CREATION_HEADER = blobstore.UPLOAD_INFO_CREATION_HEADER CLOUD_STORAGE_OBJECT_HEADER = blobstore.CLOUD_STORAGE_OBJECT_HEADER +GS_PREFIX = blobstore.GS_PREFIX class BlobInfoParseError(Error): diff --git a/python/google/appengine/ext/cloudstorage/cloudstorage_stub.py b/python/google/appengine/ext/cloudstorage/cloudstorage_stub.py index 168515fa..596cfd2d 100644 --- a/python/google/appengine/ext/cloudstorage/cloudstorage_stub.py +++ b/python/google/appengine/ext/cloudstorage/cloudstorage_stub.py @@ -23,8 +23,9 @@ import calendar import datetime import hashlib import StringIO -import time +from google.appengine.api import datastore +from google.appengine.api import namespace_manager from google.appengine.api.blobstore import blobstore_stub from google.appengine.ext import db from google.appengine.ext.cloudstorage import common @@ -38,6 +39,7 @@ class _AE_GCSFileInfo_(db.Model): Key name is blobkey. """ + filename = db.StringProperty(required=True) finalized = db.BooleanProperty(required=True) @@ -49,7 +51,7 @@ class _AE_GCSFileInfo_(db.Model): size = db.IntegerProperty() creation = db.DateTimeProperty() - content_type = db.ByteStringProperty() + content_type = db.StringProperty() etag = db.ByteStringProperty() def get_options(self): @@ -73,22 +75,23 @@ class _AE_GCSFileInfo_(db.Model): class _AE_GCSPartialFile_(db.Model): """Store partial content for uploading files.""" - start = db.IntegerProperty(required=True) - end = db.IntegerProperty(required=True) + + end = db.IntegerProperty(required=True) + partial_content = db.TextProperty(required=True) class CloudStorageStub(object): - """Cloud Storage stub implementation. + """Google Cloud Storage stub implementation. We use blobstore stub to store files. All metadata are stored in _AE_GCSFileInfo_. - Note: this Cloud Storage stub is designed to work with + Note: this Google Cloud Storage stub is designed to work with apphosting.ext.cloudstorage.storage_api.py. It only implements the part of GCS storage_api.py uses, and its interface maps to GCS XML APIs. @@ -107,7 +110,7 @@ class CloudStorageStub(object): """Get blobkey for filename. Args: - filename: gs filename of form /bucket/filename. + filename: gcs filename of form /bucket/filename. Returns: blobinfo's datastore's key name, aka, blobkey. @@ -115,7 +118,7 @@ class CloudStorageStub(object): common.validate_file_path(filename) return blobstore_stub.BlobstoreServiceStub.CreateEncodedGoogleStorageKey( - filename) + filename[1:]) def post_start_creation(self, filename, options): """Start object creation with a POST. @@ -123,25 +126,29 @@ class CloudStorageStub(object): This implements the resumable upload XML API. Args: - filename: gs filename of form /bucket/filename. 
+ filename: gcs filename of form /bucket/filename. options: a dict containing all user specified request headers. e.g. {'content-type': 'foo', 'x-goog-meta-bar': 'bar'}. Returns: - a token used for continuing upload. Also used as blobkey to store - the content. + a token (blobkey) used for continuing upload. """ - common.validate_file_path(filename) - token = self._filename_to_blobkey(filename) - gcs_file = _AE_GCSFileInfo_.get_by_key_name(token) - - self._cleanup_old_file(gcs_file) - new_file = _AE_GCSFileInfo_(key_name=token, - filename=filename, - finalized=False) - new_file.options = options - new_file.put() - return token + ns = namespace_manager.get_namespace() + try: + namespace_manager.set_namespace('') + common.validate_file_path(filename) + token = self._filename_to_blobkey(filename) + gcs_file = _AE_GCSFileInfo_.get_by_key_name(token) + + self._cleanup_old_file(gcs_file) + new_file = _AE_GCSFileInfo_(key_name=token, + filename=filename, + finalized=False) + new_file.options = options + new_file.put() + return token + finally: + namespace_manager.set_namespace(ns) def _cleanup_old_file(self, gcs_file): @@ -162,7 +169,9 @@ class CloudStorageStub(object): db.delete(_AE_GCSPartialFile_.all().ancestor(gcs_file)) gcs_file.delete() - def put_continue_creation(self, token, content, content_range, last=False): + def put_continue_creation(self, token, content, content_range, + last=False, + _upload_filename=None): """Continue object upload with PUTs. This implements the resumable upload XML API. @@ -173,33 +182,48 @@ class CloudStorageStub(object): content_range: a (start, end) tuple specifying the content range of this chunk. Both are inclusive according to XML API. last: True if this is the last chunk of file content. + _upload_filename: internal use. Might be removed any time! This is + used by blobstore to pass in the upload filename from user. + + Returns: + _AE_GCSFileInfo entity for this file if the file is finalized. Raises: ValueError: if token is invalid. """ - gcs_file = _AE_GCSFileInfo_.get_by_key_name(token) - if not gcs_file: - raise ValueError('Invalid token') - if content: - start, end = content_range - if len(content) != (end - start + 1): - raise ValueError('Invalid content range %d-%d' % content_range) - blobkey = '%s-%d-%d' % (token, content_range[0], content_range[1]) - self.blob_storage.StoreBlob(blobkey, StringIO.StringIO(content)) - new_content = _AE_GCSPartialFile_(parent=gcs_file, - partial_content=blobkey, - start=start, - end=end + 1) - new_content.put() - if last: - self._end_creation(token) - - def _end_creation(self, token): + ns = namespace_manager.get_namespace() + try: + namespace_manager.set_namespace('') + gcs_file = _AE_GCSFileInfo_.get_by_key_name(token) + if not gcs_file: + raise ValueError('Invalid token') + if content: + start, end = content_range + if len(content) != (end - start + 1): + raise ValueError('Invalid content range %d-%d' % content_range) + blobkey = '%s-%d-%d' % (token, content_range[0], content_range[1]) + self.blob_storage.StoreBlob(blobkey, StringIO.StringIO(content)) + new_content = _AE_GCSPartialFile_(parent=gcs_file, + + key_name='{:020}'.format(start), + partial_content=blobkey, + start=start, + end=end + 1) + new_content.put() + if last: + return self._end_creation(token, _upload_filename) + finally: + namespace_manager.set_namespace(ns) + + def _end_creation(self, token, _upload_filename): """End object upload. Args: token: upload token returned by post_start_creation. + Returns: + _AE_GCSFileInfo Entity for this file. 
+ Raises: ValueError: if token is invalid. Or file is corrupted during upload. @@ -217,10 +241,21 @@ class CloudStorageStub(object): gcs_file.creation = datetime.datetime.utcnow() gcs_file.size = len(content) + + + blob_info = datastore.Entity('__BlobInfo__', name=str(token), namespace='') + blob_info['content_type'] = gcs_file.content_type + blob_info['creation'] = gcs_file.creation + blob_info['filename'] = _upload_filename + blob_info['md5_hash'] = gcs_file.etag + blob_info['size'] = gcs_file.size + datastore.Put(blob_info) + self.blob_storage.StoreBlob(token, StringIO.StringIO(content)) gcs_file.finalized = True gcs_file.put() + return gcs_file @db.transactional def _get_content(self, gcs_file): @@ -237,11 +272,13 @@ class CloudStorageStub(object): content = '' previous_end = 0 error_msg = '' - for partial in _AE_GCSPartialFile_.all().ancestor(gcs_file).order('start'): + for partial in (_AE_GCSPartialFile_.all(namespace='').ancestor(gcs_file). + order('__key__')): + start = int(partial.key().name()) if not error_msg: - if partial.start < previous_end: + if start < previous_end: error_msg = 'File is corrupted due to missing chunks.' - elif partial.start > previous_end: + elif start > previous_end: error_msg = 'File is corrupted due to overlapping chunks' previous_end = partial.end content += self.blob_storage.OpenBlob(partial.partial_content).read() @@ -260,7 +297,7 @@ class CloudStorageStub(object): """Get bucket listing with a GET. Args: - bucketpath: gs bucket path of form '/bucket' + bucketpath: gcs bucket path of form '/bucket' prefix: prefix to limit listing. marker: a str after which to start listing. max_keys: max size of listing. @@ -269,7 +306,7 @@ class CloudStorageStub(object): for details. Returns: - A list of CSFileStat sorted by filename. + A list of GCSFileStat sorted by filename. """ common.validate_bucket_path(bucketpath) q = _AE_GCSFileInfo_.all(namespace='') @@ -282,18 +319,21 @@ class CloudStorageStub(object): for info in q.run(limit=max_keys): if not info.filename.startswith(fully_qualified_prefix): break - result.append(common.CSFileStat( - filename=info.filename, - st_size=info.size, - st_ctime=calendar.timegm(info.creation.utctimetuple()), - etag=info.etag)) + + info = db.get(info.key()) + if info: + result.append(common.GCSFileStat( + filename=info.filename, + st_size=info.size, + st_ctime=calendar.timegm(info.creation.utctimetuple()), + etag=info.etag)) return result def get_object(self, filename, start=0, end=None): """Get file content with a GET. Args: - filename: gs filename of form '/bucket/filename'. + filename: gcs filename of form '/bucket/filename'. start: start offset to request. Inclusive. end: end offset to request. Inclusive. @@ -305,8 +345,9 @@ class CloudStorageStub(object): """ common.validate_file_path(filename) blobkey = self._filename_to_blobkey(filename) - gsfileinfo = _AE_GCSFileInfo_.get_by_key_name(blobkey) - if not gsfileinfo or not gsfileinfo.finalized: + key = blobstore_stub.BlobstoreServiceStub.ToDatastoreBlobKey(blobkey) + gcsfileinfo = db.get(key) + if not gcsfileinfo or not gcsfileinfo.finalized: raise ValueError('File does not exist.') local_file = self.blob_storage.OpenBlob(blobkey) local_file.seek(start) @@ -319,17 +360,18 @@ class CloudStorageStub(object): """Get file stat with a HEAD. Args: - filename: gs filename of form '/bucket/filename' + filename: gcs filename of form '/bucket/filename' Returns: - A CSFileStat object containing file stat. None if file doesn't exist. + A GCSFileStat object containing file stat. 
None if file doesn't exist. """ common.validate_file_path(filename) blobkey = self._filename_to_blobkey(filename) - info = _AE_GCSFileInfo_.get_by_key_name(blobkey) + key = blobstore_stub.BlobstoreServiceStub.ToDatastoreBlobKey(blobkey) + info = db.get(key) if info and info.finalized: metadata = common.get_metadata(info.options) - filestat = common.CSFileStat( + filestat = common.GCSFileStat( filename=info.filename, st_size=info.size, etag=info.etag, @@ -343,16 +385,17 @@ class CloudStorageStub(object): """Delete file with a DELETE. Args: - filename: gs filename of form '/bucket/filename' + filename: gcs filename of form '/bucket/filename' Returns: True if file is deleted. False if file doesn't exist. """ common.validate_file_path(filename) blobkey = self._filename_to_blobkey(filename) - gsfileinfo = _AE_GCSFileInfo_.get_by_key_name(blobkey) - if not gsfileinfo: + key = blobstore_stub.BlobstoreServiceStub.ToDatastoreBlobKey(blobkey) + gcsfileinfo = db.get(key) + if not gcsfileinfo: return False - gsfileinfo.delete() - self.blob_storage.DeleteBlob(blobkey) + + blobstore_stub.BlobstoreServiceStub.DeleteBlob(blobkey, self.blob_storage) return True diff --git a/python/google/appengine/ext/cloudstorage/common.py b/python/google/appengine/ext/cloudstorage/common.py index 092cae28..b6b68fb1 100644 --- a/python/google/appengine/ext/cloudstorage/common.py +++ b/python/google/appengine/ext/cloudstorage/common.py @@ -45,6 +45,7 @@ __all__ = ['CS_XML_NS', 'local_run', 'get_access_token', 'get_metadata', + 'GCSFileStat', 'http_time_to_posix', 'memory_usage', 'posix_time_to_http', @@ -70,10 +71,10 @@ except ImportError: from google.appengine.api import runtime -_CS_BUCKET_REGEX = re.compile(r'/[a-z0-9\.\-_]{3,}$') -_CS_FULLPATH_REGEX = re.compile(r'/[a-z0-9\.\-_]{3,}/.*') -_CS_OPTIONS = ('x-goog-acl', - 'x-goog-meta-') +_GCS_BUCKET_REGEX = re.compile(r'/[a-z0-9\.\-_]{3,}$') +_GCS_FULLPATH_REGEX = re.compile(r'/[a-z0-9\.\-_]{3,}/.*') +_GCS_OPTIONS = ('x-goog-acl', + 'x-goog-meta-') CS_XML_NS = 'http://doc.s3.amazonaws.com/2006-03-01' @@ -83,14 +84,14 @@ _access_token = '' def set_access_token(access_token): - """Set the shared access token to authenticate with Cloud Storage. + """Set the shared access token to authenticate with Google Cloud Storage. When set, the library will always attempt to communicate with the - real Cloud Storage with this token even when running on dev appserver. + real Google Cloud Storage with this token even when running on dev appserver. Note the token could expire so it's up to you to renew it. When absent, the library will automatically request and refresh a token - on appserver, or when on dev appserver, talk to a Cloud Storage + on appserver, or when on dev appserver, talk to a Google Cloud Storage stub. Args: @@ -106,8 +107,8 @@ def get_access_token(): return _access_token -class CSFileStat(object): - """Container for CS file stat.""" +class GCSFileStat(object): + """Container for GCS file stat.""" def __init__(self, filename, @@ -119,7 +120,7 @@ class CSFileStat(object): """Initialize. Args: - filename: a Google Storage filename of form '/bucket/filename'. + filename: a Google Cloud Storage filename of form '/bucket/filename'. st_size: file size in bytes. long compatible. etag: hex digest of the md5 hash of the file's content. str. st_ctime: posix file creation time. float compatible. 
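Note on the CSFileStat -> GCSFileStat rename in common.py: only the class name and docstrings change, and the next hunk adds a module-level alias (CSFileStat = GCSFileStat) so callers that still import the old name keep working. A minimal usage sketch, assuming the SDK's google.appengine.ext.cloudstorage.common module is importable; the path, size, and etag values below are illustrative and not taken from this patch:

    import calendar
    import datetime

    from google.appengine.ext.cloudstorage import common

    # Paths must look like '/bucket/object'; validate_file_path raises
    # ValueError for anything that does not match _GCS_FULLPATH_REGEX.
    common.validate_file_path('/my-bucket/some/object')

    # Build a stat record with the same keyword arguments the stub uses
    # in CloudStorageStub.get_bucket().
    stat = common.GCSFileStat(
        filename='/my-bucket/some/object',
        st_size=3,
        etag='acbd18db4cc2f85cedef654fccc4a4d8',  # hex md5 digest of the content
        st_ctime=calendar.timegm(datetime.datetime.utcnow().utctimetuple()))

    # Backward compatibility: the old name is a plain alias of the new class.
    assert common.CSFileStat is common.GCSFileStat

Because the alias is a simple assignment, existing code that still references common.CSFileStat behaves exactly as before the rename.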
@@ -150,6 +151,10 @@ class CSFileStat(object): metadata=self.metadata)) + +CSFileStat = GCSFileStat + + def get_metadata(headers): """Get user defined metadata from HTTP response headers.""" return dict((k, v) for k, v in headers.iteritems() @@ -157,23 +162,22 @@ def get_metadata(headers): def validate_bucket_path(path): - """Validate a Google Storage bucket path. + """Validate a Google Cloud Storage bucket path. Args: path: a Google Storage bucket path. It should have form '/bucket'. - is_bucket: whether this is a bucket path or file path. Raises: ValueError: if path is invalid. """ _validate_path(path) - if not _CS_BUCKET_REGEX.match(path): + if not _GCS_BUCKET_REGEX.match(path): raise ValueError('Bucket should have format /bucket ' 'but got %s' % path) def validate_file_path(path): - """Validate a Google Storage file path. + """Validate a Google Cloud Storage file path. Args: path: a Google Storage file path. It should have form '/bucket/filename'. @@ -182,7 +186,7 @@ def validate_file_path(path): ValueError: if path is invalid. """ _validate_path(path) - if not _CS_FULLPATH_REGEX.match(path): + if not _GCS_FULLPATH_REGEX.match(path): raise ValueError('Path should have format /bucket/filename ' 'but got %s' % path) @@ -206,10 +210,10 @@ def _validate_path(path): def validate_options(options): - """Validate Cloud Storage options. + """Validate Google Cloud Storage options. Args: - options: a str->basestring dict of options to pass to Cloud Storage. + options: a str->basestring dict of options to pass to Google Cloud Storage. Raises: ValueError: if option is not supported. @@ -222,7 +226,7 @@ def validate_options(options): for k, v in options.iteritems(): if not isinstance(k, str): raise TypeError('option %r should be a str.' % k) - if not any(k.startswith(valid) for valid in _CS_OPTIONS): + if not any(k.startswith(valid) for valid in _GCS_OPTIONS): raise ValueError('option %s is not supported.' % k) if not isinstance(v, basestring): raise TypeError('value %r for option %s should be of type basestring.' % diff --git a/python/google/appengine/ext/cloudstorage/stub_dispatcher.py b/python/google/appengine/ext/cloudstorage/stub_dispatcher.py index b5e1131d..d970550e 100644 --- a/python/google/appengine/ext/cloudstorage/stub_dispatcher.py +++ b/python/google/appengine/ext/cloudstorage/stub_dispatcher.py @@ -24,6 +24,7 @@ +import httplib import re import urllib import urlparse @@ -100,19 +101,19 @@ def dispatch(method, headers, url, payload): ValueError: invalid request method. """ method, headers, filename, param_dict = _preprocess(method, headers, url) - gs_stub = cloudstorage_stub.CloudStorageStub( + gcs_stub = cloudstorage_stub.CloudStorageStub( apiproxy_stub_map.apiproxy.GetStub('blobstore').storage) if method == 'POST': - return _handle_post(gs_stub, filename, headers) + return _handle_post(gcs_stub, filename, headers) elif method == 'PUT': - return _handle_put(gs_stub, filename, param_dict, headers, payload) + return _handle_put(gcs_stub, filename, param_dict, headers, payload) elif method == 'GET': - return _handle_get(gs_stub, filename, param_dict, headers) + return _handle_get(gcs_stub, filename, param_dict, headers) elif method == 'HEAD': - return _handle_head(gs_stub, filename) + return _handle_head(gcs_stub, filename) elif method == 'DELETE': - return _handle_delete(gs_stub, filename) + return _handle_delete(gcs_stub, filename) raise ValueError('Unrecognized request method %r.' % method) @@ -120,7 +121,7 @@ def _preprocess(method, headers, url): """Unify input. 
Example: - _preprocess('POST', {'Content-Type': 'Foo'}, http://gs.com/b/f?foo=bar) + _preprocess('POST', {'Content-Type': 'Foo'}, http://gcs.com/b/f?foo=bar) -> 'POST', {'content-type': 'Foo'}, '/b/f', {'foo':'bar'} Args: @@ -144,10 +145,10 @@ def _preprocess(method, headers, url): return method, headers, filename, param_dict -def _handle_post(gs_stub, filename, headers): +def _handle_post(gcs_stub, filename, headers): """Handle POST that starts object creation.""" content_type = _ContentType(headers) - token = gs_stub.post_start_creation(filename, headers) + token = gcs_stub.post_start_creation(filename, headers) response_headers = { 'location': 'https://storage.googleapis.com/%s?%s' % ( filename, @@ -158,48 +159,39 @@ def _handle_post(gs_stub, filename, headers): return _FakeUrlFetchResult(201, response_headers, '') -def _handle_put(gs_stub, filename, param_dict, headers, payload): +def _handle_put(gcs_stub, filename, param_dict, headers, payload): """Handle PUT that continues object creation.""" token = _get_param('upload_id', param_dict) content_range = _ContentRange(headers) - if content_range.value and not content_range.finished: - gs_stub.put_continue_creation(token, - payload, - (content_range.start, content_range.end)) - response_headers = {} - response_status = 308 - elif content_range.value and content_range.finished: - gs_stub.put_continue_creation(token, - payload, - (content_range.start, content_range.end), - last=True) - filestat = gs_stub.head_object(filename) - response_headers = { - 'content-length': filestat.st_size, - } - response_status = 200 - elif not payload: + if not content_range.value: + raise ValueError('Missing header content-range.') - gs_stub.put_continue_creation(token, '', None, True) - filestat = gs_stub.head_object(filename) + gcs_stub.put_continue_creation(token, + payload, + content_range.range, + content_range.last) + if content_range.last: + filestat = gcs_stub.head_object(filename) response_headers = { 'content-length': filestat.st_size, } - response_status = 200 + response_status = httplib.OK else: - raise ValueError('Missing header content-range but has payload') + response_headers = {} + response_status = 308 + return _FakeUrlFetchResult(response_status, response_headers, '') -def _handle_get(gs_stub, filename, param_dict, headers): +def _handle_get(gcs_stub, filename, param_dict, headers): """Handle GET object and GET bucket.""" if filename.rfind('/') == 0: - return _handle_get_bucket(gs_stub, filename, param_dict) + return _handle_get_bucket(gcs_stub, filename, param_dict) else: - result = _handle_head(gs_stub, filename) + result = _handle_head(gcs_stub, filename) if result.status_code == 404: return result start, end = _Range(headers).value @@ -209,20 +201,20 @@ def _handle_get(gs_stub, filename, param_dict, headers): result.headers['content-range'] = 'bytes: %d-%d/%d' % (start, end, st_size) - result.content = gs_stub.get_object(filename, start, end) + result.content = gcs_stub.get_object(filename, start, end) return result -def _handle_get_bucket(gs_stub, bucketpath, param_dict): +def _handle_get_bucket(gcs_stub, bucketpath, param_dict): """Handle get bucket request.""" prefix = _get_param('prefix', param_dict, '') max_keys = _get_param('max-keys', param_dict, _MAX_GET_BUCKET_RESULT) marker = _get_param('marker', param_dict, '') - stats = gs_stub.get_bucket(bucketpath, - prefix, - marker, - max_keys) + stats = gcs_stub.get_bucket(bucketpath, + prefix, + marker, + max_keys) builder = ET.TreeBuilder() builder.start('ListBucketResult', {'xmlns': 
common.CS_XML_NS}) @@ -269,9 +261,9 @@ def _handle_get_bucket(gs_stub, bucketpath, param_dict): return _FakeUrlFetchResult(200, response_headers, body) -def _handle_head(gs_stub, filename): +def _handle_head(gcs_stub, filename): """Handle HEAD request.""" - filestat = gs_stub.head_object(filename) + filestat = gcs_stub.head_object(filename) if not filestat: return _FakeUrlFetchResult(404, {}, '') @@ -290,9 +282,9 @@ def _handle_head(gs_stub, filename): return _FakeUrlFetchResult(200, response_headers, '') -def _handle_delete(gs_stub, filename): +def _handle_delete(gcs_stub, filename): """Handle DELETE object.""" - if gs_stub.delete_object(filename): + if gcs_stub.delete_object(filename): return _FakeUrlFetchResult(204, {}, '') else: return _FakeUrlFetchResult(404, {}, '') @@ -333,12 +325,13 @@ class _ContentType(_Header): class _ContentRange(_Header): """Content-Range header. - Used by resumable upload. Possible formats: - Content-Range: bytes 2-4/5 or Content-Range: bytes 1-3/* + Used by resumable upload of unknown size. Possible formats: + Content-Range: bytes 1-3/* (for uploading of unknown size) + Content-Range: bytes */5 (for finalizing with no data) """ HEADER = 'Content-Range' - RE_PATTERN = re.compile(r'^bytes ([0-9]+)-([0-9]+)/([0-9]+|\*)$') + RE_PATTERN = re.compile(r'^bytes (([0-9]+)-([0-9]+)|\*)/([0-9]+|\*)$') def __init__(self, headers): super(_ContentRange, self).__init__(headers) @@ -346,9 +339,15 @@ class _ContentRange(_Header): result = self.RE_PATTERN.match(self.value) if not result: raise ValueError('Invalid content-range header %s' % self.value) - self.start = long(result.group(1)) - self.end = long(result.group(2)) - self.finished = result.group(3) != '*' + + self.no_data = result.group(1) == '*' + self.last = result.group(4) != '*' + if self.no_data and not self.last: + raise ValueError('Invalid content-range header %s' % self.value) + + self.range = None + if not self.no_data: + self.range = (long(result.group(2)), long(result.group(3))) class _Range(_Header): diff --git a/python/google/appengine/ext/datastore_admin/backup_handler.py b/python/google/appengine/ext/datastore_admin/backup_handler.py index 4ae16a05..5e28fcaa 100644 --- a/python/google/appengine/ext/datastore_admin/backup_handler.py +++ b/python/google/appengine/ext/datastore_admin/backup_handler.py @@ -1572,7 +1572,7 @@ class RestoreEntity(object): if not self.kind_filter or entity.kind() in self.kind_filter: yield op.db.Put(entity) if self.app_id: - yield utils.AllocateMaxId(entity.key(), self.app_id) + yield utils.ReserveKey(entity.key(), self.app_id) def validate_gs_bucket_name(bucket_name): diff --git a/python/google/appengine/ext/datastore_admin/copy_handler.py b/python/google/appengine/ext/datastore_admin/copy_handler.py index 8785e9c6..6dcba3b6 100644 --- a/python/google/appengine/ext/datastore_admin/copy_handler.py +++ b/python/google/appengine/ext/datastore_admin/copy_handler.py @@ -236,7 +236,7 @@ class CopyEntity(object): target_entity = datastore.Entity._FromPb(entity_proto) yield operation.db.Put(target_entity) - yield utils.AllocateMaxId(key, target_app) + yield utils.ReserveKey(key, target_app) yield operation.counters.Increment(KindPathFromKey(key)) diff --git a/python/google/appengine/ext/datastore_admin/static/css/compiled.css b/python/google/appengine/ext/datastore_admin/static/css/compiled.css index 62a8dd43..7bba4f31 100644 --- a/python/google/appengine/ext/datastore_admin/static/css/compiled.css +++ b/python/google/appengine/ext/datastore_admin/static/css/compiled.css @@ -1,2 +1,2 
@@ /* Copyright 2013 Google Inc. All Rights Reserved. */ -html,body,div,h1,h2,h3,h4,h5,h6,p,img,dl,dt,dd,ol,ul,li,table,caption,tbody,tfoot,thead,tr,th,td,form,fieldset,embed,object,applet{margin:0;padding:0;border:0;}body{font-size:62.5%;font-family:Arial,sans-serif;color:#000;background:#fff}a{color:#00c}a:active{color:#f00}a:visited{color:#551a8b}table{border-collapse:collapse;border-width:0;empty-cells:show}ul{padding:0 0 1em 1em}ol{padding:0 0 1em 1.3em}li{line-height:1.5em;padding:0 0 .5em 0}p{padding:0 0 1em 0}h1,h2,h3,h4,h5{padding:0 0 1em 0}h1,h2{font-size:1.3em}h3{font-size:1.1em}h4,h5,table{font-size:1em}sup,sub{font-size:.7em}input,select,textarea,option{font-family:inherit;font-size:inherit}.g-doc,.g-doc-1024,.g-doc-800{font-size:130%}.g-doc{width:100%;text-align:left}.g-section{width:100%;vertical-align:top;display:inline-block}*:first-child+html .g-section{display:block}* html .g-section{overflow:hidden}@-moz-document url-prefix(''){.g-section{overflow:hidden}}@-moz-document url-prefix(''){.g-section,tt:default{overflow:visible}}.g-section,.g-unit{zoom:1}.g-split .g-unit{text-align:right}.g-split .g-first{text-align:left}.g-doc-1024{width:73.074em;min-width:950px;margin:0 auto;text-align:left}* html .g-doc-1024{width:71.313em}*+html .g-doc-1024{width:71.313em}.g-doc-800{width:57.69em;min-width:750px;margin:0 auto;text-align:left}* html .g-doc-800{width:56.3em}*+html .g-doc-800{width:56.3em}.g-tpl-160 .g-unit,.g-unit .g-tpl-160 .g-unit,.g-unit .g-unit .g-tpl-160 .g-unit,.g-unit .g-unit .g-unit .g-tpl-160 .g-unit{margin:0 0 0 160px;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-160 .g-first,.g-unit .g-unit .g-tpl-160 .g-first,.g-unit .g-tpl-160 .g-first,.g-tpl-160 .g-first{margin:0;width:160px;float:left}.g-tpl-160-alt .g-unit,.g-unit .g-tpl-160-alt .g-unit,.g-unit .g-unit .g-tpl-160-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-160-alt .g-unit{margin:0 160px 0 0;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-160-alt .g-first,.g-unit .g-unit .g-tpl-160-alt .g-first,.g-unit .g-tpl-160-alt .g-first,.g-tpl-160-alt .g-first{margin:0;width:160px;float:right}.g-tpl-180 .g-unit,.g-unit .g-tpl-180 .g-unit,.g-unit .g-unit .g-tpl-180 .g-unit,.g-unit .g-unit .g-unit .g-tpl-180 .g-unit{margin:0 0 0 180px;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-180 .g-first,.g-unit .g-unit .g-tpl-180 .g-first,.g-unit .g-tpl-180 .g-first,.g-tpl-180 .g-first{margin:0;width:180px;float:left}.g-tpl-180-alt .g-unit,.g-unit .g-tpl-180-alt .g-unit,.g-unit .g-unit .g-tpl-180-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-180-alt .g-unit{margin:0 180px 0 0;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-180-alt .g-first,.g-unit .g-unit .g-tpl-180-alt .g-first,.g-unit .g-tpl-180-alt .g-first,.g-tpl-180-alt .g-first{margin:0;width:180px;float:right}.g-tpl-300 .g-unit,.g-unit .g-tpl-300 .g-unit,.g-unit .g-unit .g-tpl-300 .g-unit,.g-unit .g-unit .g-unit .g-tpl-300 .g-unit{margin:0 0 0 300px;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-300 .g-first,.g-unit .g-unit .g-tpl-300 .g-first,.g-unit .g-tpl-300 .g-first,.g-tpl-300 .g-first{margin:0;width:300px;float:left}.g-tpl-300-alt .g-unit,.g-unit .g-tpl-300-alt .g-unit,.g-unit .g-unit .g-tpl-300-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-300-alt .g-unit{margin:0 300px 0 0;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-300-alt .g-first,.g-unit .g-unit .g-tpl-300-alt .g-first,.g-unit .g-tpl-300-alt .g-first,.g-tpl-300-alt .g-first{margin:0;width:300px;float:right}.g-tpl-25-75 .g-unit,.g-unit .g-tpl-25-75 .g-unit,.g-unit .g-unit 
.g-tpl-25-75 .g-unit,.g-unit .g-unit .g-unit .g-tpl-25-75 .g-unit{width:74.999%;float:right;margin:0}.g-unit .g-unit .g-unit .g-tpl-25-75 .g-first,.g-unit .g-unit .g-tpl-25-75 .g-first,.g-unit .g-tpl-25-75 .g-first,.g-tpl-25-75 .g-first{width:24.999%;float:left;margin:0}.g-tpl-25-75-alt .g-unit,.g-unit .g-tpl-25-75-alt .g-unit,.g-unit .g-unit .g-tpl-25-75-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-25-75-alt .g-unit{width:24.999%;float:left;margin:0}.g-unit .g-unit .g-unit .g-tpl-25-75-alt .g-first,.g-unit .g-unit .g-tpl-25-75-alt .g-first,.g-unit .g-tpl-25-75-alt .g-first,.g-tpl-25-75-alt .g-first{width:74.999%;float:right;margin:0}.g-tpl-75-25 .g-unit,.g-unit .g-tpl-75-25 .g-unit,.g-unit .g-unit .g-tpl-75-25 .g-unit,.g-unit .g-unit .g-unit .g-tpl-75-25 .g-unit{width:24.999%;float:right;margin:0}.g-unit .g-unit .g-unit .g-tpl-75-25 .g-first,.g-unit .g-unit .g-tpl-75-25 .g-first,.g-unit .g-tpl-75-25 .g-first,.g-tpl-75-25 .g-first{width:74.999%;float:left;margin:0}.g-tpl-75-25-alt .g-unit,.g-unit .g-tpl-75-25-alt .g-unit,.g-unit .g-unit .g-tpl-75-25-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-75-25-alt .g-unit{width:74.999%;float:left;margin:0}.g-unit .g-unit .g-unit .g-tpl-75-25-alt .g-first,.g-unit .g-unit .g-tpl-75-25-alt .g-first,.g-unit .g-tpl-75-25-alt .g-first,.g-tpl-75-25-alt .g-first{width:24.999%;float:right;margin:0}.g-tpl-33-67 .g-unit,.g-unit .g-tpl-33-67 .g-unit,.g-unit .g-unit .g-tpl-33-67 .g-unit,.g-unit .g-unit .g-unit .g-tpl-33-67 .g-unit{width:66.999%;float:right;margin:0}.g-unit .g-unit .g-unit .g-tpl-33-67 .g-first,.g-unit .g-unit .g-tpl-33-67 .g-first,.g-unit .g-tpl-33-67 .g-first,.g-tpl-33-67 .g-first{width:32.999%;float:left;margin:0}.g-tpl-33-67-alt .g-unit,.g-unit .g-tpl-33-67-alt .g-unit,.g-unit .g-unit .g-tpl-33-67-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-33-67-alt .g-unit{width:32.999%;float:left;margin:0}.g-unit .g-unit .g-unit .g-tpl-33-67-alt .g-first,.g-unit .g-unit .g-tpl-33-67-alt .g-first,.g-unit .g-tpl-33-67-alt .g-first,.g-tpl-33-67-alt .g-first{width:66.999%;float:right;margin:0}.g-tpl-67-33 .g-unit,.g-unit .g-tpl-67-33 .g-unit,.g-unit .g-unit .g-tpl-67-33 .g-unit,.g-unit .g-unit .g-unit .g-tpl-67-33 .g-unit{width:32.999%;float:right;margin:0}.g-unit .g-unit .g-unit .g-tpl-67-33 .g-first,.g-unit .g-unit .g-tpl-67-33 .g-first,.g-unit .g-tpl-67-33 .g-first,.g-tpl-67-33 .g-first{width:66.999%;float:left;margin:0}.g-tpl-67-33-alt .g-unit,.g-unit .g-tpl-67-33-alt .g-unit,.g-unit .g-unit .g-tpl-67-33-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-67-33-alt .g-unit{width:66.999%;float:left;margin:0}.g-unit .g-unit .g-unit .g-tpl-67-33-alt .g-first,.g-unit .g-unit .g-tpl-67-33-alt .g-first,.g-unit .g-tpl-67-33-alt .g-first,.g-tpl-67-33-alt .g-first{width:32.999%;float:right;margin:0}.g-tpl-50-50 .g-unit,.g-unit .g-tpl-50-50 .g-unit,.g-unit .g-unit .g-tpl-50-50 .g-unit,.g-unit .g-unit .g-unit .g-tpl-50-50 .g-unit{width:49.999%;float:right;margin:0}.g-unit .g-unit .g-unit .g-tpl-50-50 .g-first,.g-unit .g-unit .g-tpl-50-50 .g-first,.g-unit .g-tpl-50-50 .g-first,.g-tpl-50-50 .g-first{width:49.999%;float:left;margin:0}.g-tpl-50-50-alt .g-unit,.g-unit .g-tpl-50-50-alt .g-unit,.g-unit .g-unit .g-tpl-50-50-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-50-50-alt .g-unit{width:49.999%;float:left;margin:0}.g-unit .g-unit .g-unit .g-tpl-50-50-alt .g-first,.g-unit .g-unit .g-tpl-50-50-alt .g-first,.g-unit .g-tpl-50-50-alt .g-first,.g-tpl-50-50-alt .g-first{width:49.999%;float:right;margin:0}.g-tpl-nest{width:auto}.g-tpl-nest .g-section{display:inline}.g-tpl-nest .g-unit,.g-unit 
.g-tpl-nest .g-unit,.g-unit .g-unit .g-tpl-nest .g-unit,.g-unit .g-unit .g-unit .g-tpl-nest .g-unit{float:left;width:auto;margin:0}.g-tpl-nest-alt .g-unit,.g-unit .g-tpl-nest-alt .g-unit,.g-unit .g-unit .g-tpl-nest-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-nest-alt .g-unit{float:right;width:auto;margin:0}.goog-button{border-width:1px;border-style:solid;border-color:#bbb #999 #999 #bbb;border-radius:2px;-webkit-border-radius:2px;-moz-border-radius:2px;font:normal normal normal 13px/13px Arial,sans-serif;color:#000;text-align:middle;text-decoration:none;text-shadow:0 1px 1px rgba(255,255,255,1);background:#eee;background-image:-webkit-gradient(linear,0 0,0 100%,from(#fff),to(#ddd));background:-moz-linear-gradient(top,#fff,#ddd);filter:progid:DXImageTransform.Microsoft.Gradient(EndColorstr='#dddddd',StartColorstr='#ffffff',GradientType=0);cursor:pointer;margin:0;display:inline;display:-moz-inline-box;display:inline-block;*overflow:visible;padding:4px 8px 5px}a.goog-button,span.goog-button,div.goog-button{padding:4px 8px 5px}.goog-button:visited{color:#000}.goog-button{*display:inline}.goog-button:focus,.goog-button:hover{border-color:#000}.goog-button:active,.goog-button-active{color:#000;background-color:#bbb;border-color:#999 #bbb #bbb #999;background-image:-webkit-gradient(linear,0 0,0 100%,from(#ddd),to(#fff));background-image:-moz-linear-gradient(top,#ddd,#fff);filter:progid:DXImageTransform.Microsoft.Gradient(EndColorstr='#ffffff',StartColorstr='#dddddd',GradientType=0)}.goog-button[disabled],.goog-button[disabled]:active,.goog-button[disabled]:hover{color:#666;border-color:#ddd;background-color:#f3f3f3;background-image:none;text-shadow:none;cursor:auto}.goog-button{padding:5px 8px 4px }.goog-button{*padding:4px 7px 2px}html>body input.goog-button,x:-moz-any-link,x:default,html>body button.goog-button,x:-moz-any-link,x:default{padding-top:3px;padding-bottom:2px}a.goog-button,x:-moz-any-link,x:default,span.goog-button,x:-moz-any-link,x:default,div.goog-button,x:-moz-any-link,x:default{padding:4px 8px 5px}.goog-button-fixed{padding-left:0!important;padding-right:0!important;width:100%}button.goog-button-icon-c{padding-top:1px;padding-bottom:1px}button.goog-button-icon-c{padding-top:3px ;padding-bottom:2px }button.goog-button-icon-c{*padding-top:0;*padding-bottom:0}html>body button.goog-button-icon-c,x:-moz-any-link,x:default{padding-top:1px;padding-bottom:1px}.goog-button-icon{display:block;margin:0 auto;height:18px;width:18px}html>body .goog-inline-block{display:-moz-inline-box;display:inline-block;}.goog-inline-block{position:relative;display:inline-block}* html .goog-inline-block{display:inline}*:first-child+html .goog-inline-block{display:inline}.goog-custom-button{margin:0 2px 2px;border:0;padding:0;font:normal Tahoma,Arial,sans-serif;color:#000;text-decoration:none;list-style:none;vertical-align:middle;cursor:pointer;outline:none;background:#eee;background-image:-webkit-gradient(linear,0 0,0 100%,from(#fff),to(#ddd));background:-moz-linear-gradient(top,#fff,#ddd);filter:progid:DXImageTransform.Microsoft.Gradient(EndColorstr='#dddddd',StartColorstr='#ffffff',GradientType=0)}.goog-custom-button-outer-box,.goog-custom-button-inner-box{border-style:solid;border-color:#bbb #999 #999 #bbb;vertical-align:top}.goog-custom-button-outer-box{margin:0;border-width:1px 0;padding:0}.goog-custom-button-inner-box{margin:0 -1px;border-width:0 1px;padding:3px 4px}* html .goog-custom-button-inner-box{left:-1px}* html .goog-custom-button-rtl .goog-custom-button-outer-box{left:-1px}* html 
.goog-custom-button-rtl .goog-custom-button-inner-box{left:0}*:first-child+html .goog-custom-button-inner-box{left:-1px}*:first-child+html .goog-custom-button-collapse-right .goog-custom-button-inner-box{border-left-width:2px}*:first-child+html .goog-custom-button-collapse-left .goog-custom-button-inner-box{border-right-width:2px}*:first-child+html .goog-custom-button-collapse-right.goog-custom-button-collapse-left .goog-custom-button-inner-box{border-width:0 1px}*:first-child+html .goog-custom-button-rtl .goog-custom-button-inner-box{left:1px}::root .goog-custom-button,::root .goog-custom-button-outer-box{line-height:0}::root .goog-custom-button-inner-box{line-height:normal}.goog-custom-button-disabled{background-image:none!important;opacity:0.4;-moz-opacity:0.4;filter:alpha(opacity=40)}.goog-custom-button-disabled .goog-custom-button-outer-box,.goog-custom-button-disabled .goog-custom-button-inner-box{color:#333!important;border-color:#999!important}* html .goog-custom-button-disabled{margin:2px 1px!important;padding:0 1px!important}*:first-child+html .goog-custom-button-disabled{margin:2px 1px!important;padding:0 1px!important}.goog-custom-button-hover .goog-custom-button-outer-box,.goog-custom-button-hover .goog-custom-button-inner-box{border-color:#000!important;}.goog-custom-button-active,.goog-custom-button-checked{background-color:#bbb;background-position:bottom left;background-image:-webkit-gradient(linear,0 0,0 100%,from(#ddd),to(#fff));background:-moz-linear-gradient(top,#ddd,#fff);filter:progid:DXImageTransform.Microsoft.Gradient(EndColorstr='#ffffff',StartColorstr='#dddddd',GradientType=0)}.goog-custom-button-focused .goog-custom-button-outer-box,.goog-custom-button-focused .goog-custom-button-inner-box,.goog-custom-button-focused.goog-custom-button-collapse-left .goog-custom-button-inner-box,.goog-custom-button-focused.goog-custom-button-collapse-left.goog-custom-button-checked .goog-custom-button-inner-box{border-color:#000}.goog-custom-button-collapse-right,.goog-custom-button-collapse-right .goog-custom-button-outer-box,.goog-custom-button-collapse-right .goog-custom-button-inner-box{margin-right:0}.goog-custom-button-collapse-left,.goog-custom-button-collapse-left .goog-custom-button-outer-box,.goog-custom-button-collapse-left .goog-custom-button-inner-box{margin-left:0}.goog-custom-button-collapse-left .goog-custom-button-inner-box{border-left:1px solid #fff}.goog-custom-button-collapse-left.goog-custom-button-checked .goog-custom-button-inner-box{border-left:1px solid #ddd}* html .goog-custom-button-collapse-left .goog-custom-button-inner-box{left:0}*:first-child+html .goog-custom-button-collapse-left .goog-custom-button-inner-box{left:0}.goog-date-picker th,.goog-date-picker td{font-family:arial,sans-serif;text-align:center}.goog-date-picker th{font-size:.9em;font-weight:bold;color:#666667;background-color:#c3d9ff}.goog-date-picker td{vertical-align:middle;padding:2px 3px}.goog-date-picker{-moz-user-focus:normal;-moz-user-select:none;position:absolute;border:1px solid gray;float:left;font-family:arial,sans-serif;padding-left:1px;background:white}.goog-date-picker-menu{position:absolute;background:threedface;border:1px solid gray;-moz-user-focus:normal}.goog-date-picker-menu ul{list-style:none;margin:0;padding:0}.goog-date-picker-menu ul li{cursor:default}.goog-date-picker-menu-selected{background-color:#aaccee}.goog-date-picker td div{float:left}.goog-date-picker button{padding:0;margin:1px;border:1px outset gray}.goog-date-picker-week{padding:1px 
3px}.goog-date-picker-wday{padding:1px 3px}.goog-date-picker-today-cont{text-align:left!important}.goog-date-picker-none-cont{text-align:right!important}.goog-date-picker-head td{text-align:center}.goog-date-picker-month{width:12ex}.goog-date-picker-year{width:6ex}.goog-date-picker table{border-collapse:collapse}.goog-date-picker-selected{background-color:#aaccee!important;color:blue!important}.goog-date-picker-today{font-weight:bold!important}.goog-date-picker-other-month{-moz-opacity:0.3;filter:Alpha(Opacity=30)}.sat,.sun{background:#eee}#button1,#button2{display:block;width:60px;text-align:center;margin:10px;padding:10px;font:normal .8em arial,sans-serif;border:1px solid #000}.goog-menu{position:absolute;color:#000;border:1px solid #b5b6b5;background-color:#f3f3f7;cursor:default;font:normal small arial,helvetica,sans-serif;margin:0;padding:0;outline:none}.goog-menuitem{padding:2px 5px;margin:0;list-style:none}.goog-menuitem-highlight{background-color:#4279a5;color:#fff}.goog-menuitem-disabled{color:#999}.goog-option{padding-left:15px!important}.goog-option-selected{background-image:url('/img/check.gif');background-position:4px 50%;background-repeat:no-repeat}.goog-menuseparator{position:relative;margin:2px 0;border-top:1px solid #999;padding:0;outline:none}.goog-submenu{position:relative}.goog-submenu-arrow{position:absolute;display:block;width:11px;height:11px;right:3px;top:4px;background-image:url('/img/menu-arrows.gif');background-repeat:no-repeat;background-position:0 0;font-size:1px}.goog-menuitem-highlight .goog-submenu-arrow{background-position:0 -11px}.goog-menuitem-disabled .goog-submenu-arrow{display:none}.goog-menu-filter{margin:2px;border:1px solid silver;background:white;overflow:hidden}.goog-menu-filter div{color:gray;position:absolute;padding:1px}.goog-menu-filter input{margin:0;border:0;background:transparent;width:100%}.goog-menuitem-partially-checked{background-image:url('/img/check-outline.gif');background-position:4px 50%;background-repeat:no-repeat}.goog-menuitem-fully-checked{background-image:url('/img/check.gif');background-position:4px 50%;background-repeat:no-repeat}.goog-menu-button{margin:0 2px 2px 2px;border:0;padding:0;font:normal Tahoma,Arial,sans-serif;color:#000;background:#ddd url("/img/button-bg.gif") repeat-x top left;text-decoration:none;list-style:none;vertical-align:middle;cursor:pointer;outline:none}.goog-menu-button-outer-box,.goog-menu-button-inner-box{border-style:solid;border-color:#aaa;vertical-align:middle}.goog-menu-button-outer-box{margin:0;border-width:1px 0;padding:0}.goog-menu-button-inner-box{margin:0 -1px;border-width:0 1px;padding:0 4px 2px 4px}* html .goog-menu-button-inner-box{left:-1px}* html .goog-menu-button-rtl .goog-menu-button-outer-box{left:-1px}* html .goog-menu-button-rtl .goog-menu-button-inner-box{left:0}*:first-child+html .goog-menu-button-inner-box{left:-1px}*:first-child+html .goog-menu-button-rtl .goog-menu-button-inner-box{left:1px}::root .goog-menu-button,::root .goog-menu-button-outer-box,::root .goog-menu-button-inner-box{line-height:0}::root .goog-menu-button-caption,::root .goog-menu-button-dropdown{line-height:normal}.goog-menu-button-disabled{background-image:none!important;opacity:0.4;-moz-opacity:0.4;filter:alpha(opacity=40)}.goog-menu-button-disabled .goog-menu-button-outer-box,.goog-menu-button-disabled .goog-menu-button-inner-box,.goog-menu-button-disabled .goog-menu-button-caption,.goog-menu-button-disabled .goog-menu-button-dropdown{color:#333!important;border-color:#999!important}* html 
.goog-menu-button-disabled{margin:2px 1px!important;padding:0 1px!important}*:first-child+html .goog-menu-button-disabled{margin:2px 1px!important;padding:0 1px!important}.goog-menu-button-hover .goog-menu-button-outer-box,.goog-menu-button-hover .goog-menu-button-inner-box{border-color:#9cf #69e #69e #7af!important;}.goog-menu-button-active,.goog-menu-button-open{background-color:#bbb;background-position:bottom left}.goog-menu-button-focused .goog-menu-button-outer-box,.goog-menu-button-focused .goog-menu-button-inner-box{border-color:#3366cc}.goog-menu-button-caption{padding:0 4px 0 0;vertical-align:middle}.goog-menu-button-rtl .goog-menu-button-caption{padding:0 0 0 4px}.goog-menu-button-dropdown{width:7px;background:url('/img/toolbar_icons.gif') no-repeat -176px;vertical-align:middle}.goog-flat-menu-button{margin:0 2px;padding:1px 4px;font:normal 95% Tahoma,Arial,sans-serif;color:#333;text-decoration:none;list-style:none;vertical-align:middle;cursor:pointer;outline:none;-moz-outline:none;border-width:1px;border-style:solid;border-color:#c9c9c9;background-color:#fff}.goog-flat-menu-button-disabled *{color:#999;border-color:#ccc;cursor:default}.goog-flat-menu-button-hover,.goog-flat-menu-button-hover{border-color:#9cf #69e #69e #7af!important;}.goog-flat-menu-button-active{background-color:#bbb;background-position:bottom left}.goog-flat-menu-button-focused{border-color:#3366cc}.goog-flat-menu-button-caption{padding-right:10px;vertical-align:middle}.goog-flat-menu-button-dropdown{width:7px;background:url('/img/toolbar_icons.gif') no-repeat -176px;vertical-align:middle}h1{font-size:1.8em}.g-doc{width:auto;margin:0 10px}.g-doc-1024{margin-left:10px}#ae-logo{background:url('//www.google.com/images/logos/app_engine_logo_sm.gif') 0 0 no-repeat;display:block;width:178px;height:30px;margin:4px 0 0 0}.ae-ir span{position:absolute;display:block;width:0;height:0;overflow:hidden}.ae-noscript{position:absolute;left:-5000px}#ae-lhs-nav{border-right:3px solid #e5ecf9}.ae-notification{margin-bottom:.6em;text-align:center}.ae-notification strong{display:block;width:55%;margin:0 auto;text-align:center;padding:.6em;background-color:#fff1a8;font-weight:bold}.ae-alert{font-weight:bold;background:url('/img/icn/warning.png') no-repeat;margin-bottom:.5em;padding-left:1.8em}.ae-info{background:url('/img/icn/icn-info.gif') no-repeat;margin-bottom:.5em;padding-left:1.8em}.ae-promo{padding:.5em .8em;margin:.6em 0;background-color:#fffbe8;border:1px solid #fff1a9;text-align:left}.ae-promo strong{position:relative;top:.3em}.ae-alert-text,.ae-warning-text{background-color:transparent;background-position:right 1px;padding:0 18px 0 0}.ae-alert-text{color:#c00}.ae-warning-text{color:#f90}.ae-alert-c span{display:inline-block}.ae-message{border:1px solid #e5ecf9;background-color:#f6f9ff;margin-bottom:1em;padding:.5em}.ae-errorbox{border:1px solid #f00;background-color:#fee;margin-bottom:1em;padding:1em}#bd .ae-errorbox ul{padding-bottom:0}.ae-form dt{font-weight:bold}.ae-form dt em,.ae-field-hint{margin-top:.2em;color:#666667;font-size:.85em}.ae-field-hint-inline{color:#666667;font-size:.85em;display:inline}.ae-field-yyyymmdd,.ae-field-hhmmss{width:6em}.ae-field-hint-hhmmss{margin-left:2.3em}.ae-form label{display:block;margin:0 0 .2em 0;font-weight:bold}.ae-radio{margin-bottom:.3em}.ae-radio label{display:inline}.ae-form dd,.ae-input-row{margin-bottom:.6em}.ae-input-row-group{border:1px solid #fff1a9;background:#fffbe8;padding:8px}.ae-btn-row{margin-top:1.4em;margin-bottom:1em}.ae-btn-row-note{padding:5px 0 6px 
0}.ae-btn-row-note span{padding-left:18px;padding-right:.5em;background:transparent url('/img/icn/icn-info.gif') 0 0 no-repeat}.ae-btn-primary{font-weight:bold}form .ae-cancel{margin-left:.5em}.ae-submit-inline{margin-left:.8em}.ae-radio-bullet{width:20px;float:left}.ae-label-hanging-indent{margin-left:5px}.ae-divider{margin:0 .6em 0 .5em}.ae-nowrap{white-space:nowrap}.ae-pre-wrap{white-space:pre-wrap;white-space:-moz-pre-wrap;white-space:-pre-wrap;white-space:-o-pre-wrap;word-wrap:break-word;_white-space:pre;}wbr:after{content:"\00200b"}a button{text-decoration:none}.ae-alert ul{margin-bottom:.75em;margin-top:.25em;line-height:1.5em}.ae-alert h4{color:#000;font-weight:bold;padding:0 0 .5em}.ae-form-simple-list{list-style-type:none;padding:0;margin-bottom:1em}.ae-form-simple-list li{padding:.3em 0 .5em .5em;border-bottom:1px solid #c3d9ff}div.ae-datastore-index-to-delete,div.ae-datastore-index-to-build{color:#aaa}#hd p{padding:0}#hd li{display:inline}ul{padding:0 0 1em 1.2em}#ae-userinfo{text-align:right;white-space:nowrap;}#ae-userinfo ul{padding-bottom:0;padding-top:5px}#ae-appbar-lrg{margin:0 0 1.25em 0;padding:.25em .5em;background-color:#e5ecf9;border-top:1px solid #36c}#ae-appbar-lrg h1{font-size:1.2em;padding:0}#ae-appbar-lrg h1 span{font-size:80%;font-weight:normal}#ae-appbar-lrg form{display:inline;padding-right:.1em;margin-right:.5em}#ae-appbar-lrg strong{white-space:nowrap}#ae-appbar-sml{margin:0 0 1.25em 0;height:8px;padding:0 .5em;background:#e5ecf9}.ae-rounded-sml{border-radius:3px;-moz-border-radius:3px;-webkit-border-radius:3px}#ae-appbar-lrg a{margin-top:.3em}#ae-engine-version-bar{margin:0 0 1em}a.ae-ext-link,a span.ae-ext-link{background:url('/img/icn/icn-open-in-new-window.png') no-repeat right;padding-right:18px;margin-right:8px}.ae-no-pad{padding-left:1em}.ae-message h4{margin-bottom:.3em;padding-bottom:0}#ft{text-align:center;margin:2.5em 0 1em;padding-top:.5em;border-top:2px solid #c3d9ff}#bd h3{font-weight:bold;font-size:1.4em}#bd h3 .ae-apps-switch{font-weight:normal;font-size:.7em;margin-left:2em}#bd p{padding:0 0 1em 0}#ae-content{padding-left:1em}.ae-unimportant{color:#666}.ae-new-usr td{border-top:1px solid #ccccce;background-color:#ffe}.ae-error-td td{border:2px solid #f00;background-color:#fee}.ae-delete{cursor:pointer;border:none;background:transparent;}.ae-btn-large{background:#039 url('/img/icn/button_back.png') repeat-x;color:#fff;font-weight:bold;font-size:1.2em;padding:.5em;border:2px outset #000;cursor:pointer}.ae-breadcrumb{margin:0 0 1em}.ae-disabled,a.ae-disabled,a.ae-disabled:hover,a.ae-disabled:active{color:#666!important;text-decoration:none!important;cursor:default!important;opacity:.4!important;-moz-opacity:.4!important;filter:alpha(opacity=40)!important}input.ae-readonly{border:2px solid transparent;border-left:0;background-color:transparent}span.ae-text-input-clone{padding:5px 5px 5px 0}.ae-loading{opacity:.4;-moz-opacity:.4;filter:alpha(opacity=40)}.ae-tip{margin:1em 0;background:url('/img/tip.png') top left no-repeat;padding:2px 0 0 25px}sup.ae-new-sup{color:red}sup.ae-new-sup a{border-bottom:1px solid red;color:red;text-decoration:none}.ae-action{color:#00c;cursor:pointer;text-decoration:underline}.ae-toggle{padding-left:16px;background-position:left center;background-repeat:no-repeat;cursor:pointer}.ae-minus{background-image:url('/img/wgt/minus.gif')}.ae-plus{background-image:url('/img/wgt/plus.gif')}.ae-print{background-image:url('/img/print.gif');padding-left:19px}.ae-download{background:url('/img/download.png') left center 
no-repeat;padding-left:22px}.ae-currency,.ae-table thead th.ae-currency{text-align:right;white-space:nowrap}#ae-loading{font-size:1.2em;position:absolute;text-align:center;top:0;width:100%}#ae-loading div{margin:0 auto;background:#fff1a9;width:5em;font-weight:bold;padding:4px 10px;-moz-border-radius-bottomleft:3px;-moz-border-radius-bottomright:3px;-webkit-border-radius-bottomleft:3px;-webkit-border-radius-bottomright:3px}.ae-occlude{filter:alpha(opacity=0);position:absolute}.g-tpl-66-34 .g-unit,.g-unit .g-tpl-66-34 .g-unit,.g-unit .g-unit .g-tpl-66-34 .g-unit,.g-unit .g-unit .g-unit .g-tpl-66-34 .g-unit{display:inline;margin:0;width:33.999%;float:right}.g-unit .g-unit .g-unit .g-tpl-66-34 .g-first,.g-unit .g-unit .g-tpl-66-34 .g-first,.g-unit .g-tpl-66-34 .g-first,.g-tpl-66-34 .g-first{display:inline;margin:0;width:65.999%;float:left}.ae-ie6-c{_margin-right:-2000px;_position:relative;_width:100%;background:#fff}h2.ae-section-header{background:#e5ecf9;padding:.2em .4em;margin-bottom:.5em}.ae-field-span{padding:3px 0}ul.ae-admin-list li{margin:0 0;padding:.1em 0}#ae-feedback-bar{background-color:#f9edbe;border:1px solid #f0c36d;-moz-border-radius:2px;-webkit-border-radius:2px;border-radius:2px;-webkit-box-shadow:0 2px 4px rgba(0,0,0,0.2);-moz-box-shadow:0 2px 4px rgba(0,0,0,0.2);box-shadow:0 2px 4px rgba(0,0,0,0.2);left:280px;margin-top:-5px;overflow:hidden;padding:10px 16px;position:fixed;text-align:center;z-index:999}.ae-feedback-info{margin-bottom:7px}.ae-feedback-option{color:#222;display:inline-block;font-size:0.87em;vertical-align:bottom;width:75px}.ae-feedback-close-icon{background:url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAA8AAAAPCAYAAAA71pVKAAAARUlEQVR42mMon7n7fwWZmAFEkAOorxmXYdjUoWhG9g+6QnRxnDYjK8RnIFY/YwtZkgIMn0ba2Ey2n6kS2mTFM/2TJ7kYAJSLDRhvVX1GAAAAAElFTkSuQmCC') no-repeat 0 0;float:right;height:15px;margin-right:-11px;width:15px}select{font:13px/13px Arial,sans-serif;color:#000;border-width:1px;border-style:solid;border-color:#bbb #999 #999 #bbb;-webkit-border-radius:2px;-moz-border-radius:2px;background:#eee;background:-webkit-gradient(linear,0 0,0 100%,from(#fff),to(#ddd));background:-moz-linear-gradient(top,#fff,#ddd);filter:progid:DXImageTransform.Microsoft.Gradient(EndColorstr='#dddddd',StartColorstr='#ffffff',GradientType=0);cursor:pointer;padding:2px 1px;margin:0}select:hover{border-color:#000}select[disabled],select[disabled]:active{color:#666;border-color:#ddd;background-color:#f3f3f3;background-image:none;text-shadow:none;cursor:auto}.ae-table-plain{border-collapse:collapse;width:100%}.ae-table{border:1px solid #c5d7ef;border-collapse:collapse;width:100%}#bd h2.ae-table-title{background:#e5ecf9;margin:0;color:#000;font-size:1em;padding:3px 0 3px 5px;border-left:1px solid #c5d7ef;border-right:1px solid #c5d7ef;border-top:1px solid #c5d7ef}.ae-table-caption,.ae-table caption{border:1px solid #c5d7ef;background:#e5ecf9;-moz-margin-start:-1px}.ae-table caption{padding:3px 5px;text-align:left}.ae-table th,.ae-table td{background-color:#fff;padding:.35em 1em .25em .35em;margin:0}.ae-table thead th{font-weight:bold;text-align:left;background:#c5d7ef;vertical-align:bottom}.ae-table thead th .ae-no-bold{font-weight:normal}.ae-table tfoot tr td{border-top:1px solid #c5d7ef;background-color:#e5ecf9}.ae-table td{border-top:1px solid #c5d7ef;border-bottom:1px solid #c5d7ef}.ae-even>td,.ae-even th,.ae-even-top td,.ae-even-tween td,.ae-even-bottom td,ol.ae-even{background-color:#e9e9e9;border-top:1px solid #c5d7ef;border-bottom:1px solid #c5d7ef}.ae-even-top 
td{border-bottom:0}.ae-even-bottom td{border-top:0}.ae-even-tween td{border:0}.ae-table .ae-tween td{border:0}.ae-table .ae-tween-top td{border-bottom:0}.ae-table .ae-tween-bottom td{border-top:0}#bd .ae-table .cbc{width:1.5em;padding-right:0}.ae-table #ae-live td{background-color:#ffeac0}.ae-table-fixed{table-layout:fixed}.ae-table-fixed td,.ae-table-nowrap{overflow:hidden;white-space:nowrap}.ae-paginate strong{margin:0 .5em}tfoot .ae-paginate{text-align:right}.ae-table-caption .ae-paginate,.ae-table-caption .ae-orderby{padding:2px 5px}.modal-dialog{background:#c1d9ff;border:1px solid #3a5774;color:#000;padding:4px;position:absolute;font-size:1.3em;-moz-box-shadow:0 1px 4px #333;-webkit-box-shadow:0 1px 4px #333;box-shadow:0 1px 4px #333}.modal-dialog a,.modal-dialog a:link,.modal-dialog a:visited{color:#06c;cursor:pointer}.modal-dialog-bg{background:#666;left:0;position:absolute;top:0}.modal-dialog-title{background:#e0edfe;color:#000;cursor:pointer;font-size:120%;font-weight:bold;padding:8px 15px 8px 8px;position:relative;_zoom:1;}.modal-dialog-title-close{background:#e0edfe url('https://ssl.gstatic.com/editor/editortoolbar.png') no-repeat -528px 0;cursor:default;height:15px;position:absolute;right:10px;top:8px;width:15px;vertical-align:middle}.modal-dialog-buttons,.modal-dialog-content{background-color:#fff;padding:8px}.modal-dialog-buttons button{margin-right:.75em}.goog-buttonset-default{font-weight:bold}.goog-tab{position:relative;border:1px solid #8ac;padding:4px 9px;color:#000;background:#e5ecf9;border-top-left-radius:2px;border-top-right-radius:2px;-moz-border-radius-topleft:2px;-webkit-border-top-left-radius:2px;-moz-border-radius-topright:2px;-webkit-border-top-right-radius:2px}.goog-tab-bar-top .goog-tab{margin:1px 4px 0 0;border-bottom:0;float:left}.goog-tab-bar-bottom .goog-tab{margin:0 4px 1px 0;border-top:0;float:left}.goog-tab-bar-start .goog-tab{margin:0 0 4px 1px;border-right:0}.goog-tab-bar-end .goog-tab{margin:0 1px 4px 0;border-left:0}.goog-tab-hover{text-decoration:underline;cursor:pointer}.goog-tab-disabled{color:#fff;background:#ccc;border-color:#ccc}.goog-tab-selected{background:#fff!important;color:black;font-weight:bold}.goog-tab-bar-top .goog-tab-selected{top:1px;margin-top:0;padding-bottom:5px}.goog-tab-bar-bottom .goog-tab-selected{top:-1px;margin-bottom:0;padding-top:5px}.goog-tab-bar-start .goog-tab-selected{left:1px;margin-left:0;padding-right:9px}.goog-tab-bar-end .goog-tab-selected{left:-1px;margin-right:0;padding-left:9px}.goog-tab-content{padding:.1em .8em .8em .8em;border:1px solid #8ac;border-top:none}.goog-tab-bar{position:relative;margin:0 0 0 5px;border:0;padding:0;list-style:none;cursor:default;outline:none}.goog-tab-bar-clear{border-top:1px solid #8ac;clear:both;height:0;overflow:hidden}.goog-tab-bar-start{float:left}.goog-tab-bar-end{float:right}* html .goog-tab-bar-start{margin-right:-3px}* html .goog-tab-bar-end{margin-left:-3px}#ae-nav ul{list-style-type:none;margin:0;padding:1em 0}#ae-nav ul li{padding-left:.5em}#ae-nav .ae-nav-selected{color:#000;display:block;font-weight:bold;background-color:#e5ecf9;margin-right:-1px;border-top-left-radius:4px;-moz-border-radius-topleft:4px;-webkit-border-top-left-radius:4px;border-bottom-left-radius:4px;-moz-border-radius-bottomleft:4px;-webkit-border-bottom-left-radius:4px}#ae-nav .ae-nav-bold{font-weight:bold}#ae-nav ul li span.ae-nav-disabled{color:#666}#ae-nav ul ul{margin:0;padding:0 0 0 .5em}#ae-nav ul ul li{padding-left:.5em}#ae-nav ul li a,#ae-nav ul li span,#ae-nav ul ul li 
a{padding-left:.5em}#ae-nav li a:link,#ae-nav li a:visited{color:#00c}.ae-nav-group{padding:.5em;margin:0 .75em 0 0;background-color:#fffbe8;border:1px solid #fff1a9}.ae-nav-group h4{font-weight:bold;padding:auto auto .5em .5em;padding-left:.4em;margin-bottom:.5em;padding-bottom:0}.ae-nav-group ul{margin:0 0 .5em 0;padding:0 0 0 1.3em;list-style-type:none}.ae-nav-group ul li{padding-bottom:.5em}.ae-nav-group li a:link,.ae-nav-group li a:visited{color:#00c}.ae-nav-group li a:hover{color:#00c}@media print{body{font-size:13px;width:8.5in;background:#fff}table,.ae-table-fixed{table-layout:automatic}tr{display:table-row!important}.g-doc-1024{width:8.5in}#ae-appbar-lrg,.ae-table-caption,.ae-table-nowrap,.ae-nowrap,th,td{overflow:visible!important;white-space:normal!important;background:#fff!important}.ae-print,.ae-toggle{display:none}#ae-lhs-nav-c{display:none}#ae-content{margin:0;padding:0}.goog-zippy-collapsed,.goog-zippy-expanded{background:none!important;padding:0!important}}#ae-admin-dev-table{margin:0 0 1em 0}.ae-admin-dev-tip,.ae-admin-dev-tip.ae-tip{margin:-0.31em 0 2.77em}#ae-sms-countryselect{margin-right:.5em}#ae-admin-enable-form{margin-bottom:1em}#ae-admin-services-c{margin-top:2em}#ae-admin-services{padding:0 0 0 3em;margin-bottom:1em;font-weight:bold}#ae-admin-logs-table-c{_margin-right:-2000px;_position:relative;_width:100%;background:#fff}#ae-admin-logs-table{margin:0;padding:0}#ae-admin-logs-filters{padding:3px 0 3px 5px}#ae-admin-logs-pagination{padding:6px 5px 0 0;text-align:right;width:45%}#ae-admin-logs-pagination span.ae-disabled{color:#666;background-color:transparent}#ae-admin-logs-table td{white-space:nowrap}#ae-admin-logs-timezone{float:right;margin-bottom:7px}#ae-storage-content div.ae-alert{padding-bottom:5px}#ae-admin-performance-form input[type=text]{width:2em}.ae-admin-performance-value{font-weight:normal}.ae-admin-performance-static-value{color:#666}#ae-admin-performance-frontend-class{margin-left:0.5em}.goog-slider-horizontal,.goog-twothumbslider-horizontal{position:relative;width:502px;height:7px;display:block;outline:0;margin:1.0em 0 0.9em 3em}.ae-slider-rail:before{position:relative;top:-0.462em;float:left;content:'Min';margin:0 0 0 -3em;color:#999}.ae-slider-rail{position:absolute;background-color:#d9d9d9;top:0;right:8px;bottom:0;left:8px;border:solid 1px;border-color:#a6a6a6 #b3b3b3 #bfbfbf;border-radius:5px}.ae-slider-rail:after{position:relative;top:-0.462em;float:right;content:'Max';margin:0 -3em 0 0;color:#999}.goog-slider-horizontal .goog-slider-thumb,.goog-twothumbslider-horizontal .goog-twothumbslider-value-thumb,.goog-twothumbslider-horizontal .goog-twothumbslider-extent-thumb{position:absolute;width:17px;height:17px;background:transparent url('/img/slider_thumb-down.png') no-repeat;outline:0}.goog-slider-horizontal .goog-slider-thumb{top:-5px}.goog-twothumbslider-horizontal .goog-twothumbslider-value-thumb{top:-11px}.goog-twothumbslider-horizontal .goog-twothumbslider-extent-thumb{top:2px;background-image:url('/img/slider_thumb-up.png')}.ae-admin-performance-scale{position:relative;display:inline-block;width:502px;margin:0 0 2.7em 3em}.ae-admin-performance-scale .ae-admin-performance-scale-start{position:absolute;display:inline-block;top:0;width:100%;text-align:left}.ae-admin-performance-scale .ae-admin-performance-scale-mid{position:absolute;display:inline-block;top:0;width:100%;text-align:center}.ae-admin-performance-scale 
.ae-admin-performance-scale-end{position:absolute;display:inline-block;top:0;width:100%;text-align:right}.ae-pagespeed-controls{margin:0 0 1em 8px}.ae-pagespeed-controls label{display:inline;font-weight:normal}#ae-pagespeed-flush-cache{margin-left:1em}#ae-pagespeed-flush-cache-status{margin-left:1em;font-weight:bold}.ae-absolute-container{display:inline-block;width:100%}.ae-hidden-range{display:none}.ae-default-version-radio-column{width:1em}#ae-settings-builtins-change{margin-bottom:1em}.prettyprint{background-color:#fafafa;font:1em "Droid Sans Mono",monospace;margin-right:20px}#formatted-performance-settings>div{float:left}#ae-billing-form-c{_margin-right:-3000px;_position:relative;_width:100%}.ae-rounded-top-small{-moz-border-radius-topleft:3px;-webkit-border-top-left-radius:3px;-moz-border-radius-topright:3px;-webkit-border-top-right-radius:3px}.ae-progress-content{height:400px}#ae-billing-tos{text-align:left;width:100%;margin-bottom:.5em}.ae-billing-budget-section{margin-bottom:1.5em}.ae-billing-budget-section .g-unit,.g-unit .ae-billing-budget-section .g-unit,.g-unit .g-unit .ae-billing-budget-section .g-unit{margin:0 0 0 11em;width:auto;float:none}.g-unit .g-unit .ae-billing-budget-section .g-first,.g-unit .ae-billing-budget-section .g-first,.ae-billing-budget-section .g-first{margin:0;width:11em;float:left}#ae-billing-form .ae-btn-row{margin-left:11em}#ae-billing-form .ae-btn-row .ae-info{margin-top:10px}#ae-billing-checkout{width:150px;float:left}#ae-billing-alloc-table{border:1px solid #c5d7ef;border-bottom:none;width:100%;margin-top:.5em}#ae-billing-alloc-table th,#ae-billing-alloc-table td{padding:.35em 1em .25em .35em;border-bottom:1px solid #c5d7ef;color:#000;white-space:nowrap}.ae-billing-resource{background-color:transparent;font-weight:normal}#ae-billing-alloc-table tr th span{font-weight:normal}#ae-billing-alloc-table tr{vertical-align:baseline}#ae-billing-alloc-table th{white-space:nowrap}#ae-billing-alloc-table .ae-editable span.ae-text-input-clone,#ae-billing-alloc-table .ae-readonly input{display:none}#ae-billing-alloc-table .ae-readonly span.ae-text-input-clone,#ae-billing-alloc-table .ae-editable input{display:inline}#ae-billing-alloc-table td span.ae-billing-warn-note,#ae-billing-table-errors .ae-billing-warn-note{margin:0;background-repeat:no-repeat;display:inline-block;background-image:url('/img/icn/warning.png');text-align:right;padding-left:16px;padding-right:.1em;height:16px;font-weight:bold}#ae-billing-alloc-table td span.ae-billing-warn-note span,#ae-billing-table-errors .ae-billing-warn-note span{vertical-align:super;font-size:80%}#ae-billing-alloc-table td span.ae-billing-error-hidden,#ae-billing-table-errors .ae-billing-error-hidden{display:none}.ae-billing-percent{font-size:80%;color:#666;margin-left:3px}#ae-billing-week-info{margin-top:5px;line-height:1.4}#ae-billing-table-errors{margin-top:.3em}#ae-billing-allocation-noscript{margin-top:1.5em}#ae-billing-allocation-custom-opts{margin-left:2.2em}#ae-billing-settings h2{font-size:1em;display:inline}#ae-billing-settings p{padding:.3em 0 .5em}#ae-billing-settings-table{margin:.4em 0 .5em}#ae-settings-resource-col{width:19%}#ae-settings-budget-col{width:11%}#ae-billing-settings-table .ae-settings-budget-col{padding-right:2em}.ae-table th.ae-settings-unit-cell,.ae-table td.ae-settings-unit-cell,.ae-table th.ae-total-unit-cell,.ae-table 
td.ae-total-unit-cell{padding-left:1.2em}#ae-settings-unit-col{width:18%}#ae-settings-paid-col{width:15%}#ae-settings-free-col{width:15%}#ae-settings-total-col{width:22%}.ae-billing-inline-link{margin-left:.5em}.ae-billing-settings-section{margin-bottom:2em}#ae-billing-settings form{display:inline-block}#ae-billing-settings .ae-btn-row{margin-top:0.5em}#ae-billing-budget-setup-checkout{margin-bottom:0}#ae-billing-vat-c .ae-field-hint{width:85%}#ae-billing-checkout-note{margin-top:.8em}.ae-drachma-preset{background-color:#f6f9ff;margin-left:11em}.ae-drachma-preset p{margin-top:.5em}.ae-table thead th.ae-currency-th{text-align:right}#ae-billing-logs-date{width:15%}#ae-billing-logs-event{width:69%}#ae-billing-logs-amount{text-align:right;width:8%}#ae-billing-logs-balance{text-align:right;width:8%}#ae-billing-history-expand .ae-action{margin-left:1em}.ae-table .ae-billing-usage-premier,.ae-table .ae-billing-usage-report{width:100%;*width:auto;margin:0 0 1em 0}.ae-table .ae-billing-usage-report th,.ae-table .ae-billing-usage-premier th,.ae-billing-charges th{color:#666;border-top:0}.ae-table .ae-billing-usage-report th,.ae-table .ae-billing-usage-report td,.ae-table .ae-billing-usage-premier th,.ae-table .ae-billing-usage-premier td,.ae-billing-charges th,.ae-billing-charges td{background-color:transparent;padding:.4em 0;border-bottom:1px solid #ddd}.ae-table .ae-billing-usage-report tfoot td,.ae-billing-charges tfoot td{border-bottom:none}table.ae-billing-usage-report col.ae-billing-report-resource{width:30%}table.ae-billing-usage-report col.ae-billing-report-used{width:20%}table.ae-billing-usage-report col.ae-billing-report-free{width:16%}table.ae-billing-usage-report col.ae-billing-report-paid{width:17%}table.ae-billing-usage-report col.ae-billing-report-charge{width:17%}table.ae-billing-usage-premier col.ae-billing-report-resource{width:50%}table.ae-billing-usage-premier col.ae-billing-report-used{width:30%}table.ae-billing-usage-premier col.ae-billing-report-unit{width:20%}.ae-billing-change-resource{width:85%}.ae-billing-change-budget{width:15%}#ae-billing-always-on-label{display:inline}#ae-billing-budget-buffer-label{display:inline}.ae-billing-charges{width:50%}.ae-billing-charges-charge{text-align:right}.ae-billing-usage-report-container{padding:1em 1em 0 1em}#ae-billing-new-usage{background-color:#f6f9ff}.goog-zippy-expanded{background-image:url('/img/wgt/minus.gif');cursor:pointer;background-repeat:no-repeat;padding-left:17px}.goog-zippy-collapsed{background-image:url('/img/wgt/plus.gif');cursor:pointer;background-repeat:no-repeat;padding-left:17px}#ae-admin-logs-pagination{width:auto}.ae-usage-cycle-note{color:#555}#ae-billing-budget-widget .g-content{margin-bottom:0.5em;margin-right:0.5em}#ae-request-billing-dialog{width:800px}#ae-manage-billing-admins-form-cancel{color:#000}.ae-grace-period-resignup-outstanding-balance{color:red;font-weight:bold}.ae-billing-admins-table{width:100%}.ae-billing-admins-table td{width:33%;word-break:break-all;padding-bottom:5px}.b3iframe{width:100%}iframe{border:none}#ae-createapp-start{background-color:#c6d5f1;padding:1em;padding-bottom:2em;text-align:center}#ae-admin-app_id_alias-check,#ae-createapp-id-check{margin:0 0 0 1em}#ae-admin-app_id_alias-message{display:block;margin:.4em 0}#ae-createapp-id-content{width:100%}#ae-createapp-id-content td{vertical-align:top}#ae-createapp-id-td{white-space:nowrap;width:1%}#ae-createapp-id-td 
#ae-createapp-id-error{position:absolute;width:24em;padding-left:1em;white-space:normal}#ae-createapp-id-error-td{padding-left:1em}#ae-admin-dev-invite label{float:left;width:3.6em;position:relative;top:.3em}#ae-admin-dev-invite .ae-radio{margin-left:3.6em}#ae-admin-dev-invite .ae-radio label{float:none;width:auto;font-weight:normal;position:static}#ae-admin-dev-invite .goog-button{margin-left:3.6em}#ae-admin-dev-invite .ae-field-hint{margin-left:4.2em}#ae-admin-dev-invite .ae-radio .ae-field-hint{margin-left:0}.ae-you{color:#008000}#ae-authdomain-opts{margin-bottom:1em}#ae-authdomain-content .ae-input-text,#ae-authdomain-content .ae-field-hint{margin:.3em 0 .4em 2.5em}#ae-authdomain-opts a{margin-left:1em}#ae-authdomain-opts-hint{color:#666667;font-size:.85em;margin-top:.2em}#ae-authdomain-content #ae-authdomain-desc .ae-field-hint{margin-left:0}#ae-storage-opts{margin-bottom:1em}#ae-storage-content .ae-input-text,#ae-storage-content .ae-field-hint{margin:.3em 0 .4em 2.5em}#ae-storage-opts a{margin-left:1em}#ae-storage-opts-hint{color:#666667;font-size:.85em;margin-top:.2em}#ae-storage-content #ae-storage-desc .ae-field-hint{margin-left:0}#ae-location-opts{margin-bottom:1em}#ae-location-content .ae-input-text,#ae-location-content .ae-field-hint{margin:.3em 0 .4em 2.5em}#ae-location-opts a{margin-left:1em}#ae-location-opts-hint{color:#666667;font-size:.85em;margin-top:.2em}#ae-location-content #ae-storage-desc .ae-field-hint{margin-left:0}#ae-dash .g-section{margin:0 0 1em}#ae-dash * .g-section{margin:0}#ae-dash-quota .ae-alert{padding-left:1.5em}.ae-dash-email-disabled{background:url('/img/icn/exclamation_circle.png') no-repeat;margin-bottom:.5em;margin-top:.5em;min-height:16px;padding-left:1.5em}#ae-dash-email-disabled-footnote{font-weight:normal;margin:5px 0 0;padding-left:1.5em}#ae-dash-graph-c{border:1px solid #c5d7ef;padding:5px 0}#ae-dash-graph-change{margin:0 0 0 5px}#ae-dash-graph-img{background-color:#fff;display:block;margin-top:.5em;padding:5px}#ae-dash-graph-nodata{text-align:center}#ae-dash .ae-logs-severity{margin-right:.5em}#ae-dash .g-c{padding:0 0 0 .1em}#ae-dash .g-tpl-50-50 .g-unit .g-c{padding:0 0 0 1em}#ae-dash .g-tpl-50-50 .g-first .g-c{padding:0 1em 0 .1em}.ae-quota-warnings{background-color:#fffbe8;margin:0;padding:.5em .5em 0;text-align:left}.ae-quota-warnings div{padding:0 0 .5em}#ae-dash-quota-refresh-info{font-size:85%}#ae-dash #ae-dash-dollar-bucket-c #ae-dash-dollar-bucket{width:100%;float:none}#ae-dash #ae-dash-quota-bar-col,#ae-dash .ae-dash-quota-bar{width:100px}#ae-dash-quotadetails #ae-dash-quota-bar-col,#ae-dash-quotadetails .ae-dash-quota-bar{width:200px}#ae-dash-quota-percent-col{width:3.5em}#ae-dash-quota-cost-col{width:15%}#ae-dash-quota-alert-col{width:3.5em}#ae-dash .ae-dash-quota-alert-td{padding:0}.ae-dash-quota-alert-td a{display:block;width:16px;height:16px}#ae-dash .ae-dash-quota-alert-td .ae-alert{display:block;width:16px;height:16px;margin:0;padding:0}#ae-dash .ae-dash-quota-alert-td .ae-dash-email-disabled{display:block;width:16px;height:16px;margin:0;padding:0}#ae-dash-quota tbody th{font-weight:normal}#ae-dash-quota caption{padding:0}#ae-dash-quota caption .g-c{padding:3px}.ae-dash-quota-bar{float:left;background-color:#c0c0c0;height:13px;margin:.1em 0 0 0;position:relative}.ae-dash-quota-footnote{margin:5px 0 
0;font-weight:normal}.ae-quota-warning{background-color:#f90}.ae-quota-alert{background-color:#c00}.ae-quota-normal{background-color:#0b0}.ae-quota-alert-text{color:#c00}.ae-favicon-text{font-size:.85em}#ae-dash-popular{width:97%}#ae-dash-popular-reqsec-col{width:6.5em}#ae-dash-popular-req-col{width:7em}#ae-dash-popular-mcycles-col{width:9.5em}#ae-dash-popular-latency-col{width:7em}#ae-dash-popular .ae-unimportant{font-size:80%}#ae-dash-popular .ae-nowrap,#ae-dash-errors .ae-nowrap{margin-right:5px;overflow:hidden}#ae-dash-popular th span,#ae-dash-errors th span{font-size:.8em;font-weight:normal;display:block}#ae-dash-errors caption .g-unit{width:9em}#ae-dash-errors-count-col{width:5em}#ae-dash-errors-percent-col{width:7em}#ae-dash-graph-chart-type{float:left;margin-right:1em}#ae-apps-all strong.ae-disabled{color:#000;background:#eee}.ae-quota-resource{width:30%}.ae-quota-safety-limit{width:10%}#ae-quota-details h3{padding-bottom:0;margin-bottom:.25em}#ae-quota-details table{margin-bottom:1.75em}#ae-quota-details table.ae-quota-requests{margin-bottom:.5em}#ae-quota-refresh-note p{text-align:right;padding-top:.5em;padding-bottom:0;margin-bottom:0}#ae-quota-first-api.g-section{padding-bottom:0;margin-bottom:.25em}#ae-instances-summary-table,#ae-instances-details-table{margin-bottom:1em}.ae-instances-details-availability-image{float:left;margin-right:.5em}.ae-instances-vm-health-image{float:left;margin-right:.5em}.ae-instances-agent-warning-image{float:right}.ae-instances-small-text{font-size:80%}.ae-instances-small-text .ae-separator{color:#666}.ae-instances-highlight td{background-color:#fff1a8}.ae-instances-release-col{width:6em}.ae-appbar-superuser-message strong{color:red}#ae-backends-table tr{vertical-align:baseline}.ae-backends-class-reminder{font-size:80%;color:#666;margin-left:3px}#ae-appbar-engines,#ae-appbar-versions{display:inline}#ae-update{background-color:#fffbe8;border:1px solid}.ac-renderer{font:normal 13px Arial,sans-serif;position:absolute;background:#fff;border:1px solid #666;-moz-box-shadow:2px 2px 2px rgba(102,102,102,.4);-webkit-box-shadow:2px 2px 2px rgba(102,102,102,.4);width:202px}.ac-row{cursor:pointer;padding:.4em}.ac-highlighted{font-weight:bold}.ac-active{background-color:#b2b4bf}#ae-datastore-explorer-c{_margin-right:-3000px;_position:relative;_width:100%}#ae-datastore-explorer form dt{margin:1em 0 0 0}#ae-datastore-explorer #ae-datastore-explorer-labels{margin:0 0 3px}#ae-datastore-explorer-header .ae-action{margin-left:1em}#ae-datastore-explorer .id{white-space:nowrap}#ae-datastore-explorer caption{text-align:right;padding:5px}#ae-datastore-explorer-submit{margin-top:5px}#ae-datastore-explorer-namespace{margin-top:7px;margin-right:5px}#ae-datastore-stats-namespace-input,#ae-datastore-explorer-namespace-query,#ae-datastore-explorer-namespace-create{width:200px}#ae-datastore-explorer-gql-spacer{margin-top:22px}h4 #ae-datastore-explorer-gql-label{font-weight:normal}#ae-datastore-form em{font-style:normal;font-weight:normal;margin:0 0 0 .2em;color:#666}#ae-datastore-form dt{font-weight:bold}#ae-datastore-form dd{margin:.4em 0 .3em 1.5em;overflow:auto;zoom:1}#ae-datastore-form dd em{width:4em;float:left}#ae-datastore-form dd.ae-last{margin-bottom:1em}#ae-datastore-explorer-tabs-content{margin-bottom:1em}#ae-datastore-explorer-list .ae-label-row,#ae-datastore-explorer-new .ae-label-row{float:left;padding-top:.2em}#ae-datastore-explorer-list .ae-input-row,#ae-datastore-explorer-list .ae-btn-row,#ae-datastore-explorer-new .ae-input-row,#ae-datastore-explorer-new 
.ae-btn-row{margin-left:6em}#ae-datastore-explorer-list .ae-btn-row,#ae-datastore-explorer-new .ae-btn-row{margin-bottom:0}.ae-datastore-index-name{font-size:1.2em;font-weight:bold}.ae-table .ae-datastore-index-defs{padding-left:20px}.ae-datastore-index-defs-row{border-top:1px solid #ddd}.ae-datastore-index-defs .ae-unimportant{font-size:.8em}.ae-datastore-index-status{border:1px solid #c0dfbf;background:#f3f7f3;margin:0 25px 0 0;padding:3px}#ae-datastore-index-status-col{width:20%}#ae-datastore-index-stat-col{width:20%}.ae-datastore-index-status-Building{border-color:#edebcd;background:#fefdec}.ae-datastore-index-status-Deleting{border-color:#ccc;background:#eee}.ae-datastore-index-status-Error{border-color:#ffd3b4;background:#ffeae0}.ae-datastore-pathlink{font-size:.9em}#ae-datastore-explorer-max-datastore-viewer-columns-form{float:right}#ae-datastore-explorer-max-datastore-viewer-columns-hint{position:absolute;width:14em;visibility:hidden;box-shadow:0 15px 50px #777;z-index:2}#ae-datastore-stats-top-level-c{padding-bottom:1em;margin-bottom:1em;border-bottom:1px solid #e5ecf9}#ae-datastore-stats-top-level{width:100%}#ae-datastore-stats-piecharts-c{margin-bottom:1em}.ae-datastore-stats-piechart-label{font-size:.85em;font-weight:normal;text-align:center;padding:0}#ae-datastore-stats-property-type{width:60%}#ae-datastore-stats-size-all{width:20%}#ae-datastore-stats-index-size-all{width:20%}#ae-datastore-stats-property-name{width:40%}#ae-datastore-stats-type{width:10%}#ae-datastore-stats-size-entity{width:15%}#ae-datastore-stats-index-size-entity{width:15%}#ae-datastore-stats-percentage-size-entity{width:20%}#ae-datastore-blob-filter-form{margin-bottom:1em}#ae-datastore-blob-query-filter-label{padding-right:.5em}#ae-datastore-blob-filter-contents{padding-top:.5em}#ae-datastore-blob-date-after,#ae-datastore-blob-date-before{float:left}#ae-datastore-blob-date-after{margin-right:1em}#ae-datastore-blob-order label{font-weight:normal}#ae-datastore-blob-col-check{width:2%}#ae-datastore-blob-col-file{width:45%}#ae-datastore-blob-col-type{width:14%}#ae-datastore-blob-col-size{width:16%}#ae-blobstore-col-date{width:18%}#ae-blob-detail-filename{padding-bottom:0}#ae-blob-detail-filename span{font-weight:normal}#ae-blob-detail-key{font-size:85%}#ae-blob-detail-preview{margin-top:1em}#ae-blob-detail-dl{text-align:right}.ae-deployment-add-labels{padding:0 5px 0 20px}.ae-deployment-button-cell{width:95px}#ae-deployment-dm-dialog{width:400px}.ae-deployment-dm-selector{margin:20px 2px 20px 5px}#ae-deployment-exp-add{margin-top:5px}#ae-deployment-exp-contents{margin-top:5px;overflow:hidden}#ae-deployment-exp-desc{margin-bottom:15px}#ae-deployment-exp-div{background-color:#e5ecf9;border:1px solid #c5d7ef;margin:20px 0;padding:7px 4px}#ae-deployment-exp-hdr{font-weight:bold;margin:5px 0 5px}#ae-deployment-exp-tbl{width:400px}#ae-deployment-exp-toggle{font-weight:bold}.ae-deployment-set-button{width:22px}.ae-deployment-traffic-input{width:30px}.ae-deployment-change-state-form{display:inline}.ae-deployment-change-state-submit{background:transparent;text-decoration:underline;border:none;outline:none;cursor:pointer;color:#00c;padding:0 0 0 .1em}#ae-domain-admins-list li{margin-bottom:.3em}#ae-domain-admins-list button{margin-left:.5em}#ae-new-app-dialog-c{width:500px}#ae-new-app-dialog-c .g-section{margin-bottom:1em}p.light-note{color:#555}.ae-bottom-message{margin-top:1em}#domsettings-form div.ae-radio{margin-left:1.7em}#domsettings-form div.ae-radio 
input{margin-left:-1.47em;float:left}#ae-logs-c{_margin-right:-2000px;_position:relative;_width:100%;background:#fff}#ae-logs{background-color:#c5d7ef;padding:1px;line-height:1.65}#ae-logs .ae-table-caption{border:0}#ae-logs-c ol,#ae-logs-c li{list-style:none;padding:0;margin:0}#ae-logs-c li li{margin:0 0 0 3px;padding:0 0 0 17px}.ae-log-noerror{padding-left:23px}#ae-logs-form .goog-inline-block{margin-top:0}.ae-logs-usage-info{padding-left:.5em}.ae-logs-reqlog .snippet{margin:.1em}.ae-logs-applog .snippet{color:#666}.ae-logs-severity{display:block;float:left;height:1.2em;width:1.2em;line-height:1.2;text-align:center;text-transform:capitalize;font-weight:bold;border-radius:2px;-moz-border-radius:2px;-webkit-border-radius:2px}.ae-logs-severity-4{background-color:#f22;color:#000}.ae-logs-severity-3{background-color:#f90;color:#000}.ae-logs-severity-2{background-color:#fd0}.ae-logs-severity-1{background-color:#3c0;color:#000}.ae-logs-severity-0{background-color:#09f;color:#000}#ae-logs-legend{margin:1em 0 0 0}#ae-logs-legend ul{list-style:none;margin:0;padding:0}#ae-logs-legend li,#ae-logs-legend strong{float:left;margin:0 1em 0 0}#ae-logs-legend li span{margin-right:.3em}.ae-logs-timestamp{padding:0 5px;font-size:85%}#ae-logs-form-c{margin-bottom:5px;padding-bottom:.5em;padding-left:1em}#ae-logs-form{padding:.3em 0 0}#ae-logs-form .ae-label-row{float:left;padding-top:.2em;margin-right:0.539em}#ae-logs-form .ae-input-row,#ae-logs-form .ae-btn-row{margin-left:4em}#ae-logs-form .ae-btn-row{margin-bottom:0}#ae-logs-requests-c{margin-bottom:.1em}#ae-logs-requests-c input{margin:0}#ae-logs-requests-all-label{margin-right:0.539em}#ae-logs-form-options{margin-top:8px}#ae-logs-tip{margin:.2em 0}#ae-logs-expand{margin-right:.2em}#ae-logs-severity-level-label{margin-top:.3em;display:block}#ae-logs-filter-hint-labels-list{margin:2px 0}#ae-logs-filter-hint-labels-list span{position:absolute}#ae-logs-filter-hint-labels-list ul{margin-left:5.5em;padding:0}#ae-logs-filter-hint-labels-list li{float:left;margin-right:.4em;line-height:1.2}.ae-toggle .ae-logs-getdetails,.ae-toggle pre{display:none}.ae-log-expanded .ae-toggle pre{display:block}#ae-logs-c .ae-log .ae-toggle{cursor:default;background:none;padding-left:0}#ae-logs-c .ae-log .ae-toggle h5{cursor:pointer;background-position:0 .55em;background-repeat:no-repeat;padding-left:17px}.ae-log .ae-plus h5{background-image:url('/img/wgt/plus.gif')}.ae-log .ae-minus h5{background-image:url('/img/wgt/minus.gif')}.ae-log{overflow:hidden;background-color:#fff;padding:.3em 0;line-height:1.65;border-bottom:1px solid #c5d7ef}.ae-log .ae-even{background-color:#e9e9e9;border:0}.ae-log h5{font-weight:normal;white-space:nowrap;padding:.4em 0 0 0}.ae-log span,.ae-log strong{margin:0 .3em}.ae-log .ae-logs-snippet{color:#666}.ae-log pre,.ae-logs-expanded{padding:.3em 0 .5em 1.5em;margin:0;font-family:"Courier New"}.ae-log .file{font-weight:bold}.ae-log.ae-log-expanded .file{white-space:pre-wrap;word-wrap:break-word}.ae-logs-app .ae-logs-req{display:none}.ae-logs-req .ae-app,.ae-logs-both .ae-app{padding-left:1em}#ae-dos-blacklist-rejects-table{text-align:left}#ae-dash-quota-percent-col{width:3.5em}.ae-cron-status-ok{color:#008000;font-size:90%;font-weight:bold}.ae-cron-status-error{color:#a03;font-size:90%;font-weight:bold}#ae-cronjobs-table .ae-table td{vertical-align:top}#ae-tasks-table td{vertical-align:top}#ae-tasks-quota{margin:0 0 1em 0}#ae-tasks-quota .ae-dash-quota-bar{width:150px}#ae-tasks-quota #ae-dash-quota-bar-col,#ae-tasks-quota 
.ae-dash-quota-bar{width:200px}.ae-tasks-paused-row{color:#666;font-style:italic;font-weight:bold}#ae-tasks-quota .ae-quota-safety-limit{width:30%}#ae-tasks-table{margin-top:1em}#ae-tasks-queuecontrols{margin-top:1em;margin-bottom:1em}#ae-tasks-delete-col{width:1em}#ae-tasks-eta-col,#ae-tasks-creation-col{width:11em}#ae-tasks-actions-col{width:7em}#ae-tasks-retry-col{width:4em}#ae-tasks-execution-col{width:6em}#ae-tasks-body-col{width:6em}#ae-tasks-headers-col{width:7em}.ae-tasks-hex-column,.ae-tasks-ascii-column{width:16em}#ae-tasks-table .ae-tasks-arrow{text-align:center}.str{color:#080}.kwd{color:#008}.com{color:#800}.typ{color:#606}.lit{color:#066}.pun,.opn,.clo{color:#660}.pln{color:#000}.tag{color:#008}.atn{color:#606}.atv{color:#080}.dec{color:#606}@media print{.str{color:#060}.kwd{color:#006;font-weight:bold}.com{color:#600;font-style:italic}.typ{color:#404;font-weight:bold}.lit{color:#044}.pun,.opn,.clo{color:#440}.pln{color:#000}.tag{color:#006;font-weight:bold}.atn{color:#404}.atv{color:#060}}pre.prettyprint{padding:2px;border:1px solid #888}ol.linenums{margin-top:0;margin-bottom:0}li.L0,li.L1,li.L2,li.L3,li.L5,li.L6,li.L7,li.L8{list-style-type:none}li.L1,li.L3,li.L5,li.L7,li.L9{background:#eee} \ No newline at end of file +html,body,div,h1,h2,h3,h4,h5,h6,p,img,dl,dt,dd,ol,ul,li,table,caption,tbody,tfoot,thead,tr,th,td,form,fieldset,embed,object,applet{margin:0;padding:0;border:0;}body{font-size:62.5%;font-family:Arial,sans-serif;color:#000;background:#fff}a{color:#00c}a:active{color:#f00}a:visited{color:#551a8b}table{border-collapse:collapse;border-width:0;empty-cells:show}ul{padding:0 0 1em 1em}ol{padding:0 0 1em 1.3em}li{line-height:1.5em;padding:0 0 .5em 0}p{padding:0 0 1em 0}h1,h2,h3,h4,h5{padding:0 0 1em 0}h1,h2{font-size:1.3em}h3{font-size:1.1em}h4,h5,table{font-size:1em}sup,sub{font-size:.7em}input,select,textarea,option{font-family:inherit;font-size:inherit}.g-doc,.g-doc-1024,.g-doc-800{font-size:130%}.g-doc{width:100%;text-align:left}.g-section{width:100%;vertical-align:top;display:inline-block}*:first-child+html .g-section{display:block}* html .g-section{overflow:hidden}@-moz-document url-prefix(''){.g-section{overflow:hidden}}@-moz-document url-prefix(''){.g-section,tt:default{overflow:visible}}.g-section,.g-unit{zoom:1}.g-split .g-unit{text-align:right}.g-split .g-first{text-align:left}.g-doc-1024{width:73.074em;min-width:950px;margin:0 auto;text-align:left}* html .g-doc-1024{width:71.313em}*+html .g-doc-1024{width:71.313em}.g-doc-800{width:57.69em;min-width:750px;margin:0 auto;text-align:left}* html .g-doc-800{width:56.3em}*+html .g-doc-800{width:56.3em}.g-tpl-160 .g-unit,.g-unit .g-tpl-160 .g-unit,.g-unit .g-unit .g-tpl-160 .g-unit,.g-unit .g-unit .g-unit .g-tpl-160 .g-unit{margin:0 0 0 160px;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-160 .g-first,.g-unit .g-unit .g-tpl-160 .g-first,.g-unit .g-tpl-160 .g-first,.g-tpl-160 .g-first{margin:0;width:160px;float:left}.g-tpl-160-alt .g-unit,.g-unit .g-tpl-160-alt .g-unit,.g-unit .g-unit .g-tpl-160-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-160-alt .g-unit{margin:0 160px 0 0;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-160-alt .g-first,.g-unit .g-unit .g-tpl-160-alt .g-first,.g-unit .g-tpl-160-alt .g-first,.g-tpl-160-alt .g-first{margin:0;width:160px;float:right}.g-tpl-180 .g-unit,.g-unit .g-tpl-180 .g-unit,.g-unit .g-unit .g-tpl-180 .g-unit,.g-unit .g-unit .g-unit .g-tpl-180 .g-unit{margin:0 0 0 180px;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-180 .g-first,.g-unit .g-unit .g-tpl-180 
.g-first,.g-unit .g-tpl-180 .g-first,.g-tpl-180 .g-first{margin:0;width:180px;float:left}.g-tpl-180-alt .g-unit,.g-unit .g-tpl-180-alt .g-unit,.g-unit .g-unit .g-tpl-180-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-180-alt .g-unit{margin:0 180px 0 0;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-180-alt .g-first,.g-unit .g-unit .g-tpl-180-alt .g-first,.g-unit .g-tpl-180-alt .g-first,.g-tpl-180-alt .g-first{margin:0;width:180px;float:right}.g-tpl-300 .g-unit,.g-unit .g-tpl-300 .g-unit,.g-unit .g-unit .g-tpl-300 .g-unit,.g-unit .g-unit .g-unit .g-tpl-300 .g-unit{margin:0 0 0 300px;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-300 .g-first,.g-unit .g-unit .g-tpl-300 .g-first,.g-unit .g-tpl-300 .g-first,.g-tpl-300 .g-first{margin:0;width:300px;float:left}.g-tpl-300-alt .g-unit,.g-unit .g-tpl-300-alt .g-unit,.g-unit .g-unit .g-tpl-300-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-300-alt .g-unit{margin:0 300px 0 0;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-300-alt .g-first,.g-unit .g-unit .g-tpl-300-alt .g-first,.g-unit .g-tpl-300-alt .g-first,.g-tpl-300-alt .g-first{margin:0;width:300px;float:right}.g-tpl-25-75 .g-unit,.g-unit .g-tpl-25-75 .g-unit,.g-unit .g-unit .g-tpl-25-75 .g-unit,.g-unit .g-unit .g-unit .g-tpl-25-75 .g-unit{width:74.999%;float:right;margin:0}.g-unit .g-unit .g-unit .g-tpl-25-75 .g-first,.g-unit .g-unit .g-tpl-25-75 .g-first,.g-unit .g-tpl-25-75 .g-first,.g-tpl-25-75 .g-first{width:24.999%;float:left;margin:0}.g-tpl-25-75-alt .g-unit,.g-unit .g-tpl-25-75-alt .g-unit,.g-unit .g-unit .g-tpl-25-75-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-25-75-alt .g-unit{width:24.999%;float:left;margin:0}.g-unit .g-unit .g-unit .g-tpl-25-75-alt .g-first,.g-unit .g-unit .g-tpl-25-75-alt .g-first,.g-unit .g-tpl-25-75-alt .g-first,.g-tpl-25-75-alt .g-first{width:74.999%;float:right;margin:0}.g-tpl-75-25 .g-unit,.g-unit .g-tpl-75-25 .g-unit,.g-unit .g-unit .g-tpl-75-25 .g-unit,.g-unit .g-unit .g-unit .g-tpl-75-25 .g-unit{width:24.999%;float:right;margin:0}.g-unit .g-unit .g-unit .g-tpl-75-25 .g-first,.g-unit .g-unit .g-tpl-75-25 .g-first,.g-unit .g-tpl-75-25 .g-first,.g-tpl-75-25 .g-first{width:74.999%;float:left;margin:0}.g-tpl-75-25-alt .g-unit,.g-unit .g-tpl-75-25-alt .g-unit,.g-unit .g-unit .g-tpl-75-25-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-75-25-alt .g-unit{width:74.999%;float:left;margin:0}.g-unit .g-unit .g-unit .g-tpl-75-25-alt .g-first,.g-unit .g-unit .g-tpl-75-25-alt .g-first,.g-unit .g-tpl-75-25-alt .g-first,.g-tpl-75-25-alt .g-first{width:24.999%;float:right;margin:0}.g-tpl-33-67 .g-unit,.g-unit .g-tpl-33-67 .g-unit,.g-unit .g-unit .g-tpl-33-67 .g-unit,.g-unit .g-unit .g-unit .g-tpl-33-67 .g-unit{width:66.999%;float:right;margin:0}.g-unit .g-unit .g-unit .g-tpl-33-67 .g-first,.g-unit .g-unit .g-tpl-33-67 .g-first,.g-unit .g-tpl-33-67 .g-first,.g-tpl-33-67 .g-first{width:32.999%;float:left;margin:0}.g-tpl-33-67-alt .g-unit,.g-unit .g-tpl-33-67-alt .g-unit,.g-unit .g-unit .g-tpl-33-67-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-33-67-alt .g-unit{width:32.999%;float:left;margin:0}.g-unit .g-unit .g-unit .g-tpl-33-67-alt .g-first,.g-unit .g-unit .g-tpl-33-67-alt .g-first,.g-unit .g-tpl-33-67-alt .g-first,.g-tpl-33-67-alt .g-first{width:66.999%;float:right;margin:0}.g-tpl-67-33 .g-unit,.g-unit .g-tpl-67-33 .g-unit,.g-unit .g-unit .g-tpl-67-33 .g-unit,.g-unit .g-unit .g-unit .g-tpl-67-33 .g-unit{width:32.999%;float:right;margin:0}.g-unit .g-unit .g-unit .g-tpl-67-33 .g-first,.g-unit .g-unit .g-tpl-67-33 .g-first,.g-unit .g-tpl-67-33 .g-first,.g-tpl-67-33 
.g-first{width:66.999%;float:left;margin:0}.g-tpl-67-33-alt .g-unit,.g-unit .g-tpl-67-33-alt .g-unit,.g-unit .g-unit .g-tpl-67-33-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-67-33-alt .g-unit{width:66.999%;float:left;margin:0}.g-unit .g-unit .g-unit .g-tpl-67-33-alt .g-first,.g-unit .g-unit .g-tpl-67-33-alt .g-first,.g-unit .g-tpl-67-33-alt .g-first,.g-tpl-67-33-alt .g-first{width:32.999%;float:right;margin:0}.g-tpl-50-50 .g-unit,.g-unit .g-tpl-50-50 .g-unit,.g-unit .g-unit .g-tpl-50-50 .g-unit,.g-unit .g-unit .g-unit .g-tpl-50-50 .g-unit{width:49.999%;float:right;margin:0}.g-unit .g-unit .g-unit .g-tpl-50-50 .g-first,.g-unit .g-unit .g-tpl-50-50 .g-first,.g-unit .g-tpl-50-50 .g-first,.g-tpl-50-50 .g-first{width:49.999%;float:left;margin:0}.g-tpl-50-50-alt .g-unit,.g-unit .g-tpl-50-50-alt .g-unit,.g-unit .g-unit .g-tpl-50-50-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-50-50-alt .g-unit{width:49.999%;float:left;margin:0}.g-unit .g-unit .g-unit .g-tpl-50-50-alt .g-first,.g-unit .g-unit .g-tpl-50-50-alt .g-first,.g-unit .g-tpl-50-50-alt .g-first,.g-tpl-50-50-alt .g-first{width:49.999%;float:right;margin:0}.g-tpl-nest{width:auto}.g-tpl-nest .g-section{display:inline}.g-tpl-nest .g-unit,.g-unit .g-tpl-nest .g-unit,.g-unit .g-unit .g-tpl-nest .g-unit,.g-unit .g-unit .g-unit .g-tpl-nest .g-unit{float:left;width:auto;margin:0}.g-tpl-nest-alt .g-unit,.g-unit .g-tpl-nest-alt .g-unit,.g-unit .g-unit .g-tpl-nest-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-nest-alt .g-unit{float:right;width:auto;margin:0}.goog-button{border-width:1px;border-style:solid;border-color:#bbb #999 #999 #bbb;border-radius:2px;-webkit-border-radius:2px;-moz-border-radius:2px;font:normal normal normal 13px/13px Arial,sans-serif;color:#000;text-align:middle;text-decoration:none;text-shadow:0 1px 1px rgba(255,255,255,1);background:#eee;background-image:-webkit-gradient(linear,0 0,0 100%,from(#fff),to(#ddd));background:-moz-linear-gradient(top,#fff,#ddd);filter:progid:DXImageTransform.Microsoft.Gradient(EndColorstr='#dddddd',StartColorstr='#ffffff',GradientType=0);cursor:pointer;margin:0;display:inline;display:-moz-inline-box;display:inline-block;*overflow:visible;padding:4px 8px 5px}a.goog-button,span.goog-button,div.goog-button{padding:4px 8px 5px}.goog-button:visited{color:#000}.goog-button{*display:inline}.goog-button:focus,.goog-button:hover{border-color:#000}.goog-button:active,.goog-button-active{color:#000;background-color:#bbb;border-color:#999 #bbb #bbb #999;background-image:-webkit-gradient(linear,0 0,0 100%,from(#ddd),to(#fff));background-image:-moz-linear-gradient(top,#ddd,#fff);filter:progid:DXImageTransform.Microsoft.Gradient(EndColorstr='#ffffff',StartColorstr='#dddddd',GradientType=0)}.goog-button[disabled],.goog-button[disabled]:active,.goog-button[disabled]:hover{color:#666;border-color:#ddd;background-color:#f3f3f3;background-image:none;text-shadow:none;cursor:auto}.goog-button{padding:5px 8px 4px }.goog-button{*padding:4px 7px 2px}html>body input.goog-button,x:-moz-any-link,x:default,html>body button.goog-button,x:-moz-any-link,x:default{padding-top:3px;padding-bottom:2px}a.goog-button,x:-moz-any-link,x:default,span.goog-button,x:-moz-any-link,x:default,div.goog-button,x:-moz-any-link,x:default{padding:4px 8px 5px}.goog-button-fixed{padding-left:0!important;padding-right:0!important;width:100%}button.goog-button-icon-c{padding-top:1px;padding-bottom:1px}button.goog-button-icon-c{padding-top:3px ;padding-bottom:2px }button.goog-button-icon-c{*padding-top:0;*padding-bottom:0}html>body 
button.goog-button-icon-c,x:-moz-any-link,x:default{padding-top:1px;padding-bottom:1px}.goog-button-icon{display:block;margin:0 auto;height:18px;width:18px}html>body .goog-inline-block{display:-moz-inline-box;display:inline-block;}.goog-inline-block{position:relative;display:inline-block}* html .goog-inline-block{display:inline}*:first-child+html .goog-inline-block{display:inline}.goog-custom-button{margin:0 2px 2px;border:0;padding:0;font:normal Tahoma,Arial,sans-serif;color:#000;text-decoration:none;list-style:none;vertical-align:middle;cursor:pointer;outline:none;background:#eee;background-image:-webkit-gradient(linear,0 0,0 100%,from(#fff),to(#ddd));background:-moz-linear-gradient(top,#fff,#ddd);filter:progid:DXImageTransform.Microsoft.Gradient(EndColorstr='#dddddd',StartColorstr='#ffffff',GradientType=0)}.goog-custom-button-outer-box,.goog-custom-button-inner-box{border-style:solid;border-color:#bbb #999 #999 #bbb;vertical-align:top}.goog-custom-button-outer-box{margin:0;border-width:1px 0;padding:0}.goog-custom-button-inner-box{margin:0 -1px;border-width:0 1px;padding:3px 4px}* html .goog-custom-button-inner-box{left:-1px}* html .goog-custom-button-rtl .goog-custom-button-outer-box{left:-1px}* html .goog-custom-button-rtl .goog-custom-button-inner-box{left:0}*:first-child+html .goog-custom-button-inner-box{left:-1px}*:first-child+html .goog-custom-button-collapse-right .goog-custom-button-inner-box{border-left-width:2px}*:first-child+html .goog-custom-button-collapse-left .goog-custom-button-inner-box{border-right-width:2px}*:first-child+html .goog-custom-button-collapse-right.goog-custom-button-collapse-left .goog-custom-button-inner-box{border-width:0 1px}*:first-child+html .goog-custom-button-rtl .goog-custom-button-inner-box{left:1px}::root .goog-custom-button,::root .goog-custom-button-outer-box{line-height:0}::root .goog-custom-button-inner-box{line-height:normal}.goog-custom-button-disabled{background-image:none!important;opacity:0.4;-moz-opacity:0.4;filter:alpha(opacity=40)}.goog-custom-button-disabled .goog-custom-button-outer-box,.goog-custom-button-disabled .goog-custom-button-inner-box{color:#333!important;border-color:#999!important}* html .goog-custom-button-disabled{margin:2px 1px!important;padding:0 1px!important}*:first-child+html .goog-custom-button-disabled{margin:2px 1px!important;padding:0 1px!important}.goog-custom-button-hover .goog-custom-button-outer-box,.goog-custom-button-hover .goog-custom-button-inner-box{border-color:#000!important;}.goog-custom-button-active,.goog-custom-button-checked{background-color:#bbb;background-position:bottom left;background-image:-webkit-gradient(linear,0 0,0 100%,from(#ddd),to(#fff));background:-moz-linear-gradient(top,#ddd,#fff);filter:progid:DXImageTransform.Microsoft.Gradient(EndColorstr='#ffffff',StartColorstr='#dddddd',GradientType=0)}.goog-custom-button-focused .goog-custom-button-outer-box,.goog-custom-button-focused .goog-custom-button-inner-box,.goog-custom-button-focused.goog-custom-button-collapse-left .goog-custom-button-inner-box,.goog-custom-button-focused.goog-custom-button-collapse-left.goog-custom-button-checked .goog-custom-button-inner-box{border-color:#000}.goog-custom-button-collapse-right,.goog-custom-button-collapse-right .goog-custom-button-outer-box,.goog-custom-button-collapse-right .goog-custom-button-inner-box{margin-right:0}.goog-custom-button-collapse-left,.goog-custom-button-collapse-left .goog-custom-button-outer-box,.goog-custom-button-collapse-left 
.goog-custom-button-inner-box{margin-left:0}.goog-custom-button-collapse-left .goog-custom-button-inner-box{border-left:1px solid #fff}.goog-custom-button-collapse-left.goog-custom-button-checked .goog-custom-button-inner-box{border-left:1px solid #ddd}* html .goog-custom-button-collapse-left .goog-custom-button-inner-box{left:0}*:first-child+html .goog-custom-button-collapse-left .goog-custom-button-inner-box{left:0}.goog-date-picker th,.goog-date-picker td{font-family:arial,sans-serif;text-align:center}.goog-date-picker th{font-size:.9em;font-weight:bold;color:#666667;background-color:#c3d9ff}.goog-date-picker td{vertical-align:middle;padding:2px 3px}.goog-date-picker{-moz-user-focus:normal;-moz-user-select:none;position:absolute;border:1px solid gray;float:left;font-family:arial,sans-serif;padding-left:1px;background:white}.goog-date-picker-menu{position:absolute;background:threedface;border:1px solid gray;-moz-user-focus:normal}.goog-date-picker-menu ul{list-style:none;margin:0;padding:0}.goog-date-picker-menu ul li{cursor:default}.goog-date-picker-menu-selected{background-color:#aaccee}.goog-date-picker td div{float:left}.goog-date-picker button{padding:0;margin:1px;border:1px outset gray}.goog-date-picker-week{padding:1px 3px}.goog-date-picker-wday{padding:1px 3px}.goog-date-picker-today-cont{text-align:left!important}.goog-date-picker-none-cont{text-align:right!important}.goog-date-picker-head td{text-align:center}.goog-date-picker-month{width:12ex}.goog-date-picker-year{width:6ex}.goog-date-picker table{border-collapse:collapse}.goog-date-picker-selected{background-color:#aaccee!important;color:blue!important}.goog-date-picker-today{font-weight:bold!important}.goog-date-picker-other-month{-moz-opacity:0.3;filter:Alpha(Opacity=30)}.sat,.sun{background:#eee}#button1,#button2{display:block;width:60px;text-align:center;margin:10px;padding:10px;font:normal .8em arial,sans-serif;border:1px solid #000}.goog-menu{position:absolute;color:#000;border:1px solid #b5b6b5;background-color:#f3f3f7;cursor:default;font:normal small arial,helvetica,sans-serif;margin:0;padding:0;outline:none}.goog-menuitem{padding:2px 5px;margin:0;list-style:none}.goog-menuitem-highlight{background-color:#4279a5;color:#fff}.goog-menuitem-disabled{color:#999}.goog-option{padding-left:15px!important}.goog-option-selected{background-image:url('/img/check.gif');background-position:4px 50%;background-repeat:no-repeat}.goog-menuseparator{position:relative;margin:2px 0;border-top:1px solid #999;padding:0;outline:none}.goog-submenu{position:relative}.goog-submenu-arrow{position:absolute;display:block;width:11px;height:11px;right:3px;top:4px;background-image:url('/img/menu-arrows.gif');background-repeat:no-repeat;background-position:0 0;font-size:1px}.goog-menuitem-highlight .goog-submenu-arrow{background-position:0 -11px}.goog-menuitem-disabled .goog-submenu-arrow{display:none}.goog-menu-filter{margin:2px;border:1px solid silver;background:white;overflow:hidden}.goog-menu-filter div{color:gray;position:absolute;padding:1px}.goog-menu-filter input{margin:0;border:0;background:transparent;width:100%}.goog-menuitem-partially-checked{background-image:url('/img/check-outline.gif');background-position:4px 50%;background-repeat:no-repeat}.goog-menuitem-fully-checked{background-image:url('/img/check.gif');background-position:4px 50%;background-repeat:no-repeat}.goog-menu-button{margin:0 2px 2px 2px;border:0;padding:0;font:normal Tahoma,Arial,sans-serif;color:#000;background:#ddd url("/img/button-bg.gif") repeat-x top 
left;text-decoration:none;list-style:none;vertical-align:middle;cursor:pointer;outline:none}.goog-menu-button-outer-box,.goog-menu-button-inner-box{border-style:solid;border-color:#aaa;vertical-align:middle}.goog-menu-button-outer-box{margin:0;border-width:1px 0;padding:0}.goog-menu-button-inner-box{margin:0 -1px;border-width:0 1px;padding:0 4px 2px 4px}* html .goog-menu-button-inner-box{left:-1px}* html .goog-menu-button-rtl .goog-menu-button-outer-box{left:-1px}* html .goog-menu-button-rtl .goog-menu-button-inner-box{left:0}*:first-child+html .goog-menu-button-inner-box{left:-1px}*:first-child+html .goog-menu-button-rtl .goog-menu-button-inner-box{left:1px}::root .goog-menu-button,::root .goog-menu-button-outer-box,::root .goog-menu-button-inner-box{line-height:0}::root .goog-menu-button-caption,::root .goog-menu-button-dropdown{line-height:normal}.goog-menu-button-disabled{background-image:none!important;opacity:0.4;-moz-opacity:0.4;filter:alpha(opacity=40)}.goog-menu-button-disabled .goog-menu-button-outer-box,.goog-menu-button-disabled .goog-menu-button-inner-box,.goog-menu-button-disabled .goog-menu-button-caption,.goog-menu-button-disabled .goog-menu-button-dropdown{color:#333!important;border-color:#999!important}* html .goog-menu-button-disabled{margin:2px 1px!important;padding:0 1px!important}*:first-child+html .goog-menu-button-disabled{margin:2px 1px!important;padding:0 1px!important}.goog-menu-button-hover .goog-menu-button-outer-box,.goog-menu-button-hover .goog-menu-button-inner-box{border-color:#9cf #69e #69e #7af!important;}.goog-menu-button-active,.goog-menu-button-open{background-color:#bbb;background-position:bottom left}.goog-menu-button-focused .goog-menu-button-outer-box,.goog-menu-button-focused .goog-menu-button-inner-box{border-color:#3366cc}.goog-menu-button-caption{padding:0 4px 0 0;vertical-align:middle}.goog-menu-button-rtl .goog-menu-button-caption{padding:0 0 0 4px}.goog-menu-button-dropdown{width:7px;background:url('/img/toolbar_icons.gif') no-repeat -176px;vertical-align:middle}.goog-flat-menu-button{margin:0 2px;padding:1px 4px;font:normal 95% Tahoma,Arial,sans-serif;color:#333;text-decoration:none;list-style:none;vertical-align:middle;cursor:pointer;outline:none;-moz-outline:none;border-width:1px;border-style:solid;border-color:#c9c9c9;background-color:#fff}.goog-flat-menu-button-disabled *{color:#999;border-color:#ccc;cursor:default}.goog-flat-menu-button-hover,.goog-flat-menu-button-hover{border-color:#9cf #69e #69e #7af!important;}.goog-flat-menu-button-active{background-color:#bbb;background-position:bottom left}.goog-flat-menu-button-focused{border-color:#3366cc}.goog-flat-menu-button-caption{padding-right:10px;vertical-align:middle}.goog-flat-menu-button-dropdown{width:7px;background:url('/img/toolbar_icons.gif') no-repeat -176px;vertical-align:middle}h1{font-size:1.8em}.g-doc{width:auto;margin:0 10px}.g-doc-1024{margin-left:10px}#ae-logo{background:url('//www.google.com/images/logos/app_engine_logo_sm.gif') 0 0 no-repeat;display:block;width:178px;height:30px;margin:4px 0 0 0}.ae-ir span{position:absolute;display:block;width:0;height:0;overflow:hidden}.ae-noscript{position:absolute;left:-5000px}#ae-lhs-nav{border-right:3px solid #e5ecf9}.ae-notification{margin-bottom:.6em;text-align:center}.ae-notification strong{display:block;width:55%;margin:0 auto;text-align:center;padding:.6em;background-color:#fff1a8;font-weight:bold}.ae-alert{font-weight:bold;background:url('/img/icn/warning.png') 
no-repeat;margin-bottom:.5em;padding-left:1.8em}.ae-info{background:url('/img/icn/icn-info.gif') no-repeat;margin-bottom:.5em;padding-left:1.8em}.ae-promo{padding:.5em .8em;margin:.6em 0;background-color:#fffbe8;border:1px solid #fff1a9;text-align:left}.ae-promo strong{position:relative;top:.3em}.ae-alert-text,.ae-warning-text{background-color:transparent;background-position:right 1px;padding:0 18px 0 0}.ae-alert-text{color:#c00}.ae-warning-text{color:#f90}.ae-alert-c span{display:inline-block}.ae-message{border:1px solid #e5ecf9;background-color:#f6f9ff;margin-bottom:1em;padding:.5em}.ae-errorbox{border:1px solid #f00;background-color:#fee;margin-bottom:1em;padding:1em}#bd .ae-errorbox ul{padding-bottom:0}.ae-form dt{font-weight:bold}.ae-form dt em,.ae-field-hint{margin-top:.2em;color:#666667;font-size:.85em}.ae-field-hint-inline{color:#666667;font-size:.85em;display:inline}.ae-field-yyyymmdd,.ae-field-hhmmss{width:6em}.ae-field-hint-hhmmss{margin-left:2.3em}.ae-form label{display:block;margin:0 0 .2em 0;font-weight:bold}.ae-radio{margin-bottom:.3em}.ae-radio label{display:inline}.ae-form dd,.ae-input-row{margin-bottom:.6em}.ae-input-row-group{border:1px solid #fff1a9;background:#fffbe8;padding:8px}.ae-btn-row{margin-top:1.4em;margin-bottom:1em}.ae-btn-row-note{padding:5px 0 6px 0}.ae-btn-row-note span{padding-left:18px;padding-right:.5em;background:transparent url('/img/icn/icn-info.gif') 0 0 no-repeat}.ae-btn-primary{font-weight:bold}form .ae-cancel{margin-left:.5em}.ae-submit-inline{margin-left:.8em}.ae-radio-bullet{width:20px;float:left}.ae-label-hanging-indent{margin-left:5px}.ae-divider{margin:0 .6em 0 .5em}.ae-nowrap{white-space:nowrap}.ae-pre-wrap{white-space:pre-wrap;white-space:-moz-pre-wrap;white-space:-pre-wrap;white-space:-o-pre-wrap;word-wrap:break-word;_white-space:pre;}wbr:after{content:"\00200b"}a button{text-decoration:none}.ae-alert ul{margin-bottom:.75em;margin-top:.25em;line-height:1.5em}.ae-alert h4{color:#000;font-weight:bold;padding:0 0 .5em}.ae-form-simple-list{list-style-type:none;padding:0;margin-bottom:1em}.ae-form-simple-list li{padding:.3em 0 .5em .5em;border-bottom:1px solid #c3d9ff}div.ae-datastore-index-to-delete,div.ae-datastore-index-to-build{color:#aaa}#hd p{padding:0}#hd li{display:inline}ul{padding:0 0 1em 1.2em}#ae-userinfo{text-align:right;white-space:nowrap;}#ae-userinfo ul{padding-bottom:0;padding-top:5px}#ae-appbar-lrg{margin:0 0 1.25em 0;padding:.25em .5em;background-color:#e5ecf9;border-top:1px solid #36c}#ae-appbar-lrg h1{font-size:1.2em;padding:0}#ae-appbar-lrg h1 span{font-size:80%;font-weight:normal}#ae-appbar-lrg form{display:inline;padding-right:.1em;margin-right:.5em}#ae-appbar-lrg strong{white-space:nowrap}#ae-appbar-sml{margin:0 0 1.25em 0;height:8px;padding:0 .5em;background:#e5ecf9}.ae-rounded-sml{border-radius:3px;-moz-border-radius:3px;-webkit-border-radius:3px}#ae-appbar-lrg a{margin-top:.3em}#ae-engine-version-bar{margin:0 0 1em}a.ae-ext-link,a span.ae-ext-link{background:url('/img/icn/icn-open-in-new-window.png') no-repeat right;padding-right:18px;margin-right:8px}.ae-no-pad{padding-left:1em}.ae-message h4{margin-bottom:.3em;padding-bottom:0}#ft{text-align:center;margin:2.5em 0 1em;padding-top:.5em;border-top:2px solid #c3d9ff}#bd h3{font-weight:bold;font-size:1.4em}#bd h3 .ae-apps-switch{font-weight:normal;font-size:.7em;margin-left:2em}#bd p{padding:0 0 1em 0}#ae-content{padding-left:1em}.ae-unimportant{color:#666}.ae-new-usr td{border-top:1px solid #ccccce;background-color:#ffe}.ae-error-td td{border:2px solid 
#f00;background-color:#fee}.ae-delete{cursor:pointer;border:none;background:transparent;}.ae-btn-large{background:#039 url('/img/icn/button_back.png') repeat-x;color:#fff;font-weight:bold;font-size:1.2em;padding:.5em;border:2px outset #000;cursor:pointer}.ae-breadcrumb{margin:0 0 1em}.ae-disabled,a.ae-disabled,a.ae-disabled:hover,a.ae-disabled:active{color:#666!important;text-decoration:none!important;cursor:default!important;opacity:.4!important;-moz-opacity:.4!important;filter:alpha(opacity=40)!important}input.ae-readonly{border:2px solid transparent;border-left:0;background-color:transparent}span.ae-text-input-clone{padding:5px 5px 5px 0}.ae-loading{opacity:.4;-moz-opacity:.4;filter:alpha(opacity=40)}.ae-tip{margin:1em 0;background:url('/img/tip.png') top left no-repeat;padding:2px 0 0 25px}sup.ae-new-sup{color:red}sup.ae-new-sup a{border-bottom:1px solid red;color:red;text-decoration:none}.ae-action{color:#00c;cursor:pointer;text-decoration:underline}.ae-toggle{padding-left:16px;background-position:left center;background-repeat:no-repeat;cursor:pointer}.ae-minus{background-image:url('/img/wgt/minus.gif')}.ae-plus{background-image:url('/img/wgt/plus.gif')}.ae-print{background-image:url('/img/print.gif');padding-left:19px}.ae-download{background:url('/img/download.png') left center no-repeat;padding-left:22px}.ae-currency,.ae-table thead th.ae-currency{text-align:right;white-space:nowrap}#ae-loading{font-size:1.2em;position:absolute;text-align:center;top:0;width:100%}#ae-loading div{margin:0 auto;background:#fff1a9;width:5em;font-weight:bold;padding:4px 10px;-moz-border-radius-bottomleft:3px;-moz-border-radius-bottomright:3px;-webkit-border-radius-bottomleft:3px;-webkit-border-radius-bottomright:3px}.ae-occlude{filter:alpha(opacity=0);position:absolute}.g-tpl-66-34 .g-unit,.g-unit .g-tpl-66-34 .g-unit,.g-unit .g-unit .g-tpl-66-34 .g-unit,.g-unit .g-unit .g-unit .g-tpl-66-34 .g-unit{display:inline;margin:0;width:33.999%;float:right}.g-unit .g-unit .g-unit .g-tpl-66-34 .g-first,.g-unit .g-unit .g-tpl-66-34 .g-first,.g-unit .g-tpl-66-34 .g-first,.g-tpl-66-34 .g-first{display:inline;margin:0;width:65.999%;float:left}.ae-ie6-c{_margin-right:-2000px;_position:relative;_width:100%;background:#fff}h2.ae-section-header{background:#e5ecf9;padding:.2em .4em;margin-bottom:.5em}.ae-field-span{padding:3px 0}ul.ae-admin-list li{margin:0 0;padding:.1em 0}#ae-feedback-bar{background-color:#f9edbe;border:1px solid #f0c36d;-moz-border-radius:2px;-webkit-border-radius:2px;border-radius:2px;-webkit-box-shadow:0 2px 4px rgba(0,0,0,0.2);-moz-box-shadow:0 2px 4px rgba(0,0,0,0.2);box-shadow:0 2px 4px rgba(0,0,0,0.2);left:280px;margin-top:-5px;overflow:hidden;padding:10px 16px;position:fixed;text-align:center;z-index:999}.ae-feedback-info{margin-bottom:7px}.ae-feedback-option{color:#222;display:inline-block;font-size:0.87em;vertical-align:bottom;width:75px}.ae-feedback-close-icon{background:url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAA8AAAAPCAYAAAA71pVKAAAARUlEQVR42mMon7n7fwWZmAFEkAOorxmXYdjUoWhG9g+6QnRxnDYjK8RnIFY/YwtZkgIMn0ba2Ey2n6kS2mTFM/2TJ7kYAJSLDRhvVX1GAAAAAElFTkSuQmCC') no-repeat 0 0;float:right;height:15px;margin-right:-11px;width:15px}select{font:13px/13px Arial,sans-serif;color:#000;border-width:1px;border-style:solid;border-color:#bbb #999 #999 #bbb;-webkit-border-radius:2px;-moz-border-radius:2px;background:#eee;background:-webkit-gradient(linear,0 0,0 
100%,from(#fff),to(#ddd));background:-moz-linear-gradient(top,#fff,#ddd);filter:progid:DXImageTransform.Microsoft.Gradient(EndColorstr='#dddddd',StartColorstr='#ffffff',GradientType=0);cursor:pointer;padding:2px 1px;margin:0}select:hover{border-color:#000}select[disabled],select[disabled]:active{color:#666;border-color:#ddd;background-color:#f3f3f3;background-image:none;text-shadow:none;cursor:auto}.ae-table-plain{border-collapse:collapse;width:100%}.ae-table{border:1px solid #c5d7ef;border-collapse:collapse;width:100%}#bd h2.ae-table-title{background:#e5ecf9;margin:0;color:#000;font-size:1em;padding:3px 0 3px 5px;border-left:1px solid #c5d7ef;border-right:1px solid #c5d7ef;border-top:1px solid #c5d7ef}.ae-table-caption,.ae-table caption{border:1px solid #c5d7ef;background:#e5ecf9;-moz-margin-start:-1px}.ae-table caption{padding:3px 5px;text-align:left}.ae-table th,.ae-table td{background-color:#fff;padding:.35em 1em .25em .35em;margin:0}.ae-table thead th{font-weight:bold;text-align:left;background:#c5d7ef;vertical-align:bottom}.ae-table thead th .ae-no-bold{font-weight:normal}.ae-table tfoot tr td{border-top:1px solid #c5d7ef;background-color:#e5ecf9}.ae-table td{border-top:1px solid #c5d7ef;border-bottom:1px solid #c5d7ef}.ae-even>td,.ae-even th,.ae-even-top td,.ae-even-tween td,.ae-even-bottom td,ol.ae-even{background-color:#e9e9e9;border-top:1px solid #c5d7ef;border-bottom:1px solid #c5d7ef}.ae-even-top td{border-bottom:0}.ae-even-bottom td{border-top:0}.ae-even-tween td{border:0}.ae-table .ae-tween td{border:0}.ae-table .ae-tween-top td{border-bottom:0}.ae-table .ae-tween-bottom td{border-top:0}#bd .ae-table .cbc{width:1.5em;padding-right:0}.ae-table #ae-live td{background-color:#ffeac0}.ae-table-fixed{table-layout:fixed}.ae-table-fixed td,.ae-table-nowrap{overflow:hidden;white-space:nowrap}.ae-paginate strong{margin:0 .5em}tfoot .ae-paginate{text-align:right}.ae-table-caption .ae-paginate,.ae-table-caption .ae-orderby{padding:2px 5px}.modal-dialog{background:#c1d9ff;border:1px solid #3a5774;color:#000;padding:4px;position:absolute;font-size:1.3em;-moz-box-shadow:0 1px 4px #333;-webkit-box-shadow:0 1px 4px #333;box-shadow:0 1px 4px #333}.modal-dialog a,.modal-dialog a:link,.modal-dialog a:visited{color:#06c;cursor:pointer}.modal-dialog-bg{background:#666;left:0;position:absolute;top:0}.modal-dialog-title{background:#e0edfe;color:#000;cursor:pointer;font-size:120%;font-weight:bold;padding:8px 15px 8px 8px;position:relative;_zoom:1;}.modal-dialog-title-close{background:#e0edfe url('https://ssl.gstatic.com/editor/editortoolbar.png') no-repeat -528px 0;cursor:default;height:15px;position:absolute;right:10px;top:8px;width:15px;vertical-align:middle}.modal-dialog-buttons,.modal-dialog-content{background-color:#fff;padding:8px}.modal-dialog-buttons button{margin-right:.75em}.goog-buttonset-default{font-weight:bold}.goog-tab{position:relative;border:1px solid #8ac;padding:4px 9px;color:#000;background:#e5ecf9;border-top-left-radius:2px;border-top-right-radius:2px;-moz-border-radius-topleft:2px;-webkit-border-top-left-radius:2px;-moz-border-radius-topright:2px;-webkit-border-top-right-radius:2px}.goog-tab-bar-top .goog-tab{margin:1px 4px 0 0;border-bottom:0;float:left}.goog-tab-bar-bottom .goog-tab{margin:0 4px 1px 0;border-top:0;float:left}.goog-tab-bar-start .goog-tab{margin:0 0 4px 1px;border-right:0}.goog-tab-bar-end .goog-tab{margin:0 1px 4px 
0;border-left:0}.goog-tab-hover{text-decoration:underline;cursor:pointer}.goog-tab-disabled{color:#fff;background:#ccc;border-color:#ccc}.goog-tab-selected{background:#fff!important;color:black;font-weight:bold}.goog-tab-bar-top .goog-tab-selected{top:1px;margin-top:0;padding-bottom:5px}.goog-tab-bar-bottom .goog-tab-selected{top:-1px;margin-bottom:0;padding-top:5px}.goog-tab-bar-start .goog-tab-selected{left:1px;margin-left:0;padding-right:9px}.goog-tab-bar-end .goog-tab-selected{left:-1px;margin-right:0;padding-left:9px}.goog-tab-content{padding:.1em .8em .8em .8em;border:1px solid #8ac;border-top:none}.goog-tab-bar{position:relative;margin:0 0 0 5px;border:0;padding:0;list-style:none;cursor:default;outline:none}.goog-tab-bar-clear{border-top:1px solid #8ac;clear:both;height:0;overflow:hidden}.goog-tab-bar-start{float:left}.goog-tab-bar-end{float:right}* html .goog-tab-bar-start{margin-right:-3px}* html .goog-tab-bar-end{margin-left:-3px}#ae-nav ul{list-style-type:none;margin:0;padding:1em 0}#ae-nav ul li{padding-left:.5em}#ae-nav .ae-nav-selected{color:#000;display:block;font-weight:bold;background-color:#e5ecf9;margin-right:-1px;border-top-left-radius:4px;-moz-border-radius-topleft:4px;-webkit-border-top-left-radius:4px;border-bottom-left-radius:4px;-moz-border-radius-bottomleft:4px;-webkit-border-bottom-left-radius:4px}#ae-nav .ae-nav-bold{font-weight:bold}#ae-nav ul li span.ae-nav-disabled{color:#666}#ae-nav ul ul{margin:0;padding:0 0 0 .5em}#ae-nav ul ul li{padding-left:.5em}#ae-nav ul li a,#ae-nav ul li span,#ae-nav ul ul li a{padding-left:.5em}#ae-nav li a:link,#ae-nav li a:visited{color:#00c}.ae-nav-group{padding:.5em;margin:0 .75em 0 0;background-color:#fffbe8;border:1px solid #fff1a9}.ae-nav-group h4{font-weight:bold;padding:auto auto .5em .5em;padding-left:.4em;margin-bottom:.5em;padding-bottom:0}.ae-nav-group ul{margin:0 0 .5em 0;padding:0 0 0 1.3em;list-style-type:none}.ae-nav-group ul li{padding-bottom:.5em}.ae-nav-group li a:link,.ae-nav-group li a:visited{color:#00c}.ae-nav-group li a:hover{color:#00c}@media print{body{font-size:13px;width:8.5in;background:#fff}table,.ae-table-fixed{table-layout:automatic}tr{display:table-row!important}.g-doc-1024{width:8.5in}#ae-appbar-lrg,.ae-table-caption,.ae-table-nowrap,.ae-nowrap,th,td{overflow:visible!important;white-space:normal!important;background:#fff!important}.ae-print,.ae-toggle{display:none}#ae-lhs-nav-c{display:none}#ae-content{margin:0;padding:0}.goog-zippy-collapsed,.goog-zippy-expanded{background:none!important;padding:0!important}}#ae-admin-dev-table{margin:0 0 1em 0}.ae-admin-dev-tip,.ae-admin-dev-tip.ae-tip{margin:-0.31em 0 2.77em}#ae-sms-countryselect{margin-right:.5em}#ae-admin-enable-form{margin-bottom:1em}#ae-admin-services-c{margin-top:2em}#ae-admin-services{padding:0 0 0 3em;margin-bottom:1em;font-weight:bold}#ae-admin-logs-table-c{_margin-right:-2000px;_position:relative;_width:100%;background:#fff}#ae-admin-logs-table{margin:0;padding:0}#ae-admin-logs-filters{padding:3px 0 3px 5px}#ae-admin-logs-pagination{padding:6px 5px 0 0;text-align:right;width:45%}#ae-admin-logs-pagination span.ae-disabled{color:#666;background-color:transparent}#ae-admin-logs-table td{white-space:nowrap}#ae-admin-logs-timezone{float:right;margin-bottom:7px}#ae-storage-content div.ae-alert{padding-bottom:5px}#ae-admin-performance-form 
input[type=text]{width:2em}.ae-admin-performance-value{font-weight:normal}.ae-admin-performance-static-value{color:#666}#ae-admin-performance-frontend-class{margin-left:0.5em}.goog-slider-horizontal,.goog-twothumbslider-horizontal{position:relative;width:502px;height:7px;display:block;outline:0;margin:1.0em 0 0.9em 3em}.ae-slider-rail:before{position:relative;top:-0.462em;float:left;content:'Min';margin:0 0 0 -3em;color:#999}.ae-slider-rail{position:absolute;background-color:#d9d9d9;top:0;right:8px;bottom:0;left:8px;border:solid 1px;border-color:#a6a6a6 #b3b3b3 #bfbfbf;border-radius:5px}.ae-slider-rail:after{position:relative;top:-0.462em;float:right;content:'Max';margin:0 -3em 0 0;color:#999}.goog-slider-horizontal .goog-slider-thumb,.goog-twothumbslider-horizontal .goog-twothumbslider-value-thumb,.goog-twothumbslider-horizontal .goog-twothumbslider-extent-thumb{position:absolute;width:17px;height:17px;background:transparent url('/img/slider_thumb-down.png') no-repeat;outline:0}.goog-slider-horizontal .goog-slider-thumb{top:-5px}.goog-twothumbslider-horizontal .goog-twothumbslider-value-thumb{top:-11px}.goog-twothumbslider-horizontal .goog-twothumbslider-extent-thumb{top:2px;background-image:url('/img/slider_thumb-up.png')}.ae-admin-performance-scale{position:relative;display:inline-block;width:502px;margin:0 0 2.7em 3em}.ae-admin-performance-scale .ae-admin-performance-scale-start{position:absolute;display:inline-block;top:0;width:100%;text-align:left}.ae-admin-performance-scale .ae-admin-performance-scale-mid{position:absolute;display:inline-block;top:0;width:100%;text-align:center}.ae-admin-performance-scale .ae-admin-performance-scale-end{position:absolute;display:inline-block;top:0;width:100%;text-align:right}.ae-pagespeed-controls{margin:0 0 1em 8px}.ae-pagespeed-controls label{display:inline;font-weight:normal}#ae-pagespeed-flush-cache{margin-left:1em}#ae-pagespeed-flush-cache-status{margin-left:1em;font-weight:bold}.ae-absolute-container{display:inline-block;width:100%}.ae-hidden-range{display:none}.ae-default-version-radio-column{width:1em}#ae-settings-builtins-change{margin-bottom:1em}#ae-memcache-full-flush-warning{color:#c00}#ae-memcache-partial-flush-warning{color:#c00}.prettyprint{background-color:#fafafa;font:1em "Droid Sans Mono",monospace;margin-right:20px}#formatted-performance-settings>div{float:left}#ae-billing-form-c{_margin-right:-3000px;_position:relative;_width:100%}.ae-rounded-top-small{-moz-border-radius-topleft:3px;-webkit-border-top-left-radius:3px;-moz-border-radius-topright:3px;-webkit-border-top-right-radius:3px}.ae-progress-content{height:400px}#ae-billing-tos{text-align:left;width:100%;margin-bottom:.5em}.ae-billing-budget-section{margin-bottom:1.5em}.ae-billing-budget-section .g-unit,.g-unit .ae-billing-budget-section .g-unit,.g-unit .g-unit .ae-billing-budget-section .g-unit{margin:0 0 0 11em;width:auto;float:none}.g-unit .g-unit .ae-billing-budget-section .g-first,.g-unit .ae-billing-budget-section .g-first,.ae-billing-budget-section .g-first{margin:0;width:11em;float:left}#ae-billing-form .ae-btn-row{margin-left:11em}#ae-billing-form .ae-btn-row .ae-info{margin-top:10px}#ae-billing-checkout{width:150px;float:left}#ae-billing-alloc-table{border:1px solid #c5d7ef;border-bottom:none;width:100%;margin-top:.5em}#ae-billing-alloc-table th,#ae-billing-alloc-table td{padding:.35em 1em .25em .35em;border-bottom:1px solid #c5d7ef;color:#000;white-space:nowrap}.ae-billing-resource{background-color:transparent;font-weight:normal}#ae-billing-alloc-table tr th 
span{font-weight:normal}#ae-billing-alloc-table tr{vertical-align:baseline}#ae-billing-alloc-table th{white-space:nowrap}#ae-billing-alloc-table .ae-editable span.ae-text-input-clone,#ae-billing-alloc-table .ae-readonly input{display:none}#ae-billing-alloc-table .ae-readonly span.ae-text-input-clone,#ae-billing-alloc-table .ae-editable input{display:inline}#ae-billing-alloc-table td span.ae-billing-warn-note,#ae-billing-table-errors .ae-billing-warn-note{margin:0;background-repeat:no-repeat;display:inline-block;background-image:url('/img/icn/warning.png');text-align:right;padding-left:16px;padding-right:.1em;height:16px;font-weight:bold}#ae-billing-alloc-table td span.ae-billing-warn-note span,#ae-billing-table-errors .ae-billing-warn-note span{vertical-align:super;font-size:80%}#ae-billing-alloc-table td span.ae-billing-error-hidden,#ae-billing-table-errors .ae-billing-error-hidden{display:none}.ae-billing-percent{font-size:80%;color:#666;margin-left:3px}#ae-billing-week-info{margin-top:5px;line-height:1.4}#ae-billing-table-errors{margin-top:.3em}#ae-billing-allocation-noscript{margin-top:1.5em}#ae-billing-allocation-custom-opts{margin-left:2.2em}#ae-billing-settings h2{font-size:1em;display:inline}#ae-billing-settings p{padding:.3em 0 .5em}#ae-billing-settings-table{margin:.4em 0 .5em}#ae-settings-resource-col{width:19%}#ae-settings-budget-col{width:11%}#ae-billing-settings-table .ae-settings-budget-col{padding-right:2em}.ae-table th.ae-settings-unit-cell,.ae-table td.ae-settings-unit-cell,.ae-table th.ae-total-unit-cell,.ae-table td.ae-total-unit-cell{padding-left:1.2em}#ae-settings-unit-col{width:18%}#ae-settings-paid-col{width:15%}#ae-settings-free-col{width:15%}#ae-settings-total-col{width:22%}.ae-billing-inline-link{margin-left:.5em}.ae-billing-settings-section{margin-bottom:2em}#ae-billing-settings form{display:inline-block}#ae-billing-settings .ae-btn-row{margin-top:0.5em}#ae-billing-budget-setup-checkout{margin-bottom:0}#ae-billing-vat-c .ae-field-hint{width:85%}#ae-billing-checkout-note{margin-top:.8em}.ae-drachma-preset{background-color:#f6f9ff;margin-left:11em}.ae-drachma-preset p{margin-top:.5em}.ae-table thead th.ae-currency-th{text-align:right}#ae-billing-logs-date{width:15%}#ae-billing-logs-event{width:69%}#ae-billing-logs-amount{text-align:right;width:8%}#ae-billing-logs-balance{text-align:right;width:8%}#ae-billing-history-expand .ae-action{margin-left:1em}.ae-table .ae-billing-usage-premier,.ae-table .ae-billing-usage-report{width:100%;*width:auto;margin:0 0 1em 0}.ae-table .ae-billing-usage-report th,.ae-table .ae-billing-usage-premier th,.ae-billing-charges th{color:#666;border-top:0}.ae-table .ae-billing-usage-report th,.ae-table .ae-billing-usage-report td,.ae-table .ae-billing-usage-premier th,.ae-table .ae-billing-usage-premier td,.ae-billing-charges th,.ae-billing-charges td{background-color:transparent;padding:.4em 0;border-bottom:1px solid #ddd}.ae-table .ae-billing-usage-report tfoot td,.ae-billing-charges tfoot td{border-bottom:none}table.ae-billing-usage-report col.ae-billing-report-resource{width:30%}table.ae-billing-usage-report col.ae-billing-report-used{width:20%}table.ae-billing-usage-report col.ae-billing-report-free{width:16%}table.ae-billing-usage-report col.ae-billing-report-paid{width:17%}table.ae-billing-usage-report col.ae-billing-report-charge{width:17%}table.ae-billing-usage-premier col.ae-billing-report-resource{width:50%}table.ae-billing-usage-premier col.ae-billing-report-used{width:30%}table.ae-billing-usage-premier 
col.ae-billing-report-unit{width:20%}.ae-billing-change-resource{width:85%}.ae-billing-change-budget{width:15%}#ae-billing-always-on-label{display:inline}#ae-billing-budget-buffer-label{display:inline}.ae-billing-charges{width:50%}.ae-billing-charges-charge{text-align:right}.ae-billing-usage-report-container{padding:1em 1em 0 1em}#ae-billing-new-usage{background-color:#f6f9ff}.goog-zippy-expanded{background-image:url('/img/wgt/minus.gif');cursor:pointer;background-repeat:no-repeat;padding-left:17px}.goog-zippy-collapsed{background-image:url('/img/wgt/plus.gif');cursor:pointer;background-repeat:no-repeat;padding-left:17px}#ae-admin-logs-pagination{width:auto}.ae-usage-cycle-note{color:#555}#ae-billing-budget-widget .g-content{margin-bottom:0.5em;margin-right:0.5em}#ae-request-billing-dialog{width:800px}#ae-manage-billing-admins-form-cancel{color:#000}.ae-grace-period-resignup-outstanding-balance{color:red;font-weight:bold}.ae-billing-admins-table{width:100%}.ae-billing-admins-table td{width:33%;word-break:break-all;padding-bottom:5px}.b3iframe{width:100%}iframe{border:none}#ae-createapp-start{background-color:#c6d5f1;padding:1em;padding-bottom:2em;text-align:center}#ae-admin-app_id_alias-check,#ae-createapp-id-check{margin:0 0 0 1em}#ae-admin-app_id_alias-message{display:block;margin:.4em 0}#ae-createapp-id-content{width:100%}#ae-createapp-id-content td{vertical-align:top}#ae-createapp-id-td{white-space:nowrap;width:1%}#ae-createapp-id-td #ae-createapp-id-error{position:absolute;width:24em;padding-left:1em;white-space:normal}#ae-createapp-id-error-td{padding-left:1em}#ae-admin-dev-invite label{float:left;width:3.6em;position:relative;top:.3em}#ae-admin-dev-invite .ae-radio{margin-left:3.6em}#ae-admin-dev-invite .ae-radio label{float:none;width:auto;font-weight:normal;position:static}#ae-admin-dev-invite .goog-button{margin-left:3.6em}#ae-admin-dev-invite .ae-field-hint{margin-left:4.2em}#ae-admin-dev-invite .ae-radio .ae-field-hint{margin-left:0}.ae-you{color:#008000}#ae-authdomain-opts{margin-bottom:1em}#ae-authdomain-content .ae-input-text,#ae-authdomain-content .ae-field-hint{margin:.3em 0 .4em 2.5em}#ae-authdomain-opts a{margin-left:1em}#ae-authdomain-opts-hint{color:#666667;font-size:.85em;margin-top:.2em}#ae-authdomain-content #ae-authdomain-desc .ae-field-hint{margin-left:0}#ae-storage-opts{margin-bottom:1em}#ae-storage-content .ae-input-text,#ae-storage-content .ae-field-hint{margin:.3em 0 .4em 2.5em}#ae-storage-opts a{margin-left:1em}#ae-storage-opts-hint{color:#666667;font-size:.85em;margin-top:.2em}#ae-storage-content #ae-storage-desc .ae-field-hint{margin-left:0}#ae-location-opts{margin-bottom:1em}#ae-location-content .ae-input-text,#ae-location-content .ae-field-hint{margin:.3em 0 .4em 2.5em}#ae-location-opts a{margin-left:1em}#ae-location-opts-hint{color:#666667;font-size:.85em;margin-top:.2em}#ae-location-content #ae-storage-desc .ae-field-hint{margin-left:0}#ae-dash .g-section{margin:0 0 1em}#ae-dash * .g-section{margin:0}#ae-dash-quota .ae-alert{padding-left:1.5em}.ae-dash-email-disabled{background:url('/img/icn/exclamation_circle.png') no-repeat;margin-bottom:.5em;margin-top:.5em;min-height:16px;padding-left:1.5em}#ae-dash-email-disabled-footnote{font-weight:normal;margin:5px 0 0;padding-left:1.5em}#ae-dash-graph-c{border:1px solid #c5d7ef;padding:5px 0}#ae-dash-graph-change{margin:0 0 0 5px}#ae-dash-graph-img{background-color:#fff;display:block;margin-top:.5em;padding:5px}#ae-dash-graph-nodata{text-align:center}#ae-dash .ae-logs-severity{margin-right:.5em}#ae-dash 
.g-c{padding:0 0 0 .1em}#ae-dash .g-tpl-50-50 .g-unit .g-c{padding:0 0 0 1em}#ae-dash .g-tpl-50-50 .g-first .g-c{padding:0 1em 0 .1em}.ae-quota-warnings{background-color:#fffbe8;margin:0;padding:.5em .5em 0;text-align:left}.ae-quota-warnings div{padding:0 0 .5em}#ae-dash-quota-refresh-info{font-size:85%}#ae-dash #ae-dash-dollar-bucket-c #ae-dash-dollar-bucket{width:100%;float:none}#ae-dash #ae-dash-quota-bar-col,#ae-dash .ae-dash-quota-bar{width:100px}#ae-dash-quotadetails #ae-dash-quota-bar-col,#ae-dash-quotadetails .ae-dash-quota-bar{width:200px}#ae-dash-quota-percent-col{width:3.5em}#ae-dash-quota-cost-col{width:15%}#ae-dash-quota-alert-col{width:3.5em}#ae-dash .ae-dash-quota-alert-td{padding:0}.ae-dash-quota-alert-td a{display:block;width:16px;height:16px}#ae-dash .ae-dash-quota-alert-td .ae-alert{display:block;width:16px;height:16px;margin:0;padding:0}#ae-dash .ae-dash-quota-alert-td .ae-dash-email-disabled{display:block;width:16px;height:16px;margin:0;padding:0}#ae-dash-quota tbody th{font-weight:normal}#ae-dash-quota caption{padding:0}#ae-dash-quota caption .g-c{padding:3px}.ae-dash-quota-bar{float:left;background-color:#c0c0c0;height:13px;margin:.1em 0 0 0;position:relative}.ae-dash-quota-footnote{margin:5px 0 0;font-weight:normal}.ae-quota-warning{background-color:#f90}.ae-quota-alert{background-color:#c00}.ae-quota-normal{background-color:#0b0}.ae-quota-alert-text{color:#c00}.ae-favicon-text{font-size:.85em}#ae-dash-popular{width:97%}#ae-dash-popular-reqsec-col{width:6.5em}#ae-dash-popular-req-col{width:7em}#ae-dash-popular-mcycles-col{width:9.5em}#ae-dash-popular-latency-col{width:7em}#ae-dash-popular .ae-unimportant{font-size:80%}#ae-dash-popular .ae-nowrap,#ae-dash-errors .ae-nowrap{margin-right:5px;overflow:hidden}#ae-dash-popular th span,#ae-dash-errors th span{font-size:.8em;font-weight:normal;display:block}#ae-dash-errors caption .g-unit{width:9em}#ae-dash-errors-count-col{width:5em}#ae-dash-errors-percent-col{width:7em}#ae-dash-graph-chart-type{float:left;margin-right:1em}#ae-apps-all strong.ae-disabled{color:#000;background:#eee}.ae-quota-resource{width:30%}.ae-quota-safety-limit{width:10%}#ae-quota-details h3{padding-bottom:0;margin-bottom:.25em}#ae-quota-details table{margin-bottom:1.75em}#ae-quota-details table.ae-quota-requests{margin-bottom:.5em}#ae-quota-refresh-note p{text-align:right;padding-top:.5em;padding-bottom:0;margin-bottom:0}#ae-quota-first-api.g-section{padding-bottom:0;margin-bottom:.25em}#ae-instances-summary-table,#ae-instances-details-table{margin-bottom:1em}.ae-instances-details-availability-image{float:left;margin-right:.5em}.ae-instances-vm-health-image{float:left;margin-right:.5em}.ae-instances-agent-warning-image{float:right}.ae-instances-small-text{font-size:80%}.ae-instances-small-text .ae-separator{color:#666}.ae-instances-highlight td{background-color:#fff1a8}.ae-instances-release-col{width:6em}.ae-appbar-superuser-message strong{color:red}#ae-backends-table tr{vertical-align:baseline}.ae-backends-class-reminder{font-size:80%;color:#666;margin-left:3px}#ae-appbar-engines,#ae-appbar-versions{display:inline}#ae-update{background-color:#fffbe8;border:1px solid}.ac-renderer{font:normal 13px Arial,sans-serif;position:absolute;background:#fff;border:1px solid #666;-moz-box-shadow:2px 2px 2px rgba(102,102,102,.4);-webkit-box-shadow:2px 2px 2px 
rgba(102,102,102,.4);width:202px}.ac-row{cursor:pointer;padding:.4em}.ac-highlighted{font-weight:bold}.ac-active{background-color:#b2b4bf}#ae-datastore-explorer-c{_margin-right:-3000px;_position:relative;_width:100%}#ae-datastore-explorer form dt{margin:1em 0 0 0}#ae-datastore-explorer #ae-datastore-explorer-labels{margin:0 0 3px}#ae-datastore-explorer-header .ae-action{margin-left:1em}#ae-datastore-explorer .id{white-space:nowrap}#ae-datastore-explorer caption{text-align:right;padding:5px}#ae-datastore-explorer-submit{margin-top:5px}#ae-datastore-explorer-namespace{margin-top:7px;margin-right:5px}#ae-datastore-stats-namespace-input,#ae-datastore-explorer-namespace-query,#ae-datastore-explorer-namespace-create{width:200px}#ae-datastore-explorer-gql-spacer{margin-top:22px}h4 #ae-datastore-explorer-gql-label{font-weight:normal}#ae-datastore-form em{font-style:normal;font-weight:normal;margin:0 0 0 .2em;color:#666}#ae-datastore-form dt{font-weight:bold}#ae-datastore-form dd{margin:.4em 0 .3em 1.5em;overflow:auto;zoom:1}#ae-datastore-form dd em{width:4em;float:left}#ae-datastore-form dd.ae-last{margin-bottom:1em}#ae-datastore-explorer-tabs-content{margin-bottom:1em}#ae-datastore-explorer-list .ae-label-row,#ae-datastore-explorer-new .ae-label-row{float:left;padding-top:.2em}#ae-datastore-explorer-list .ae-input-row,#ae-datastore-explorer-list .ae-btn-row,#ae-datastore-explorer-new .ae-input-row,#ae-datastore-explorer-new .ae-btn-row{margin-left:6em}#ae-datastore-explorer-list .ae-btn-row,#ae-datastore-explorer-new .ae-btn-row{margin-bottom:0}.ae-datastore-index-name{font-size:1.2em;font-weight:bold}.ae-table .ae-datastore-index-defs{padding-left:20px}.ae-datastore-index-defs-row{border-top:1px solid #ddd}.ae-datastore-index-defs .ae-unimportant{font-size:.8em}.ae-datastore-index-status{border:1px solid #c0dfbf;background:#f3f7f3;margin:0 25px 0 0;padding:3px}#ae-datastore-index-status-col{width:20%}#ae-datastore-index-stat-col{width:20%}.ae-datastore-index-status-Building{border-color:#edebcd;background:#fefdec}.ae-datastore-index-status-Deleting{border-color:#ccc;background:#eee}.ae-datastore-index-status-Error{border-color:#ffd3b4;background:#ffeae0}.ae-datastore-pathlink{font-size:.9em}#ae-datastore-explorer-max-datastore-viewer-columns-form{float:right}#ae-datastore-explorer-max-datastore-viewer-columns-hint{position:absolute;width:14em;visibility:hidden;box-shadow:0 15px 50px #777;z-index:2}#ae-datastore-stats-top-level-c{padding-bottom:1em;margin-bottom:1em;border-bottom:1px solid #e5ecf9}#ae-datastore-stats-top-level{width:100%}#ae-datastore-stats-piecharts-c{margin-bottom:1em}.ae-datastore-stats-piechart-label{font-size:.85em;font-weight:normal;text-align:center;padding:0}#ae-datastore-stats-property-type{width:60%}#ae-datastore-stats-size-all{width:20%}#ae-datastore-stats-index-size-all{width:20%}#ae-datastore-stats-property-name{width:40%}#ae-datastore-stats-type{width:10%}#ae-datastore-stats-size-entity{width:15%}#ae-datastore-stats-index-size-entity{width:15%}#ae-datastore-stats-percentage-size-entity{width:20%}#ae-datastore-blob-filter-form{margin-bottom:1em}#ae-datastore-blob-query-filter-label{padding-right:.5em}#ae-datastore-blob-filter-contents{padding-top:.5em}#ae-datastore-blob-date-after,#ae-datastore-blob-date-before{float:left}#ae-datastore-blob-date-after{margin-right:1em}#ae-datastore-blob-order 
label{font-weight:normal}#ae-datastore-blob-col-check{width:2%}#ae-datastore-blob-col-file{width:45%}#ae-datastore-blob-col-type{width:14%}#ae-datastore-blob-col-size{width:16%}#ae-blobstore-col-date{width:18%}#ae-blob-detail-filename{padding-bottom:0}#ae-blob-detail-filename span{font-weight:normal}#ae-blob-detail-key{font-size:85%}#ae-blob-detail-preview{margin-top:1em}#ae-blob-detail-dl{text-align:right}.ae-deployment-add-labels{padding:0 5px 0 20px}.ae-deployment-button-cell{width:95px}#ae-deployment-dm-dialog{width:400px}.ae-deployment-dm-selector{margin:20px 2px 20px 5px}#ae-deployment-exp-add{margin-top:5px}#ae-deployment-exp-contents{margin-top:5px;overflow:hidden}#ae-deployment-exp-desc{margin-bottom:15px}#ae-deployment-exp-div{background-color:#e5ecf9;border:1px solid #c5d7ef;margin:20px 0;padding:7px 4px}#ae-deployment-exp-hdr{font-weight:bold;margin:5px 0 5px}#ae-deployment-exp-tbl{width:400px}#ae-deployment-exp-toggle{font-weight:bold}.ae-deployment-set-button{width:22px}.ae-deployment-traffic-input{width:30px}.ae-deployment-change-state-form{display:inline}.ae-deployment-change-state-submit{background:transparent;text-decoration:underline;border:none;outline:none;cursor:pointer;color:#00c;padding:0 0 0 .1em}#ae-domain-admins-list li{margin-bottom:.3em}#ae-domain-admins-list button{margin-left:.5em}#ae-new-app-dialog-c{width:500px}#ae-new-app-dialog-c .g-section{margin-bottom:1em}p.light-note{color:#555}.ae-bottom-message{margin-top:1em}#domsettings-form div.ae-radio{margin-left:1.7em}#domsettings-form div.ae-radio input{margin-left:-1.47em;float:left}#ae-logs-c{_margin-right:-2000px;_position:relative;_width:100%;background:#fff}#ae-logs{background-color:#c5d7ef;padding:1px;line-height:1.65}#ae-logs .ae-table-caption{border:0}#ae-logs-c ol,#ae-logs-c li{list-style:none;padding:0;margin:0}#ae-logs-c li li{margin:0 0 0 3px;padding:0 0 0 17px}.ae-log-noerror{padding-left:23px}#ae-logs-form .goog-inline-block{margin-top:0}.ae-logs-usage-info{padding-left:.5em}.ae-logs-reqlog .snippet{margin:.1em}.ae-logs-applog .snippet{color:#666}.ae-logs-severity{display:block;float:left;height:1.2em;width:1.2em;line-height:1.2;text-align:center;text-transform:capitalize;font-weight:bold;border-radius:2px;-moz-border-radius:2px;-webkit-border-radius:2px}.ae-logs-severity-4{background-color:#f22;color:#000}.ae-logs-severity-3{background-color:#f90;color:#000}.ae-logs-severity-2{background-color:#fd0}.ae-logs-severity-1{background-color:#3c0;color:#000}.ae-logs-severity-0{background-color:#09f;color:#000}#ae-logs-legend{margin:1em 0 0 0}#ae-logs-legend ul{list-style:none;margin:0;padding:0}#ae-logs-legend li,#ae-logs-legend strong{float:left;margin:0 1em 0 0}#ae-logs-legend li span{margin-right:.3em}.ae-logs-timestamp{padding:0 5px;font-size:85%}#ae-logs-form-c{margin-bottom:5px;padding-bottom:.5em;padding-left:1em}#ae-logs-form{padding:.3em 0 0}#ae-logs-form .ae-label-row{float:left;padding-top:.2em;margin-right:0.539em}#ae-logs-form .ae-input-row,#ae-logs-form .ae-btn-row{margin-left:4em}#ae-logs-form .ae-btn-row{margin-bottom:0}#ae-logs-requests-c{margin-bottom:.1em}#ae-logs-requests-c input{margin:0}#ae-logs-requests-all-label{margin-right:0.539em}#ae-logs-form-options{margin-top:8px}#ae-logs-tip{margin:.2em 0}#ae-logs-expand{margin-right:.2em}#ae-logs-severity-level-label{margin-top:.3em;display:block}#ae-logs-filter-hint-labels-list{margin:2px 0}#ae-logs-filter-hint-labels-list span{position:absolute}#ae-logs-filter-hint-labels-list 
ul{margin-left:5.5em;padding:0}#ae-logs-filter-hint-labels-list li{float:left;margin-right:.4em;line-height:1.2}.ae-toggle .ae-logs-getdetails,.ae-toggle pre{display:none}.ae-log-expanded .ae-toggle pre{display:block}#ae-logs-c .ae-log .ae-toggle{cursor:default;background:none;padding-left:0}#ae-logs-c .ae-log .ae-toggle h5{cursor:pointer;background-position:0 .55em;background-repeat:no-repeat;padding-left:17px}.ae-log .ae-plus h5{background-image:url('/img/wgt/plus.gif')}.ae-log .ae-minus h5{background-image:url('/img/wgt/minus.gif')}.ae-log{overflow:hidden;background-color:#fff;padding:.3em 0;line-height:1.65;border-bottom:1px solid #c5d7ef}.ae-log .ae-even{background-color:#e9e9e9;border:0}.ae-log h5{font-weight:normal;white-space:nowrap;padding:.4em 0 0 0}.ae-log span,.ae-log strong{margin:0 .3em}.ae-log .ae-logs-snippet{color:#666}.ae-log pre,.ae-logs-expanded{padding:.3em 0 .5em 1.5em;margin:0;font-family:"Courier New"}.ae-log .file{font-weight:bold}.ae-log.ae-log-expanded .file{white-space:pre-wrap;word-wrap:break-word}.ae-logs-app .ae-logs-req{display:none}.ae-logs-req .ae-app,.ae-logs-both .ae-app{padding-left:1em}#ae-dos-blacklist-rejects-table{text-align:left}#ae-dash-quota-percent-col{width:3.5em}.ae-cron-status-ok{color:#008000;font-size:90%;font-weight:bold}.ae-cron-status-error{color:#a03;font-size:90%;font-weight:bold}#ae-cronjobs-table .ae-table td{vertical-align:top}#ae-tasks-table td{vertical-align:top}#ae-tasks-quota{margin:0 0 1em 0}#ae-tasks-quota .ae-dash-quota-bar{width:150px}#ae-tasks-quota #ae-dash-quota-bar-col,#ae-tasks-quota .ae-dash-quota-bar{width:200px}.ae-tasks-paused-row{color:#666;font-style:italic;font-weight:bold}#ae-tasks-quota .ae-quota-safety-limit{width:30%}#ae-tasks-table{margin-top:1em}#ae-tasks-queuecontrols{margin-top:1em;margin-bottom:1em}#ae-tasks-delete-col{width:1em}#ae-tasks-eta-col,#ae-tasks-creation-col{width:11em}#ae-tasks-actions-col{width:7em}#ae-tasks-retry-col{width:4em}#ae-tasks-execution-col{width:6em}#ae-tasks-body-col{width:6em}#ae-tasks-headers-col{width:7em}.ae-tasks-hex-column,.ae-tasks-ascii-column{width:16em}#ae-tasks-table .ae-tasks-arrow{text-align:center}.str{color:#080}.kwd{color:#008}.com{color:#800}.typ{color:#606}.lit{color:#066}.pun,.opn,.clo{color:#660}.pln{color:#000}.tag{color:#008}.atn{color:#606}.atv{color:#080}.dec{color:#606}@media print{.str{color:#060}.kwd{color:#006;font-weight:bold}.com{color:#600;font-style:italic}.typ{color:#404;font-weight:bold}.lit{color:#044}.pun,.opn,.clo{color:#440}.pln{color:#000}.tag{color:#006;font-weight:bold}.atn{color:#404}.atv{color:#060}}pre.prettyprint{padding:2px;border:1px solid #888}ol.linenums{margin-top:0;margin-bottom:0}li.L0,li.L1,li.L2,li.L3,li.L5,li.L6,li.L7,li.L8{list-style-type:none}li.L1,li.L3,li.L5,li.L7,li.L9{background:#eee} \ No newline at end of file diff --git a/python/google/appengine/ext/datastore_admin/static/js/compiled.js b/python/google/appengine/ext/datastore_admin/static/js/compiled.js dissimilarity index 95% index 935130fd..5e7640f3 100644 --- a/python/google/appengine/ext/datastore_admin/static/js/compiled.js +++ b/python/google/appengine/ext/datastore_admin/static/js/compiled.js @@ -1,18 +1,19 @@ -var h=void 0,k=!0,m=null,p=!1,r=document,s=Array,t=Error,u=parseInt,w=String;function aa(a,b){return a.currentTarget=b}function ba(a,b){return a.keyCode=b}function ca(a,b){return a.length=b}function x(a,b){return a.disabled=b} -var 
y="push",z="slice",A="replace",B="value",da="preventDefault",C="indexOf",D="keyCode",F="handleEvent",G="type",ea="name",H="length",fa="propertyIsEnumerable",I="prototype",J="split",ga="target",K="call",L,M=this,ha=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof s)return"array";if(a instanceof Object)return b;var d=Object[I].toString[K](a);if("[object Window]"==d)return"object";if("[object Array]"==d||"number"==typeof a[H]&&"undefined"!=typeof a.splice&&"undefined"!=typeof a[fa]&& -!a[fa]("splice"))return"array";if("[object Function]"==d||"undefined"!=typeof a[K]&&"undefined"!=typeof a[fa]&&!a[fa]("call"))return"function"}else return"null";else if("function"==b&&"undefined"==typeof a[K])return"object";return b},ia=function(a){var b=ha(a);return"array"==b||"object"==b&&"number"==typeof a[H]},N=function(a){return"string"==typeof a},ja=function(a){return"function"==ha(a)},ka=function(a){var b=typeof a;return"object"==b&&a!=m||"function"==b},O="closure_uid_"+(1E9*Math.random()>>> -0),la=0,ma=function(a,b){function d(){}d.prototype=b[I];a.u=b[I];a.prototype=new d};var P=function(a){t.captureStackTrace?t.captureStackTrace(this,P):this.stack=t().stack||"";a&&(this.message=w(a))};ma(P,t);P[I].name="CustomError";var na=function(a,b){for(var d=1;d")&&(a=a[A](qa,">"));-1!=a[C]('"')&&(a=a[A](ra,"""));return a},oa=/&/g,pa=//g,ra=/\"/g,sa=/[&<>\"]/;var ua=function(a,b){b.unshift(a);P[K](this,na.apply(m,b));b.shift()};ma(ua,P);ua[I].name="AssertionError";var va=function(a,b,d){if(!a){var c=s[I][z][K](arguments,2),f="Assertion failed";if(b)var f=f+(": "+b),e=c;throw new ua(""+f,e||[]);}return a};var R=s[I],wa=R[C]?function(a,b,d){va(a[H]!=m);return R[C][K](a,b,d)}:function(a,b,d){d=d==m?0:0>d?Math.max(0,a[H]+d):d;if(N(a))return!N(b)||1!=b[H]?-1:a[C](b,d);for(;d=arguments[H]? 
-R[z][K](a,b):R[z][K](a,b,d)};var Aa=function(a,b,d){for(var c in a)b[K](d,a[c],c,a)},Ba="constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" "),Ca=function(a,b){for(var d,c,f=1;fparseFloat(Ma)){La=w(Qa);break a}}La=Ma} -var Ra=La,Sa={},W=function(a){var b;if(!(b=Sa[a])){b=0;for(var d=w(Ra)[A](/^[\s\xa0]+|[\s\xa0]+$/g,"")[J]("."),c=w(a)[A](/^[\s\xa0]+|[\s\xa0]+$/g,"")[J]("."),f=Math.max(d[H],c[H]),e=0;0==b&&e(0==v[1][H]?0:u(v[1],10))?1:0)||((0==q[2][H])<(0== -v[2][H])?-1:(0==q[2][H])>(0==v[2][H])?1:0)||(q[2]v[2]?1:0)}while(0==b)}b=Sa[a]=0<=b}return b},Ta=M.document,Ua=!Ta||!T?h:Ka()||("CSS1Compat"==Ta.compatMode?u(Ra,10):5);var Va=!T||T&&9<=Ua;!U&&!T||T&&T&&9<=Ua||U&&W("1.9.1");T&&W("9");var Wa=function(a,b){var d;d=a.className;d=N(d)&&d.match(/\S+/g)||[];for(var c=za(arguments,1),f=d[H]+c[H],e=d,g=0;g");f=f.join("")}f= -r.createElement(f);e&&(N(e)?f.className=e:"array"==ha(e)?Wa.apply(m,[f].concat(e)):$a(f,e));2=a[D])&&ba(a,-1)}catch(b){}};var ib="closure_listenable_"+(1E6*Math.random()|0),jb=0;var kb=function(){};L=kb[I];L.key=0;L.c=p;L.h=p;L.i=function(a,b,d,c,f,e){if(ja(a))this.q=k;else if(a&&a[F]&&ja(a[F]))this.q=p;else throw t("Invalid listener argument");this.d=a;this.o=b;this.src=d;this.type=c;this.capture=!!f;this.n=e;this.h=p;this.key=++jb;this.c=p};L.handleEvent=function(a){return this.q?this.d[K](this.n||this.src,a):this.d[F][K](this.d,a)};var lb={},X={},Y={},Z={},mb=function(a,b,d,c,f){if("array"==ha(b)){for(var e=0;ef[D]||f.returnValue!=h)return k;a:{var l=p;if(0==f[D])try{ba(f,-1);break a}catch(Q){l=k}if(l||f.returnValue==h)f.returnValue= -k}}l=new hb;l.i(f,this);f=k;try{if(g){for(var q=[],v=l.currentTarget;v;v=v.parentNode)q[y](v);e=c[k];e.b=e.a;for(var E=q[H]-1;!l.m&&0<=E&&e.b;E--)aa(l,q[E]),f&=sb(e,q[E],d,k,l);if(n){e=c[p];e.b=e.a;for(E=0;!l.m&&E>>0),nb=function(a){return ja(a)?a:a[tb]||(a[tb]=function(b){return a[F](b)})};var ub=function(a,b){var d=[];1>>0),ja=0,ka=function(a,b){function c(){}c.prototype=b[H];a.u=b[H];a.prototype= +new c};var P=function(a){t.captureStackTrace?t.captureStackTrace(this,P):this.stack=t().stack||"";a&&(this.message=w(a))};ka(P,t);P[H].name="CustomError";var la=function(a,b){for(var c=1;c")&&(a=a[A](oa,">"));-1!=a[C]('"')&&(a=a[A](pa,"""));return a},ma=/&/g,na=//g,pa=/\"/g,qa=/[&<>\"]/;var sa=function(a,b){b.unshift(a);P[J](this,la.apply(n,b));b.shift()};ka(sa,P);sa[H].name="AssertionError";var R=function(a,b,c){if(!a){var e=s[H][z][J](arguments,2),f="Assertion failed";if(b)var f=f+(": "+b),d=e;throw new sa(""+f,d||[]);}return a};var S=s[H],ta=S[C]?function(a,b,c){R(a[G]!=n);return S[C][J](a,b,c)}:function(a,b,c){c=c==n?0:0>c?Math.max(0,a[G]+c):c;if(N(a))return!N(b)||1!=b[G]?-1:a[C](b,c);for(;c=arguments[G]? 
+S[z][J](a,b):S[z][J](a,b,c)};var xa=function(a,b,c){for(var e in a)b[J](c,a[e],e,a)},ya="constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" "),za=function(a,b){for(var c,e,f=1;fparseFloat(Ka)){Ja=w(Oa);break e}}Ja=Ka} +var Pa=Ja,Qa={},W=function(a){var b;if(!(b=Qa[a])){b=0;for(var c=w(Pa)[A](/^[\s\xa0]+|[\s\xa0]+$/g,"")[I]("."),e=w(a)[A](/^[\s\xa0]+|[\s\xa0]+$/g,"")[I]("."),f=Math.max(c[G],e[G]),d=0;0==b&&d(0==v[1][G]?0:u(v[1],10))?1:0)||((0==q[2][G])<(0== +v[2][G])?-1:(0==q[2][G])>(0==v[2][G])?1:0)||(q[2]v[2]?1:0)}while(0==b)}b=Qa[a]=0<=b}return b},Ra=L.document,Sa=!Ra||!U?h:Ia()||("CSS1Compat"==Ra.compatMode?u(Pa,10):5);var Ta=!U||U&&9<=Sa;!V&&!U||U&&U&&9<=Sa||V&&W("1.9.1");U&&W("9");var Ua=function(a,b){var c;c=a.className;c=N(c)&&c.match(/\S+/g)||[];for(var e=wa(arguments,1),f=c[G]+e[G],d=c,g=0;g");f=f.join("")}f=r.createElement(f);d&&(N(d)?f.className=d:"array"==M(d)?Ua.apply(n,[f].concat(d)):Ya(f,d));2=a[D])&&ba(a,-1)}catch(b){}};var gb="closure_listenable_"+(1E6*Math.random()|0),hb=0;var ib=function(a,b,c,e,f,d){this.e=a;this.o=b;this.src=c;this.type=e;this.capture=!!f;this.k=d;this.key=++hb;this.d=this.l=p};ib[H].r=function(){this.d=l;this.k=this.src=this.o=this.e=n};var jb={},X={},Y={},Z={},kb=function(a,b,c,e,f){if("array"==M(b)){for(var d=0;df[D]||f.returnValue!=h)return l;e:{var k=p;if(0==f[D])try{ba(f,-1);break e}catch(Q){k=l}if(k||f.returnValue==h)f.returnValue=l}}k=new fb;k.p(f,this);f=l;try{if(g){for(var q= +[],v=k.currentTarget;v;v=v.parentNode)q[y](v);d=e[l];d.b=d.a;for(var E=q[G]-1;!k.m&&0<=E&&d.b;E--)aa(k,q[E]),f&=qb(d,q[E],c,l,k);if(m){d=e[p];d.b=d.a;for(E=0;!k.m&&E>>0),lb=function(a){R(a,"Listener can not be null.");if("function"==M(a))return a;R(a.handleEvent,"An object listener must have handleEvent method.");return a[rb]||(a[rb]= +function(b){return a.handleEvent(b)})};var sb=function(a,b){var c=[];1= RESERVE_KEY_POOL_MAX_SIZE: + self.flush() + return def flush(self): - for namespace, path_to_max_id in self.ns_to_path_to_max_id.iteritems(): - for path, max_id in path_to_max_id.iteritems(): - datastore.AllocateIds(db.Key.from_path(namespace=namespace, - _app=self.app_id, - *list(path)), - max=max_id) - self.ns_to_path_to_max_id = collections.defaultdict(dict) + + datastore._GetConnection()._reserve_keys(self.keys) + self.keys = [] -class AllocateMaxId(operation.Operation): - """Mapper operation to allocate max id.""" +class ReserveKey(operation.Operation): + """Mapper operation to reserve key ids.""" def __init__(self, key, app_id): self.key = key self.app_id = app_id - self.pool_id = 'allocate_max_id_%s_pool' % self.app_id + self.pool_id = 'reserve_key_%s_pool' % self.app_id def __call__(self, ctx): pool = ctx.get_pool(self.pool_id) if not pool: - pool = AllocateMaxIdPool(self.app_id) + pool = ReserveKeyPool() ctx.register_pool(self.pool_id, pool) - pool.allocate_max_id(self.key) + pool.reserve_key(self.key) diff --git a/python/google/appengine/ext/db/__init__.py b/python/google/appengine/ext/db/__init__.py index 8d253971..7ce16ffc 100644 --- a/python/google/appengine/ext/db/__init__.py +++ b/python/google/appengine/ext/db/__init__.py @@ -3045,7 +3045,7 @@ class DateTimeProperty(Property): Returns: 'now' as a whole timestamp, including both time and date. """ - return datetime.datetime.now() + return datetime.datetime.utcnow() def _date_to_datetime(value): @@ -3093,7 +3093,7 @@ class DateProperty(DateTimeProperty): Returns: 'date' part of 'now' only. 
""" - return datetime.datetime.now().date() + return datetime.datetime.utcnow().date() def validate(self, value): """Validate date. @@ -3170,7 +3170,7 @@ class TimeProperty(DateTimeProperty): Returns: 'time' part of 'now' only. """ - return datetime.datetime.now().time() + return datetime.datetime.utcnow().time() def empty(self, value): """Is time property empty. diff --git a/python/google/appengine/ext/endpoints/api_backend_service.py b/python/google/appengine/ext/endpoints/api_backend_service.py index 05e7b071..7846add0 100644 --- a/python/google/appengine/ext/endpoints/api_backend_service.py +++ b/python/google/appengine/ext/endpoints/api_backend_service.py @@ -63,9 +63,7 @@ class ApiConfigRegistry(object): if config_contents is None: return parsed_config = json.loads(config_contents) - if not self.__register_class(parsed_config): - return - + self.__register_class(parsed_config) self.__api_configs.add(config_contents) self.__register_methods(parsed_config) @@ -75,38 +73,26 @@ class ApiConfigRegistry(object): Args: parsed_config: The JSON object with the API configuration being added. - Returns: - True if the class has been registered and it's fine to add this - configuration. False if this configuration shouldn't be added. + Raises: + ApiConfigurationError: If the class has already been registered. """ methods = parsed_config.get('methods') if not methods: - return True - - + return - service_class = None + service_classes = set() for method in methods.itervalues(): rosy_method = method.get('rosyMethod') if rosy_method and '.' in rosy_method: method_class = rosy_method.split('.', 1)[0] - if service_class is None: - service_class = method_class - elif service_class != method_class: - raise api_config.ApiConfigurationError( - 'SPI registered with multiple classes within one ' - 'configuration (%s and %s). Each call to register_spi should ' - 'only contain the methods from a single class. Call ' - 'repeatedly for multiple classes.' % (service_class, - method_class)) - - if service_class is not None: - if service_class in self.__registered_classes: + service_classes.add(method_class) - return False + for service_class in service_classes: + if service_class in self.__registered_classes: + raise api_config.ApiConfigurationError( + 'SPI class %s has already been registered.' % service_class) self.__registered_classes.add(service_class) - return True def __register_methods(self, parsed_config): """Register all methods from the given api config file. 
diff --git a/python/google/appengine/ext/endpoints/api_config.py b/python/google/appengine/ext/endpoints/api_config.py index 391d025a..5700db4f 100644 --- a/python/google/appengine/ext/endpoints/api_config.py +++ b/python/google/appengine/ext/endpoints/api_config.py @@ -47,7 +47,13 @@ from protorpc import messages from protorpc import remote from protorpc import util -from google.appengine.api import app_identity +try: + + from google.appengine.api import app_identity +except ImportError: + + from google.appengine.api import app_identity + from google.appengine.ext.endpoints import message_parser from google.appengine.ext.endpoints import users_id_token @@ -57,6 +63,8 @@ __all__ = [ 'ApiAuth', 'ApiConfigGenerator', 'ApiConfigurationError', + 'ApiFrontEndLimitRule', + 'ApiFrontEndLimits', 'CacheControl', 'EMAIL_SCOPE', 'api', @@ -226,6 +234,26 @@ class _ApiInfo(object): return self.__common_info.auth @property + def owner_domain(self): + """Domain of the owner of this API.""" + return self.__common_info.owner_domain + + @property + def owner_name(self): + """Name of the owner of this API.""" + return self.__common_info.owner_name + + @property + def package_path(self): + """Package this API belongs to, '/' delimited. Used by client libs.""" + return self.__common_info.package_path + + @property + def frontend_limits(self): + """Optional query limits for unregistered developers.""" + return self.__common_info.frontend_limits + + @property def resource_name(self): """Resource name for the class this decorates.""" return self.__resource_name @@ -247,7 +275,8 @@ class _ApiDecorator(object): @util.positional(3) def __init__(self, name, version, description=None, hostname=None, audiences=None, scopes=None, allowed_client_ids=None, - canonical_name=None, auth=None): + canonical_name=None, auth=None, owner_domain=None, + owner_name=None, package_path=None, frontend_limits=None): """Constructor for _ApiDecorator. Args: @@ -262,12 +291,25 @@ class _ApiDecorator(object): readable version of the name. auth: ApiAuth instance, the authentication configuration information for this API. + owner_domain: string, the domain of the person or company that owns + this API. Along with owner_name, this provides hints to properly + name client libraries for this API. + owner_name: string, the name of the owner of this API. Along with + owner_domain, this provides hints to properly name client libraries + for this API. + package_path: string, the "package" this API belongs to. This '/' + delimited value specifies logical groupings of APIs. This is used by + client libraries of this API. + frontend_limits: ApiFrontEndLimits, optional query limits for unregistered + developers. """ self.__common_info = self.__ApiCommonInfo( name, version, description=description, hostname=hostname, audiences=audiences, scopes=scopes, allowed_client_ids=allowed_client_ids, - canonical_name=canonical_name, auth=auth) + canonical_name=canonical_name, auth=auth, owner_domain=owner_domain, + owner_name=owner_name, package_path=package_path, + frontend_limits=frontend_limits) class __ApiCommonInfo(object): """API information that's common among all classes that implement an API. 
@@ -288,7 +330,8 @@ class _ApiDecorator(object): @util.positional(3) def __init__(self, name, version, description=None, hostname=None, audiences=None, scopes=None, allowed_client_ids=None, - canonical_name=None, auth=None): + canonical_name=None, auth=None, owner_domain=None, + owner_name=None, package_path=None, frontend_limits=None): """Constructor for _ApiCommonInfo. Args: @@ -303,6 +346,17 @@ class _ApiDecorator(object): readable version of the name. auth: ApiAuth instance, the authentication configuration information for this API. + owner_domain: string, the domain of the person or company that owns + this API. Along with owner_name, this provides hints to properly + name client libraries for this API. + owner_name: string, the name of the owner of this API. Along with + owner_domain, this provides hints to properly name client libraries + for this API. + package_path: string, the "package" this API belongs to. This '/' + delimited value specifies logical groupings of APIs. This is used by + client libraries of this API. + frontend_limits: ApiFrontEndLimits, optional query limits for + unregistered developers. """ _CheckType(name, basestring, 'name', allow_none=False) _CheckType(version, basestring, 'version', allow_none=False) @@ -313,6 +367,10 @@ class _ApiDecorator(object): _CheckListType(allowed_client_ids, basestring, 'allowed_client_ids') _CheckType(canonical_name, basestring, 'canonical_name') _CheckType(auth, ApiAuth, 'auth') + _CheckType(owner_domain, basestring, 'owner_domain') + _CheckType(owner_name, basestring, 'owner_name') + _CheckType(package_path, basestring, 'package_path') + _CheckType(frontend_limits, ApiFrontEndLimits, 'frontend_limits') if hostname is None: hostname = app_identity.get_default_version_hostname() @@ -332,6 +390,10 @@ class _ApiDecorator(object): self.__allowed_client_ids = allowed_client_ids self.__canonical_name = canonical_name self.__auth = auth + self.__owner_domain = owner_domain + self.__owner_name = owner_name + self.__package_path = package_path + self.__frontend_limits = frontend_limits @property def name(self): @@ -378,6 +440,26 @@ class _ApiDecorator(object): """Authentication configuration for this API.""" return self.__auth + @property + def owner_domain(self): + """Domain of the owner of this API.""" + return self.__owner_domain + + @property + def owner_name(self): + """Name of the owner of this API.""" + return self.__owner_name + + @property + def package_path(self): + """Package this API belongs to, '/' delimited. Used by client libs.""" + return self.__package_path + + @property + def frontend_limits(self): + """Optional query limits for unregistered developers.""" + return self.__frontend_limits + def __call__(self, api_class): """Decorator for ProtoRPC class that configures Google's API server. @@ -433,7 +515,7 @@ class _ApiDecorator(object): class ApiAuth(object): """Optional authorization configuration information for an API.""" - def __init__(self, allow_cookie_auth=None): + def __init__(self, allow_cookie_auth=None, blocked_regions=None): """Constructor for ApiAuth, authentication information for an API. Args: @@ -444,21 +526,134 @@ class ApiAuth(object): potentially dangerous results. Please be very cautious in enabling this setting, and make sure to require appropriate XSRF tokens to protect your API. + blocked_regions: list of Strings, a list of 2-letter ISO region codes + to block. 
""" _CheckType(allow_cookie_auth, bool, 'allow_cookie_auth') + _CheckListType(blocked_regions, basestring, 'blocked_regions') self.__allow_cookie_auth = allow_cookie_auth + self.__blocked_regions = blocked_regions @property def allow_cookie_auth(self): """Whether cookie authentication is allowed for this API.""" return self.__allow_cookie_auth + @property + def blocked_regions(self): + """List of 2-letter ISO region codes to block.""" + return self.__blocked_regions + + +class ApiFrontEndLimitRule(object): + """Custom rule to limit unregistered traffic.""" + + def __init__(self, match=None, qps=None, user_qps=None, daily=None, + analytics_id=None): + """Constructor for ApiFrontEndLimitRule. + + Args: + match: string, the matching rule that defines this traffic segment. + qps: int, the aggregate QPS for this segment. + user_qps: int, the per-end-user QPS for this segment. + daily: int, the aggregate daily maximum for this segment. + analytics_id: string, the project ID under which traffic for this segment + will be logged. + """ + _CheckType(match, basestring, 'match') + _CheckType(qps, int, 'qps') + _CheckType(user_qps, int, 'user_qps') + _CheckType(daily, int, 'daily') + _CheckType(analytics_id, basestring, 'analytics_id') + + self.__match = match + self.__qps = qps + self.__user_qps = user_qps + self.__daily = daily + self.__analytics_id = analytics_id + + @property + def match(self): + """The matching rule that defines this traffic segment.""" + return self.__match + + @property + def qps(self): + """The aggregate QPS for this segment.""" + return self.__qps + + @property + def user_qps(self): + """The per-end-user QPS for this segment.""" + return self.__user_qps + + @property + def daily(self): + """The aggregate daily maximum for this segment.""" + return self.__daily + + @property + def analytics_id(self): + """Project ID under which traffic for this segment will be logged.""" + return self.__analytics_id + + +class ApiFrontEndLimits(object): + """Optional front end limit information for an API.""" + + def __init__(self, unregistered_user_qps=None, unregistered_qps=None, + unregistered_daily=None, rules=None): + """Constructor for ApiFrontEndLimits, front end limit info for an API. + + Args: + unregistered_user_qps: int, the per-end-user QPS. Users are identified + by their IP address. A value of 0 will block unregistered requests. + unregistered_qps: int, an aggregate QPS upper-bound for all unregistered + traffic. A value of 0 currently means unlimited, though it might change + in the future. To block unregistered requests, use unregistered_user_qps + or unregistered_daily instead. + unregistered_daily: int, an aggregate daily upper-bound for all + unregistered traffic. A value of 0 will block unregistered requests. + rules: A list or tuple of ApiFrontEndLimitRule instances: custom rules + used to apply limits to unregistered traffic. 
+ """ + _CheckType(unregistered_user_qps, int, 'unregistered_user_qps') + _CheckType(unregistered_qps, int, 'unregistered_qps') + _CheckType(unregistered_daily, int, 'unregistered_daily') + _CheckListType(rules, ApiFrontEndLimitRule, 'rules') + + self.__unregistered_user_qps = unregistered_user_qps + self.__unregistered_qps = unregistered_qps + self.__unregistered_daily = unregistered_daily + self.__rules = rules + + @property + def unregistered_user_qps(self): + """Per-end-user QPS limit.""" + return self.__unregistered_user_qps + + @property + def unregistered_qps(self): + """Aggregate QPS upper-bound for all unregistered traffic.""" + return self.__unregistered_qps + + @property + def unregistered_daily(self): + """Aggregate daily upper-bound for all unregistered traffic.""" + return self.__unregistered_daily + + @property + def rules(self): + """Custom rules used to apply limits to unregistered traffic.""" + return self.__rules + @util.positional(2) def api(name, version, description=None, hostname=None, audiences=None, scopes=None, allowed_client_ids=None, canonical_name=None, - auth=None): + auth=None, owner_domain=None, owner_name=None, package_path=None, + frontend_limits=None): """Decorate a ProtoRPC Service class for use by the framework above. This decorator can be used to specify an API name, version, description, and @@ -499,6 +694,17 @@ def api(name, version, description=None, hostname=None, audiences=None, readable version of the name. auth: ApiAuth instance, the authentication configuration information for this API. + owner_domain: string, the domain of the person or company that owns + this API. Along with owner_name, this provides hints to properly + name client libraries for this API. + owner_name: string, the name of the owner of this API. Along with + owner_domain, this provides hints to properly name client libraries + for this API. + package_path: string, the "package" this API belongs to. This '/' + delimited value specifies logical groupings of APIs. This is used by + client libraries of this API. + frontend_limits: ApiFrontEndLimits, optional query limits for unregistered + developers. Returns: Class decorated with api_info attribute, an instance of ApiInfo. 
@@ -507,7 +713,10 @@ def api(name, version, description=None, hostname=None, audiences=None, return _ApiDecorator(name, version, description=description, hostname=hostname, audiences=audiences, scopes=scopes, allowed_client_ids=allowed_client_ids, - canonical_name=canonical_name, auth=auth) + canonical_name=canonical_name, auth=auth, + owner_domain=owner_domain, owner_name=owner_name, + package_path=package_path, + frontend_limits=frontend_limits) class CacheControl(object): @@ -1374,9 +1583,47 @@ class ApiConfigGenerator(object): auth_descriptor = {} if api_info.auth.allow_cookie_auth is not None: auth_descriptor['allowCookieAuth'] = api_info.auth.allow_cookie_auth + if api_info.auth.blocked_regions: + auth_descriptor['blockedRegions'] = api_info.auth.blocked_regions return auth_descriptor + def __frontend_limit_descriptor(self, api_info): + if api_info.frontend_limits is None: + return None + + descriptor = {} + for propname, descname in (('unregistered_user_qps', 'unregisteredUserQps'), + ('unregistered_qps', 'unregisteredQps'), + ('unregistered_daily', 'unregisteredDaily')): + if getattr(api_info.frontend_limits, propname) is not None: + descriptor[descname] = getattr(api_info.frontend_limits, propname) + + rules = self.__frontend_limit_rules_descriptor(api_info) + if rules: + descriptor['rules'] = rules + + return descriptor + + def __frontend_limit_rules_descriptor(self, api_info): + if not api_info.frontend_limits.rules: + return None + + rules = [] + for rule in api_info.frontend_limits.rules: + descriptor = {} + for propname, descname in (('match', 'match'), + ('qps', 'qps'), + ('user_qps', 'userQps'), + ('daily', 'daily'), + ('analytics_id', 'analyticsId')): + if getattr(rule, propname) is not None: + descriptor[descname] = getattr(rule, propname) + if descriptor: + rules.append(descriptor) + + return rules + def __api_descriptor(self, services, hostname=None): """Builds a description of an API. 
@@ -1409,6 +1656,11 @@ class ApiConfigGenerator(object): if auth_descriptor: descriptor['auth'] = auth_descriptor + frontend_limit_descriptor = self.__frontend_limit_descriptor( + merged_api_info) + if frontend_limit_descriptor: + descriptor['frontendLimits'] = frontend_limit_descriptor + method_map = {} method_collision_tracker = {} @@ -1461,11 +1713,18 @@ class ApiConfigGenerator(object): 'abstract': False, 'adapter': { 'bns': 'https://%s/_ah/spi' % hostname, - 'type': 'lily' + 'type': 'lily', + 'deadline': 10.0 } } if api_info.canonical_name: defaults['canonicalName'] = api_info.canonical_name + if api_info.owner_domain: + defaults['ownerDomain'] = api_info.owner_domain + if api_info.owner_name: + defaults['ownerName'] = api_info.owner_name + if api_info.package_path: + defaults['packagePath'] = api_info.package_path return defaults def pretty_print_config_to_json(self, services, hostname=None): diff --git a/python/google/appengine/ext/endpoints/message_parser.py b/python/google/appengine/ext/endpoints/message_parser.py index d628b24d..8bd8cec3 100644 --- a/python/google/appengine/ext/endpoints/message_parser.py +++ b/python/google/appengine/ext/endpoints/message_parser.py @@ -59,7 +59,7 @@ class MessageTypeToJsonSchema(object): messages.Variant.DOUBLE: ('number', 'double'), None: ('number', 'float')}, messages.BooleanField: ('boolean', None), - messages.BytesField: ('string', None), + messages.BytesField: ('string', 'byte'), message_types.DateTimeField: ('string', 'date-time'), messages.StringField: ('string', None), messages.MessageField: ('object', None), diff --git a/python/google/appengine/ext/endpoints/users_id_token.py b/python/google/appengine/ext/endpoints/users_id_token.py index d524e954..d94416f3 100644 --- a/python/google/appengine/ext/endpoints/users_id_token.py +++ b/python/google/appengine/ext/endpoints/users_id_token.py @@ -37,10 +37,18 @@ import urllib import google -from google.appengine.api import memcache -from google.appengine.api import oauth -from google.appengine.api import urlfetch -from google.appengine.api import users +try: + + from google.appengine.api import memcache + from google.appengine.api import oauth + from google.appengine.api import urlfetch + from google.appengine.api import users +except ImportError: + + from google.appengine.api import memcache + from google.appengine.api import oauth + from google.appengine.api import urlfetch + from google.appengine.api import users try: @@ -201,12 +209,10 @@ def _maybe_set_current_user_vars(method, api_info=None, request=None): if scopes: logging.info('Checking for oauth token.') - result = urlfetch.fetch( - '%s?%s' % (_TOKENINFO_URL, urllib.urlencode({'access_token': token}))) - if result.status_code == 200: - token_info = json.loads(result.content) - _set_oauth_user_vars(token_info, audiences, allowed_client_ids, - scopes, _is_local_dev()) + if _is_local_dev(): + _set_bearer_user_vars_local(token, allowed_client_ids, scopes) + else: + _set_bearer_user_vars(allowed_client_ids, scopes) def _get_token(request): @@ -282,85 +288,96 @@ def _get_id_token_user(token, audiences, allowed_client_ids, time_now, cache): return users.User(email) + def _set_oauth_user_vars(token_info, audiences, allowed_client_ids, scopes, local_dev): - """Validate the oauth token and set endpoints auth user variables. 
+ logging.warning('_set_oauth_user_vars is deprecated and will be removed ' + 'soon.') + return _set_bearer_user_vars(allowed_client_ids, scopes) - If the oauth token is valid, this sets either the ENDPOINTS_AUTH_EMAIL and - ENDPOINTS_AUTH_DOMAIN environment variables (in local development) or - the ENDPOINTS_USE_OAUTH_SCOPE one. These provide enough information - that our endpoints.get_current_user() function can get the user. + + +def _set_bearer_user_vars(allowed_client_ids, scopes): + """Validate the oauth bearer token and set endpoints auth user variables. + + If the bearer token is valid, this sets ENDPOINTS_USE_OAUTH_SCOPE. This + provides enough information that our endpoints.get_current_user() function + can get the user. Args: - token_info: Info returned about the oauth token from the tokeninfo endpoint. - audiences: List of audiences that are acceptable, or None for first-party. allowed_client_ids: List of client IDs that are acceptable. scopes: List of acceptable scopes. - local_dev: True if we're running a local dev server, false if we're in prod. """ - if 'email' not in token_info: - logging.warning('Oauth token doesn\'t include an email address.') - return - if not token_info.get('verified_email'): - logging.warning('Oauth token email isn\'t verified.') - return - + for scope in scopes: + try: + client_id = oauth.get_client_id(scope) + except oauth.Error: + continue - if audiences or allowed_client_ids: - if 'audience' not in token_info: - logging.warning('Audience is required and isn\'t specified in token.') + if allowed_client_ids and client_id not in allowed_client_ids: + logging.warning('Client ID is not allowed: %s', client_id) return + os.environ[_ENV_USE_OAUTH_SCOPE] = scope + logging.debug('Returning user from matched oauth_user.') + return + logging.warning('Oauth framework user didn\'t match oauth token user.') + return None - if token_info['audience'] in audiences: - pass - elif (token_info['audience'] == token_info.get('issued_to') and - allowed_client_ids is not None and - token_info['audience'] in allowed_client_ids): - pass - else: - logging.warning('Oauth token audience isn\'t permitted.') - return - - - token_scopes = token_info.get('scope', '').split(' ') - if not any(scope in scopes for scope in token_scopes): - logging.warning('Oauth token scopes don\'t match any acceptable scopes.') - return - - if local_dev: +def _set_bearer_user_vars_local(token, allowed_client_ids, scopes): + """Validate the oauth bearer token on the dev server. + Since the functions in the oauth module return only example results in local + development, this hits the tokeninfo endpoint and attempts to validate the + token. If it's valid, we'll set _ENV_AUTH_EMAIL and _ENV_AUTH_DOMAIN so we + can get the user from the token. + Args: + token: String with the oauth token to validate. + allowed_client_ids: List of client IDs that are acceptable. + scopes: List of acceptable scopes. 
+ """ + result = urlfetch.fetch( + '%s?%s' % (_TOKENINFO_URL, urllib.urlencode({'access_token': token}))) + if result.status_code != 200: + try: + error_description = json.loads(result.content)['error_description'] + except (ValueError, KeyError): + error_description = '' + logging.warning('Token info endpoint returned status %s: %s', + result.status_code, error_description) + return + token_info = json.loads(result.content) - os.environ[_ENV_AUTH_EMAIL] = token_info['email'] - os.environ[_ENV_AUTH_DOMAIN] = '' + if 'email' not in token_info: + logging.warning('Oauth token doesn\'t include an email address.') + return + if not token_info.get('verified_email'): + logging.warning('Oauth token email isn\'t verified.') return + client_id = token_info.get('issued_to') + if allowed_client_ids and client_id not in allowed_client_ids: + logging.warning('Client ID is not allowed: %s', client_id) + return - for scope in scopes: - try: - oauth_user = oauth.get_current_user(scope) - oauth_scope = scope - break - except oauth.Error: - pass - else: - logging.warning('Oauth framework couldn\'t find a user.') - return None - if oauth_user.email() == token_info['email']: - os.environ[_ENV_USE_OAUTH_SCOPE] = oauth_scope + token_scopes = token_info.get('scope', '').split(' ') + if not any(scope in scopes for scope in token_scopes): + logging.warning('Oauth token scopes don\'t match any acceptable scopes.') return - logging.warning('Oauth framework user didn\'t match oauth token user.') - return None + os.environ[_ENV_AUTH_EMAIL] = token_info['email'] + os.environ[_ENV_AUTH_DOMAIN] = '' + logging.debug('Local dev returning user from token.') + return def _is_local_dev(): diff --git a/python/google/appengine/ext/mapreduce/base_handler.py b/python/google/appengine/ext/mapreduce/base_handler.py index 2f1056f6..9d999729 100644 --- a/python/google/appengine/ext/mapreduce/base_handler.py +++ b/python/google/appengine/ext/mapreduce/base_handler.py @@ -35,6 +35,7 @@ +import httplib import logging import simplejson @@ -46,7 +47,6 @@ except ImportError: from google.appengine.ext import webapp from google.appengine.ext.mapreduce import errors from google.appengine.ext.mapreduce import model -from google.appengine.ext.mapreduce import util class Error(Exception): @@ -79,13 +79,8 @@ class TaskQueueHandler(BaseHandler): self.response.set_status( 403, message="Task queue handler received non-task queue request") return - self._setup() self.handle() - def _setup(self): - """Called before handle method to set up handler.""" - pass - def handle(self): """To be implemented by subclasses.""" raise NotImplementedError() @@ -94,6 +89,17 @@ class TaskQueueHandler(BaseHandler): """Number of times this task has been retried.""" return int(self.request.headers.get("X-AppEngine-TaskExecutionCount", 0)) + def retry_task(self): + """Ask taskqueue to retry this task. + + Even though raising an exception can cause a task retry, it + will flood logs with highly visible ERROR logs. Handlers should uses + this method to perform controlled task retries. Only raise exceptions + for those deserve ERROR log entries. + """ + self.response.set_status(httplib.SERVICE_UNAVAILABLE, "Retry task") + self.response.clear() + class JsonHandler(BaseHandler): """Base class for JSON handlers for user interface. 
@@ -179,39 +185,22 @@ class HugeTaskHandler(TaskQueueHandler): class _RequestWrapper(object): def __init__(self, request): self._request = request - - self.path = self._request.path - self.headers = self._request.headers - - self._encoded = True - - if (not self._request.get(util.HugeTask.PAYLOAD_PARAM) and - not self._request.get(util.HugeTask.PAYLOAD_KEY_PARAM)): - self._encoded = False - return - self._params = util.HugeTask.decode_payload( - {util.HugeTask.PAYLOAD_PARAM: - self._request.get(util.HugeTask.PAYLOAD_PARAM), - util.HugeTask.PAYLOAD_KEY_PARAM: - self._request.get(util.HugeTask.PAYLOAD_KEY_PARAM)}) + self._params = model.HugeTask.decode_payload(request) def get(self, name, default=""): - if self._encoded: - return self._params.get(name, default) - else: - return self._request.get(name, default) + return self._params.get(name, default) def set(self, name, value): - if self._encoded: - self._params.set(name, value) - else: - self._request.set(name, value) + self._params[name] = value + + def __getattr__(self, name): + return getattr(self._request, name) def __init__(self, *args, **kwargs): super(HugeTaskHandler, self).__init__(*args, **kwargs) - def _setup(self): - super(HugeTaskHandler, self)._setup() + def initialize(self, request, response): + super(HugeTaskHandler, self).initialize(request, response) self.request = self._RequestWrapper(self.request) diff --git a/python/google/appengine/ext/mapreduce/handlers.py b/python/google/appengine/ext/mapreduce/handlers.py index 85957b0b..674f68b3 100644 --- a/python/google/appengine/ext/mapreduce/handlers.py +++ b/python/google/appengine/ext/mapreduce/handlers.py @@ -45,9 +45,12 @@ import sys import time import traceback +from google.appengine.ext import ndb + from google.appengine import runtime from google.appengine.api import datastore_errors from google.appengine.api import logservice +from google.appengine.api import servers from google.appengine.api import taskqueue from google.appengine.ext import db from google.appengine.ext.mapreduce import base_handler @@ -60,11 +63,6 @@ from google.appengine.ext.mapreduce import parameters from google.appengine.ext.mapreduce import util from google.appengine.runtime import apiproxy_errors -try: - from google.appengine.ext import ndb -except ImportError: - ndb = None - @@ -75,6 +73,9 @@ _SLICE_DURATION_SEC = 15 _LEASE_GRACE_PERIOD = 1 +_REQUEST_EVENTUAL_TIMEOUT = 10 * 60 + 30 + + _CONTROLLER_PERIOD_SEC = 2 @@ -128,13 +129,10 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): tstate: model.TransientShardState from taskqueue paylod. Returns: - True if lease is acquired. False if this task should be dropped. Only - old tasks (comparing to datastore state) will be dropped. Future tasks - are retried until they naturally become old so that we don't ever stuck - MR. - - Raises: - Exception: if the task should be retried by taskqueue. + True if lease is acquired. None if this task should be retried. + False if this task should be dropped. Only old tasks + (compared to the datastore state) will be dropped. Future tasks are + retried until they naturally become old so that the MR never gets stuck. """ if not shard_state: @@ -162,9 +160,10 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): - raise ValueError( + logging.warning( "ShardState for %s is behind slice. Waiting for it to catch up", shard_state.shard_id) + return @@ -180,12 +179,13 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): logging.warning( "Task %s-%s is ahead of ShardState %s. 
Waiting for it to catch up.", tstate.shard_id, tstate.slice_id, shard_state.slice_id) - raise errors.RetrySliceError("Raise an error to trigger retry.") + return if shard_state.slice_start_time: - countdown = self._lease_countdown(shard_state) + countdown = self._wait_time(shard_state, + _LEASE_GRACE_PERIOD + _SLICE_DURATION_SEC) if countdown > 0: logging.warning( "Last retry of slice %s-%s may be still running." @@ -195,15 +195,16 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): time.sleep(countdown) - raise errors.RetrySliceError("Raise an error to trigger retry") + return else: - if not self._old_request_ended(shard_state): + if (not self._old_request_ended(shard_state) and + self._wait_time(shard_state, _REQUEST_EVENTUAL_TIMEOUT)): logging.warning( "Last retry of slice %s-%s is still in flight with request_id " "%s. Will try again later.", tstate.shard_id, tstate.slice_id, shard_state.slice_request_id) - raise errors.RetrySliceError("Raise an error to trigger retry") + return config = util.create_datastore_write_config(tstate.mapreduce_spec) @@ -213,6 +214,9 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): If failed for any reason, raise error to retry the task (hence all the previous validation code). The task would die naturally eventually. + + Returns: + True if state commit succeeded. None otherwise. """ fresh_state = model.ShardState.get_by_shard_id(tstate.shard_id) if not fresh_state: @@ -224,20 +228,19 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): fresh_state.slice_start_time = datetime.datetime.now() fresh_state.slice_request_id = os.environ.get("REQUEST_LOG_ID") fresh_state.put(config=config) + return True else: logging.warning( "Contention on slice %s-%s execution. Will retry again.", tstate.shard_id, tstate.slice_id) time.sleep(random.randrange(1, 5)) + return - raise errors.RetrySliceError() - - _tx() - return True + return _tx() def _old_request_ended(self, shard_state): - """Whether previous slice retry has ended. + """Whether previous slice retry has ended according to Logs API. Args: shard_state: shard state. @@ -248,20 +251,36 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): """ assert shard_state.slice_start_time is not None assert shard_state.slice_request_id is not None - logs = list(logservice.fetch(request_ids=[shard_state.slice_request_id])) + request_ids = [shard_state.slice_request_id] + try: + logs = list(logservice.fetch(request_ids=request_ids)) + except logservice.InvalidArgumentError: + + logs = list(logservice.fetch( + request_ids=request_ids, + server_versions=[(servers.get_current_server_name(), + servers.get_current_version_name())])) + if not logs or not logs[0].finished: return False return True - def _lease_countdown(self, shard_state): - """Number of seconds before lease expire.""" - assert shard_state.slice_start_time is not None - delta = datetime.datetime.now() - shard_state.slice_start_time - min_delta = datetime.timedelta( - seconds=_SLICE_DURATION_SEC + _LEASE_GRACE_PERIOD) - if delta < min_delta: + def _wait_time(self, shard_state, secs, now=datetime.datetime.now): + """Time to wait until slice_start_time is secs ago from now. - return int(math.ceil((min_delta - delta).total_seconds())) + Args: + shard_state: shard state. + secs: duration in seconds. + now: a func that gets now. + + Returns: + 0 if no wait. A positive int in seconds otherwise. Always around up. 
+ """ + assert shard_state.slice_start_time is not None + delta = now() - shard_state.slice_start_time + duration = datetime.timedelta(seconds=secs) + if delta < duration: + return util.total_seconds(duration - delta) else: return 0 @@ -321,7 +340,11 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): model.MapreduceControl.get_key_by_job_id(spec.mapreduce_id), ]) - if not self._try_acquire_lease(shard_state, tstate): + lease_acquired = self._try_acquire_lease(shard_state, tstate) + if lease_acquired is None: + self.retry_task() + return + if not lease_acquired: return ctx = context.Context(spec, shard_state, @@ -342,13 +365,12 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): - if ndb is not None: - ndb_ctx = ndb.get_context() - ndb_ctx.set_cache_policy(lambda key: False) - ndb_ctx.set_memcache_policy(lambda key: False) + ndb_ctx = ndb.get_context() + ndb_ctx.set_cache_policy(lambda key: False) + ndb_ctx.set_memcache_policy(lambda key: False) context.Context._set(ctx) - retry_shard = False + retry_directive = False try: self.process_inputs( @@ -366,11 +388,14 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): tstate.output_writer.finalize(ctx, shard_state) except Exception, e: - retry_shard = self._retry_logic(e, shard_state, tstate, spec.mapreduce_id) + retry_directive = self._retry_logic( + e, shard_state, tstate, spec.mapreduce_id) finally: context.Context._set(None) - self._save_state_and_schedule_next(shard_state, tstate, retry_shard) + if retry_directive is None: + return self.retry_task() + self._save_state_and_schedule_next(shard_state, tstate, retry_directive) def process_inputs(self, input_reader, @@ -403,7 +428,7 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): for entity in input_reader: if isinstance(entity, db.Model): shard_state.last_work_item = repr(entity.key()) - elif ndb and isinstance(entity, ndb.Model): + elif isinstance(entity, ndb.Model): shard_state.last_work_item = repr(entity.key) else: shard_state.last_work_item = repr(entity)[:100] @@ -488,12 +513,12 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): if retry_shard: - task = self._state_to_task(tstate) + task = self._state_to_task(tstate, shard_state) elif shard_state.active: shard_state.advance_for_next_slice() tstate.advance_for_next_slice() countdown = self._get_countdown_for_next_slice(spec) - task = self._state_to_task(tstate, countdown=countdown) + task = self._state_to_task(tstate, shard_state, countdown=countdown) queue_name = os.environ.get("HTTP_X_APPENGINE_QUEUENAME", "default") @db.transactional(retries=5) @@ -509,7 +534,7 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): logging.error("Slice's %s", str(shard_state)) return fresh_shard_state.copy_from(shard_state) - + fresh_shard_state.put(config=config) @@ -518,8 +543,7 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): assert task is not None - self._add_task(task, fresh_shard_state, spec, queue_name) - fresh_shard_state.put(config=config) + self._add_task(task, spec, queue_name) try: _tx() @@ -551,10 +575,8 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): mr_id: mapreduce id. Returns: - True if shard should be retried. False otherwise. - - Raises: - errors.RetrySliceError: in order to trigger a slice retry. + True if shard should be retried. None if slice should be retried. + False otherwise. 
""" logging.error("Shard %s got error.", shard_state.shard_id) @@ -572,7 +594,9 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): if type(e) in errors.SHARD_RETRY_ERRORS: return self._attempt_shard_retry(shard_state, tstate, mr_id) else: - return self._attempt_slice_retry(shard_state, tstate) + if self._attempt_slice_retry(shard_state, tstate): + return + return False def _attempt_shard_retry(self, shard_state, tstate, mr_id): """Whether to retry shard. @@ -628,7 +652,8 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): tstate: model.TransientShardState for current shard. Returns: - False when slice can't be retried anymore. + True when slice should be retried. + False when slice can't be retried anymore. Raises: errors.RetrySliceError: in order to trigger a slice retry. @@ -645,7 +670,7 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): sys.exc_clear() self._try_free_lease(shard_state, slice_retry=True) - raise errors.RetrySliceError("Raise an error to trigger slice retry") + return True logging.error("Slice reached max retry limit of %s. " "Shard %s failed permanently.", @@ -692,12 +717,14 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): @classmethod def _state_to_task(cls, tstate, + shard_state, eta=None, countdown=None): """Generate task for slice according to current states. Args: tstate: An instance of TransientShardState. + shard_state: An instance of ShardState. eta: Absolute time when the MR should execute. May not be specified if 'countdown' is also supplied. This may be timezone-aware or timezone-naive. @@ -705,7 +732,7 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): Defaults to zero. Returns: - A util.HugeTask instance for the slice specified by current states. + A model.HugeTask instance for the slice specified by current states. """ base_path = tstate.base_path @@ -714,25 +741,25 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): tstate.slice_id, tstate.retries) - worker_task = util.HugeTask(url=base_path + "/worker_callback", - params=tstate.to_dict(), - name=task_name, - eta=eta, - countdown=countdown) + worker_task = model.HugeTask( + url=base_path + "/worker_callback", + params=tstate.to_dict(), + name=task_name, + eta=eta, + countdown=countdown, + parent=shard_state) return worker_task @classmethod def _add_task(cls, worker_task, - shard_state, mapreduce_spec, queue_name): """Schedule slice scanning by adding it to the task queue. Args: - worker_task: a util.HugeTask task for slice. This is NOT a taskqueue + worker_task: a model.HugeTask task for slice. This is NOT a taskqueue task. - shard_state: an instance of ShardState. mapreduce_spec: an instance of model.MapreduceSpec. queue_name: Optional queue to run on; uses the current queue of execution or the default queue if unspecified. @@ -744,7 +771,7 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): try: - worker_task.add(queue_name, parent=shard_state) + worker_task.add(queue_name) except (taskqueue.TombstonedTaskError, taskqueue.TaskAlreadyExistsError), e: logging.warning("Task %r already exists. 
%s: %s", @@ -792,8 +819,8 @@ class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler): """ queue_name = queue_name or os.environ.get("HTTP_X_APPENGINE_QUEUENAME", "default") - task = cls._state_to_task(tstate, eta, countdown) - cls._add_task(task, shard_state, tstate.mapreduce_spec, queue_name) + task = cls._state_to_task(tstate, shard_state, eta, countdown) + cls._add_task(task, tstate.mapreduce_spec, queue_name) class ControllerCallbackHandler(base_handler.HugeTaskHandler): @@ -820,11 +847,11 @@ class ControllerCallbackHandler(base_handler.HugeTaskHandler): ]) if not state: - logging.error("State not found for MR '%s'; dropping controller task.", - spec.mapreduce_id) + logging.warning("State not found for MR '%s'; dropping controller task.", + spec.mapreduce_id) return if not state.active: - logging.info( + logging.warning( "MR %r is not active. Looks like spurious controller task execution.", spec.mapreduce_id) self._clean_up_mr(spec, self.base_path()) @@ -883,12 +910,23 @@ class ControllerCallbackHandler(base_handler.HugeTaskHandler): state.result_status = model.MapreduceState.RESULT_ABORTED else: state.result_status = model.MapreduceState.RESULT_SUCCESS + self._finalize_outputs(spec, state) self._finalize_job(spec, state, self.base_path()) else: + @db.transactional(retries=5) + def _put_state(): + fresh_state = model.MapreduceState.get_by_job_id(spec.mapreduce_id) - config = util.create_datastore_write_config(spec) - state.put(config=config) + if not fresh_state.active: + logging.warning( + "Job %s is not active. Look like spurious task execution. " + "Dropping controller task.", spec.mapreduce_id) + return + config = util.create_datastore_write_config(spec) + state.put(config=config) + + _put_state() def _aggregate_stats(self, mapreduce_state, shard_states): """Update stats in mapreduce state by aggregating stats from shard states. @@ -916,11 +954,24 @@ class ControllerCallbackHandler(base_handler.HugeTaskHandler): return int(self.request.get("serial_id")) @classmethod + def _finalize_outputs(cls, mapreduce_spec, mapreduce_state): + """Finalize outputs. + + Args: + mapreduce_spec: an instance of MapreduceSpec. + mapreduce_state: an instance of MapreduceState. + """ + + if (mapreduce_spec.mapper.output_writer_class() and + mapreduce_state.result_status == model.MapreduceState.RESULT_SUCCESS): + mapreduce_spec.mapper.output_writer_class().finalize_job(mapreduce_state) + + @classmethod def _finalize_job(cls, mapreduce_spec, mapreduce_state, base_path): """Finalize job execution. - Finalizes output writer, invokes done callback and save mapreduce state - in a transaction, and schedule necessary clean ups. + Invokes done callback and save mapreduce state in a transaction, + and schedule necessary clean ups. 
Args: mapreduce_spec: an instance of MapreduceSpec @@ -929,11 +980,6 @@ class ControllerCallbackHandler(base_handler.HugeTaskHandler): """ config = util.create_datastore_write_config(mapreduce_spec) - - if (mapreduce_spec.mapper.output_writer_class() and - mapreduce_state.result_status == model.MapreduceState.RESULT_SUCCESS): - mapreduce_spec.mapper.output_writer_class().finalize_job(mapreduce_state) - queue_name = mapreduce_spec.params.get( model.MapreduceSpec.PARAM_DONE_CALLBACK_QUEUE, "default") @@ -946,8 +992,16 @@ class ControllerCallbackHandler(base_handler.HugeTaskHandler): headers={"Mapreduce-Id": mapreduce_spec.mapreduce_id}, method=mapreduce_spec.params.get("done_callback_method", "POST")) - def put_state(state): - state.put(config=config) + @db.transactional(retries=5) + def _put_state(): + fresh_state = model.MapreduceState.get_by_job_id( + mapreduce_spec.mapreduce_id) + if not fresh_state.active: + logging.warning( + "Job %s is not active. Look like spurious task execution. " + "Dropping controller task.", mapreduce_spec.mapreduce_id) + return + mapreduce_state.put(config=config) if done_task and not _run_task_hook( mapreduce_spec.get_hooks(), @@ -956,9 +1010,9 @@ class ControllerCallbackHandler(base_handler.HugeTaskHandler): queue_name): done_task.add(queue_name, transactional=True) + _put_state() logging.info("Final result for job '%s' is '%s'", mapreduce_spec.mapreduce_id, mapreduce_state.result_status) - db.run_in_transaction_custom_retries(5, put_state, mapreduce_state) cls._clean_up_mr(mapreduce_spec, base_path) @classmethod @@ -1021,17 +1075,18 @@ class ControllerCallbackHandler(base_handler.HugeTaskHandler): if not queue_name: queue_name = os.environ.get("HTTP_X_APPENGINE_QUEUENAME", "default") - controller_callback_task = util.HugeTask( + controller_callback_task = model.HugeTask( url=base_path + "/controller_callback", name=task_name, params=task_params, - countdown=_CONTROLLER_PERIOD_SEC) + countdown=_CONTROLLER_PERIOD_SEC, + parent=mapreduce_state) if not _run_task_hook(mapreduce_spec.get_hooks(), "enqueue_controller_task", controller_callback_task, queue_name): try: - controller_callback_task.add(queue_name, parent=mapreduce_state) + controller_callback_task.add(queue_name) except (taskqueue.TombstonedTaskError, taskqueue.TaskAlreadyExistsError), e: logging.warning("Task %r with params %r already exists. 
%s: %s", @@ -1069,6 +1124,7 @@ class KickOffJobHandler(base_handler.HugeTaskHandler): logging.warning("Found no mapper input data to process.") state.active = False state.active_shards = 0 + state.result_status = model.MapreduceState.RESULT_SUCCESS ControllerCallbackHandler._finalize_job(spec, state, self.base_path()) return @@ -1081,6 +1137,7 @@ class KickOffJobHandler(base_handler.HugeTaskHandler): if output_writer_class: output_writer_class.init_job(state) + state.put(config=util.create_datastore_write_config(spec)) KickOffJobHandler._schedule_shards( @@ -1156,9 +1213,9 @@ class KickOffJobHandler(base_handler.HugeTaskHandler): task = MapperWorkerCallbackHandler._state_to_task( model.TransientShardState( base_path, spec, shard_id, 0, input_reader, input_reader, - output_writer=output_writer)) + output_writer=output_writer), + shard_states[shard_number]) MapperWorkerCallbackHandler._add_task(task, - shard_states[shard_number], spec, queue_name) @@ -1261,12 +1318,7 @@ class StartJobHandler(base_handler.PostJsonHandler): _app=None, transactional=False, parent_entity=None): - queue_name = queue_name or os.environ.get("HTTP_X_APPENGINE_QUEUENAME", - "default") - if queue_name[0] == "_": - - queue_name = "default" - + """See control.start_map.""" if not transactional and parent_entity: raise Exception("Parent shouldn't be specfied " "for non-transactional starts.") @@ -1296,49 +1348,57 @@ class StartJobHandler(base_handler.PostJsonHandler): finally: context.Context._set(None) + if not transactional: + + state = model.MapreduceState.create_new(mapreduce_spec.mapreduce_id) + state.mapreduce_spec = mapreduce_spec + state.active = True + state.active_shards = mapper_spec.shard_count + if _app: + state.app_id = _app + config = util.create_datastore_write_config(mapreduce_spec) + state.put(config=config) + parent_entity = state + + cls._add_kickoff_task( + base_path, mapreduce_spec, eta, countdown, parent_entity, + queue_name, transactional, _app) + + return mapreduce_id + + @classmethod + def _add_kickoff_task(cls, + base_path, + mapreduce_spec, + eta, + countdown, + parent, + queue_name, + transactional, + _app): + queue_name = queue_name or os.environ.get("HTTP_X_APPENGINE_QUEUENAME", + "default") + if queue_name[0] == "_": + + queue_name = "default" + kickoff_params = {"mapreduce_spec": mapreduce_spec.to_json_str()} if _app: kickoff_params["app"] = _app - kickoff_worker_task = util.HugeTask( + kickoff_worker_task = model.HugeTask( url=base_path + "/kickoffjob_callback", params=kickoff_params, eta=eta, - countdown=countdown) - + countdown=countdown, + parent=parent) hooks = mapreduce_spec.get_hooks() - config = util.create_datastore_write_config(mapreduce_spec) - - def start_mapreduce(): - parent = parent_entity - if not transactional: - - - - state = model.MapreduceState.create_new(mapreduce_spec.mapreduce_id) - state.mapreduce_spec = mapreduce_spec - state.active = True - state.active_shards = mapper_spec.shard_count - if _app: - state.app_id = _app - state.put(config=config) - parent = state - - if hooks is not None: - try: - hooks.enqueue_kickoff_task(kickoff_worker_task, queue_name) - except NotImplementedError: - - pass - else: - return - kickoff_worker_task.add(queue_name, transactional=True, parent=parent) - - if transactional: - start_mapreduce() + if hooks is not None: + try: + hooks.enqueue_kickoff_task(kickoff_worker_task, queue_name) + except NotImplementedError: + kickoff_worker_task.add(queue_name, transactional=transactional) else: - db.run_in_transaction(start_mapreduce) - - 
return mapreduce_id + kickoff_worker_task.add(queue_name) class FinalizeJobHandler(base_handler.TaskQueueHandler): @@ -1353,9 +1413,9 @@ class FinalizeJobHandler(base_handler.TaskQueueHandler): config=config) shard_states = model.ShardState.find_by_mapreduce_state(mapreduce_state) for shard_state in shard_states: - db.delete(util._HugeTaskPayload.all().ancestor(shard_state), + db.delete(model._HugeTaskPayload.all().ancestor(shard_state), config=config) - db.delete(util._HugeTaskPayload.all().ancestor(mapreduce_state), + db.delete(model._HugeTaskPayload.all().ancestor(mapreduce_state), config=config) @classmethod diff --git a/python/google/appengine/ext/mapreduce/input_readers.py b/python/google/appengine/ext/mapreduce/input_readers.py index ee7e6e49..1b080a67 100644 --- a/python/google/appengine/ext/mapreduce/input_readers.py +++ b/python/google/appengine/ext/mapreduce/input_readers.py @@ -68,10 +68,8 @@ import time import zipfile from google.net.proto import ProtocolBuffer -try: - from google.appengine.ext import ndb -except ImportError: - ndb = None +from google.appengine.ext import ndb + from google.appengine.api import datastore from google.appengine.api import files from google.appengine.api import logservice @@ -180,7 +178,7 @@ class InputReader(model.JsonMixin): parameters to define the behavior of input readers. Returns: - A list of InputReaders. + A list of InputReaders. None when no input data can be found. """ raise NotImplementedError("split_input() not implemented in %s" % cls) @@ -721,7 +719,7 @@ class DatastoreInputReader(AbstractDatastoreInputReader): entity_type = util.for_name(model_classpath) if isinstance(entity_type, db.Model): return entity_type.kind() - elif ndb and isinstance(entity_type, (ndb.Model, ndb.MetaModel)): + elif isinstance(entity_type, (ndb.Model, ndb.MetaModel)): return entity_type._get_kind() else: @@ -2145,7 +2143,7 @@ class ConsistentKeyReader(_OldAbstractDatastoreInputReader): Returns: all the data in json-compatible map. 
""" - json_dict = super(DatastoreKeyInputReader, self).to_json() + json_dict = super(ConsistentKeyReader, self).to_json() json_dict[self.START_TIME_US_PARAM] = self.start_time_us return json_dict diff --git a/python/google/appengine/ext/mapreduce/key_ranges.py b/python/google/appengine/ext/mapreduce/key_ranges.py index 4a464da9..8a23425e 100644 --- a/python/google/appengine/ext/mapreduce/key_ranges.py +++ b/python/google/appengine/ext/mapreduce/key_ranges.py @@ -155,15 +155,16 @@ class _KeyRangesFromNSRange(KeyRanges): raise StopIteration() self._last_ns = self._iter.next() + current_ns_range = self._ns_range if self._last_ns == self._ns_range.namespace_end: self._ns_range = None return key_range.KeyRange(namespace=self._last_ns, - _app=self._ns_range.app) + _app=current_ns_range.app) def to_json(self): json = super(_KeyRangesFromNSRange, self).to_json() ns_range = self._ns_range - if self._ns_range is not None and self._last_ns: + if self._ns_range is not None and self._last_ns is not None: ns_range = ns_range.with_start_after(self._last_ns) if ns_range is not None: json.update({"ns_range": ns_range.to_json_object()}) diff --git a/python/google/appengine/ext/mapreduce/model.py b/python/google/appengine/ext/mapreduce/model.py index 9f04504c..122cee22 100644 --- a/python/google/appengine/ext/mapreduce/model.py +++ b/python/google/appengine/ext/mapreduce/model.py @@ -53,8 +53,10 @@ __all__ = ["JsonEncoder", "ShardState", "CountersMap", "TransientShardState", - "QuerySpec"] + "QuerySpec", + "HugeTask"] +import cgi import copy import datetime import logging @@ -62,9 +64,13 @@ import os import random import simplejson import time +import urllib +import zlib from google.appengine.api import datastore_errors from google.appengine.api import datastore_types +from google.appengine.api import taskqueue +from google.appengine.datastore import datastore_rpc from google.appengine.ext import db from google.appengine.ext.mapreduce import context from google.appengine.ext.mapreduce import hooks @@ -80,6 +86,163 @@ _DEFAULT_PROCESSING_RATE_PER_SEC = 1000000 _DEFAULT_SHARD_COUNT = 8 +_MAP_REDUCE_KINDS = ("_GAE_MR_MapreduceControl", + "_GAE_MR_MapreduceState", + "_GAE_MR_ShardState", + "_GAE_MR_TaskPayload") + + +class _HugeTaskPayload(db.Model): + """Model object to store task payload.""" + + payload = db.BlobProperty() + + @classmethod + def kind(cls): + """Returns entity kind.""" + return "_GAE_MR_TaskPayload" + + +class HugeTask(object): + """HugeTask is a taskqueue.Task-like class that can store big payloads. + + Payloads are stored either in the task payload itself or in the datastore. + Task handlers should inherit from base_handler.HugeTaskHandler class. + """ + + PAYLOAD_PARAM = "__payload" + PAYLOAD_KEY_PARAM = "__payload_key" + + + MAX_TASK_PAYLOAD = taskqueue.MAX_PUSH_TASK_SIZE_BYTES - 1024 + MAX_DB_PAYLOAD = datastore_rpc.BaseConnection.MAX_RPC_BYTES + + PAYLOAD_VERSION_HEADER = "AE-MR-Payload-Version" + + + PAYLOAD_VERSION = "1" + + def __init__(self, + url, + params, + name=None, + eta=None, + countdown=None, + parent=None): + """Init. + + Args: + url: task url in str. + params: a dict from str to str. + name: task name. + eta: task eta. + countdown: task countdown. + parent: parent entity of huge task's payload. + + Raises: + ValueError: when payload is too big even for datastore, or parent is + not specified when payload is stored in datastore. 
+ """ + self.url = url + self.name = name + self.eta = eta + self.countdown = countdown + self._headers = { + "Content-Type": "application/octet-stream", + self.PAYLOAD_VERSION_HEADER: self.PAYLOAD_VERSION + } + + + payload_str = urllib.urlencode(params) + compressed_payload = "" + if len(payload_str) > self.MAX_TASK_PAYLOAD: + compressed_payload = zlib.compress(payload_str) + + + if not compressed_payload: + self._payload = payload_str + + elif len(compressed_payload) < self.MAX_TASK_PAYLOAD: + self._payload = self.PAYLOAD_PARAM + compressed_payload + elif len(compressed_payload) > self.MAX_DB_PAYLOAD: + raise ValueError( + "Payload from %s to big to be stored in database: %s" % + self.name, len(compressed_payload)) + + else: + if not parent: + raise ValueError("Huge tasks should specify parent entity.") + + payload_entity = _HugeTaskPayload(payload=compressed_payload, + parent=parent) + payload_key = payload_entity.put() + self._payload = self.PAYLOAD_KEY_PARAM + str(payload_key) + + def add(self, queue_name, transactional=False): + """Add task to the queue.""" + task = self.to_task() + task.add(queue_name, transactional) + + def to_task(self): + """Convert to a taskqueue task.""" + + + + return taskqueue.Task( + url=self.url, + payload=self._payload, + name=self.name, + eta=self.eta, + countdown=self.countdown, + headers=self._headers) + + @classmethod + def decode_payload(cls, request): + """Decode task payload. + + HugeTask controls its own payload entirely including urlencoding. + It doesn't depend on any particular web framework. + + Args: + request: a webapp Request instance. + + Returns: + A dict of str to str. The same as the params argument to __init__. + + Raises: + DeprecationWarning: When task payload constructed from an older + incompatible version of mapreduce. + """ + + + if request.headers.get(cls.PAYLOAD_VERSION_HEADER) != cls.PAYLOAD_VERSION: + raise DeprecationWarning( + "Task is generated by an older incompatible version of mapreduce. " + "Please kill this job manually") + + body = request.body + compressed_payload_str = None + if body.startswith(cls.PAYLOAD_KEY_PARAM): + payload_key = body[len(cls.PAYLOAD_KEY_PARAM):] + payload_entity = _HugeTaskPayload.get(payload_key) + compressed_payload_str = payload_entity.payload + elif body.startswith(cls.PAYLOAD_PARAM): + compressed_payload_str = body[len(cls.PAYLOAD_PARAM):] + + if compressed_payload_str: + payload_str = zlib.decompress(compressed_payload_str) + else: + payload_str = body + + result = {} + for (name, value) in cgi.parse_qs(payload_str).items(): + if len(value) == 1: + result[name] = value[0] + else: + result[name] = value + return result + + class JsonEncoder(simplejson.JSONEncoder): """MR customized json encoder.""" @@ -624,7 +787,7 @@ class MapreduceState(db.Model): active_shards: How many shards are still processing. start_time: When the job started. writer_state: Json property to be used by writer to store its state. - This is filled when single output per job. Will be dprecated. + This is filled when single output per job. Will be deprecated. """ RESULT_SUCCESS = "success" @@ -888,7 +1051,9 @@ class ShardState(db.Model): expired, new request needs to verify that said request has indeed ended according to logs API. Do this only when lease has expired because logs API is expensive. This field should always be set/unset - with slice_start_time. + with slice_start_time. It is possible Logs API doesn't log a request + at all or doesn't log the end of a request. 
So a new request can + proceed after a long conservative timeout. slice_retries: the number of times a slice has been retried due to data processing error (non taskqueue/datastore). This count is only a lower bound and is used to determined when to fail a slice diff --git a/python/google/appengine/ext/mapreduce/property_range.py b/python/google/appengine/ext/mapreduce/property_range.py index 7b8f6ad7..08631e24 100644 --- a/python/google/appengine/ext/mapreduce/property_range.py +++ b/python/google/appengine/ext/mapreduce/property_range.py @@ -22,10 +22,7 @@ import datetime -try: - from google.appengine.ext import ndb -except ImportError: - ndb = None +from google.appengine.ext import ndb from google.appengine.ext import db from google.appengine.ext.mapreduce import errors @@ -411,21 +408,15 @@ _DISCRETE_PROPERTY_SPLIT_FUNCTIONS = { db.IntegerProperty: _split_integer_property, db.StringProperty: _split_string_property, db.ByteStringProperty: _split_byte_string_property, -} -if ndb: - _DISCRETE_PROPERTY_SPLIT_FUNCTIONS.update({ - ndb.DateTimeProperty: _split_datetime_property, - ndb.IntegerProperty: _split_integer_property, - ndb.StringProperty: _split_string_property, - ndb.BlobProperty: _split_byte_string_property - }) + ndb.DateTimeProperty: _split_datetime_property, + ndb.IntegerProperty: _split_integer_property, + ndb.StringProperty: _split_string_property, + ndb.BlobProperty: _split_byte_string_property +} _CONTINUOUS_PROPERTY_SPLIT_FUNCTIONS = { db.FloatProperty: _split_float_property, -} -if ndb: - _CONTINUOUS_PROPERTY_SPLIT_FUNCTIONS.update({ - ndb.FloatProperty: _split_float_property, - }) + ndb.FloatProperty: _split_float_property, +} diff --git a/python/google/appengine/ext/mapreduce/test_support.py b/python/google/appengine/ext/mapreduce/test_support.py index dee0a466..f0337ee8 100644 --- a/python/google/appengine/ext/mapreduce/test_support.py +++ b/python/google/appengine/ext/mapreduce/test_support.py @@ -42,30 +42,32 @@ import re from google.appengine.ext.mapreduce import main from google.appengine.ext.webapp import mock_webapp -from google.appengine.ext.mapreduce import util def decode_task_payload(task): """Decodes POST task payload. + This can only decode POST payload for a normal task. For huge task, + use model.HugeTask.decode_payload. + Args: - task: a task to decode its payload. + task: a dict representing a taskqueue task as documented in taskqueue_stub. Returns: parameter_name -> parameter_value dict. If multiple parameter values are present, then parameter_value will be a list. """ - body = task["body"] - if not body: + if not task: return {} - decoded = base64.b64decode(body) + + body = base64.b64decode(task["body"]) result = {} - for (name, value) in cgi.parse_qs(decoded).items(): + for (name, value) in cgi.parse_qs(body).items(): if len(value) == 1: result[name] = value[0] else: result[name] = value - return util.HugeTask.decode_payload(result) + return result def execute_task(task, retries=0, handlers_map=None): @@ -85,6 +87,9 @@ def execute_task(task, retries=0, handlers_map=None): Returns: the handler instance used for this task. + + Raises: + Exception: whatever the task raises. 
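# Illustrative sketch (not part of the patch itself): the shape of the task dict
# decode_task_payload now expects -- a taskqueue_stub-style dict whose "body" is
# base64-encoded urlencoded parameters -- shown as a stdlib-only roundtrip.
import base64
import cgi
import urllib

params = {"mapreduce_id": "1234", "shard_id": "1234-0"}
task = {"url": "/mapreduce/worker_callback",   # made-up URL
        "method": "POST",
        "body": base64.b64encode(urllib.urlencode(params))}

decoded = {}
for name, value in cgi.parse_qs(base64.b64decode(task["body"])).items():
  decoded[name] = value[0] if len(value) == 1 else value
assert decoded == params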
""" if not handlers_map: handlers_map = main.create_handlers_map() @@ -95,35 +100,46 @@ def execute_task(task, retries=0, handlers_map=None): for (re_str, handler_class) in handlers_map: re_str = "^" + re_str + "($|\\?)" if re.match(re_str, url): - handler = handler_class() break - - if not handler: + else: raise Exception("Can't determine handler for %s" % task) - handler.initialize(mock_webapp.MockRequest(), - mock_webapp.MockResponse()) - handler.request.set_url(url) + request = mock_webapp.MockRequest() + request.set_url(url) - handler.request.environ["HTTP_HOST"] = "myapp.appspot.com" + request.environ["HTTP_HOST"] = "myapp.appspot.com" for k, v in task.get("headers", []): - handler.request.headers[k] = v + request.headers[k] = v environ_key = "HTTP_" + k.replace("-", "_").upper() - handler.request.environ[environ_key] = v - handler.request.headers["X-AppEngine-TaskExecutionCount"] = retries - handler.request.environ["HTTP_X_APPENGINE_TASKNAME"] = ( + request.environ[environ_key] = v + request.headers["X-AppEngine-TaskExecutionCount"] = retries + request.environ["HTTP_X_APPENGINE_TASKNAME"] = ( task.get("name", "default_task_name")) - handler.request.environ["HTTP_X_APPENGINE_QUEUENAME"] = ( + request.environ["HTTP_X_APPENGINE_QUEUENAME"] = ( task.get("queue_name", "default")) - handler.request.environ["PATH_INFO"] = handler.request.path + request.environ["PATH_INFO"] = request.path + + if task["method"] == "POST": + + request.body = base64.b64decode(task["body"]) + for k, v in decode_task_payload(task).iteritems(): + request.set(k, v) + + response = mock_webapp.MockResponse() + try: + + + handler = handler_class(request, response) + except TypeError: + + handler = handler_class() + handler.initialize(request, response) saved_os_environ = os.environ try: os.environ = dict(os.environ) - os.environ.update(handler.request.environ) + os.environ.update(request.environ) if task["method"] == "POST": - for k, v in decode_task_payload(task).items(): - handler.request.set(k, v) handler.post() elif task["method"] == "GET": handler.get() diff --git a/python/google/appengine/ext/mapreduce/util.py b/python/google/appengine/ext/mapreduce/util.py index 826401ea..de17a2b3 100644 --- a/python/google/appengine/ext/mapreduce/util.py +++ b/python/google/appengine/ext/mapreduce/util.py @@ -42,25 +42,36 @@ __all__ = [ "handler_for_name", "is_generator", "parse_bool", - "HugeTask", + "total_seconds", "try_serialize_handler", "try_deserialize_handler", ] - -import base64 -import cgi import inspect import pickle import types -import urllib -import zlib -from google.appengine.api import taskqueue -from google.appengine.ext import db from google.appengine.datastore import datastore_rpc +def total_seconds(td): + """convert a timedelta to seconds. + + This is patterned after timedelta.total_seconds, which is only + available in python 27. + + Args: + td: a timedelta object. + + Returns: + total seconds within a timedelta. Rounded up to seconds. + """ + secs = td.seconds + td.days * 24 * 3600 + if td.microseconds: + secs += 1 + return secs + + def for_name(fq_name, recursive=False): """Find class/function/method specified by its fully qualified name. 
@@ -253,115 +264,3 @@ def create_datastore_write_config(mapreduce_spec): else: return datastore_rpc.Configuration() - - -class _HugeTaskPayload(db.Model): - """Model object to store task payload.""" - - payload = db.TextProperty() - - @classmethod - def kind(cls): - """Returns entity kind.""" - return "_GAE_MR_TaskPayload" - - -class HugeTask(object): - """HugeTask is a taskqueue.Task-like class that can store big payloads. - - Payloads are stored either in the task payload itself or in the datastore. - Task handlers should inherit from HugeTaskHandler class. - """ - - PAYLOAD_PARAM = "__payload" - PAYLOAD_KEY_PARAM = "__payload_key" - - MAX_TASK_PAYLOAD = 100000 - MAX_DB_PAYLOAD = 1000000 - - def __init__(self, - url, - params, - name=None, - eta=None, - countdown=None): - self.url = url - self.params = params - self.compressed_payload = None - self.name = name - self.eta = eta - self.countdown = countdown - - payload_str = urllib.urlencode(self.params) - if len(payload_str) > self.MAX_TASK_PAYLOAD: - compressed_payload = base64.b64encode(zlib.compress(payload_str)) - if len(compressed_payload) > self.MAX_DB_PAYLOAD: - raise Exception("Payload from %s to big to be stored in database: %s", - self.name, len(compressed_payload)) - self.compressed_payload = compressed_payload - - def add(self, queue_name, transactional=False, parent=None): - """Add task to the queue.""" - if self.compressed_payload is None: - - task = self.to_task() - task.add(queue_name, transactional) - return - - if len(self.compressed_payload) < self.MAX_TASK_PAYLOAD: - - task = taskqueue.Task( - url=self.url, - params={self.PAYLOAD_PARAM: self.compressed_payload}, - name=self.name, - eta=self.eta, - countdown=self.countdown) - task.add(queue_name, transactional) - return - - - if not parent: - raise Exception("Huge tasks should specify parent entity.") - - payload_entity = _HugeTaskPayload(payload=self.compressed_payload, - parent=parent) - - payload_key = payload_entity.put() - task = taskqueue.Task( - url=self.url, - params={self.PAYLOAD_KEY_PARAM: str(payload_key)}, - name=self.name, - eta=self.eta, - countdown=self.countdown) - task.add(queue_name, transactional) - - def to_task(self): - """Convert to a taskqueue task without doing any kind of encoding.""" - return taskqueue.Task( - url=self.url, - params=self.params, - name=self.name, - eta=self.eta, - countdown=self.countdown) - - @classmethod - def decode_payload(cls, payload_dict): - if (not payload_dict.get(cls.PAYLOAD_PARAM) and - not payload_dict.get(cls.PAYLOAD_KEY_PARAM)): - return payload_dict - - if payload_dict.get(cls.PAYLOAD_PARAM): - payload = payload_dict.get(cls.PAYLOAD_PARAM) - else: - payload_key = payload_dict.get(cls.PAYLOAD_KEY_PARAM) - payload_entity = _HugeTaskPayload.get(payload_key) - payload = payload_entity.payload - payload_str = zlib.decompress(base64.b64decode(payload)) - - result = {} - for (name, value) in cgi.parse_qs(payload_str).items(): - if len(value) == 1: - result[name] = value[0] - else: - result[name] = value - return result diff --git a/python/google/appengine/ext/ndb/__init__.py b/python/google/appengine/ext/ndb/__init__.py index e4604f2d..38d6c97b 100644 --- a/python/google/appengine/ext/ndb/__init__.py +++ b/python/google/appengine/ext/ndb/__init__.py @@ -1,6 +1,6 @@ """NDB -- A new datastore API for the Google App Engine Python runtime.""" -__version__ = '1.0.8' +__version__ = '1.0.9' __all__ = [] diff --git a/python/google/appengine/ext/ndb/google_imports.py b/python/google/appengine/ext/ndb/google_imports.py index 
e0246960..5167df4a 100644 --- a/python/google/appengine/ext/ndb/google_imports.py +++ b/python/google/appengine/ext/ndb/google_imports.py @@ -5,10 +5,10 @@ this module. If necessary, add new imports here (in both places). """ try: - from google.appengine import api + from google.appengine.datastore import entity_pb normal_environment = True except ImportError: - from google3.apphosting import api + from google3.storage.onestore.v3 import entity_pb normal_environment = False if normal_environment: @@ -27,6 +27,9 @@ if normal_environment: from google.appengine.api.prospective_search import prospective_search_pb from google.appengine.datastore import datastore_query from google.appengine.datastore import datastore_rpc + # This line will fail miserably for any app using auto_import_fixer + # because auto_import_fixer only set up simple alias between + # google and google3. But entity_pb is move to a different path completely. from google.appengine.datastore import entity_pb from google.appengine.ext.blobstore import blobstore as ext_blobstore from google.appengine.ext import db diff --git a/python/google/appengine/ext/ndb/model.py b/python/google/appengine/ext/ndb/model.py index e11277de..5fe25d9a 100644 --- a/python/google/appengine/ext/ndb/model.py +++ b/python/google/appengine/ext/ndb/model.py @@ -327,8 +327,15 @@ class KindError(datastore_errors.BadValueError): """ -class BadProjectionError(datastore_errors.Error): - """Raised when a property name used as a projection is invalid.""" +class InvalidPropertyError(datastore_errors.Error): + """Raised when a property is not applicable to a given use. + + For example, a property must exist and be indexed to be used in a query's + projection or group by clause. + """ + +# Mapping for legacy support. +BadProjectionError = InvalidPropertyError class UnprojectedPropertyError(datastore_errors.Error): @@ -1330,24 +1337,25 @@ class Property(ModelAttribute): def _prepare_for_put(self, entity): pass - def _check_projection(self, rest=None): - """Helper to check whether this property can be used as a projection. + def _check_property(self, rest=None, require_indexed=True): + """Internal helper to check this property for specific requirements. + + Called by Model._check_properties(). Args: rest: Optional subproperty to check, of the form 'name1.name2...nameN'. Raises: - BadProjectionError if this property is not indexed or if a - subproperty is specified. (StructuredProperty overrides this - method to handle subprpoperties.) + InvalidPropertyError if this property does not meet the given + requirements or if a subproperty is specified. (StructuredProperty + overrides this method to handle subproperties.) """ - if not self._indexed: - raise BadProjectionError('Projecting on unindexed property %s' % - self._name) + if require_indexed and not self._indexed: + raise InvalidPropertyError('Property is unindexed %s' % self._name) if rest: - raise BadProjectionError('Projecting on subproperty %s.%s ' - 'but %s is not a structured property' % - (self._name, rest, self._name)) + raise InvalidPropertyError('Referencing subproperty %s.%s ' + 'but %s is not a structured property' % + (self._name, rest, self._name)) def _get_for_dict(self, entity): """Retrieve the value like _get_value(), processed for _to_dict(). @@ -2312,18 +2320,17 @@ class StructuredProperty(_StructuredGetForDictMixin): if value is not None: value._prepare_for_put() - def _check_projection(self, rest=None): - """Override for Model._check_projection(). 
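# Illustrative sketch (not from the SDK itself): code that caught
# BadProjectionError keeps working because the patch aliases it to the new
# InvalidPropertyError. "Note" and its unindexed property are made up here.
from google.appengine.ext import ndb

assert ndb.model.BadProjectionError is ndb.model.InvalidPropertyError


class Note(ndb.Model):
  body = ndb.TextProperty()  # TextProperty is never indexed


try:
  Note.query(projection=[Note.body])  # raises at query construction time
except ndb.model.BadProjectionError:
  pass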
+ def _check_property(self, rest=None, require_indexed=True): + """Override for Property._check_property(). Raises: - BadProjectionError if no subproperty is specified or if something + InvalidPropertyError if no subproperty is specified or if something is wrong with the subproperty. """ if not rest: - raise BadProjectionError('Projecting on structured property %s ' - 'requires a subproperty' % - self._name) - self._modelclass._check_projections([rest]) + raise InvalidPropertyError( + 'Structured property %s requires a subproperty' % self._name) + self._modelclass._check_properties([rest], require_indexed=require_indexed) class LocalStructuredProperty(_StructuredGetForDictMixin, BlobProperty): @@ -2713,6 +2720,8 @@ class Model(_NotEqualMixin): """ if len(args) > 1: raise TypeError('Model constructor takes no positional arguments.') + # self is passed implicitly through args so users can define a property + # named 'self'. (self,) = args get_arg = self.__get_arg key = get_arg(kwds, 'key') @@ -2740,7 +2749,7 @@ class Model(_NotEqualMixin): @classmethod def __get_arg(cls, kwds, kwd): - """Helper method to parse keywords that may be property names.""" + """Internal helper method to parse keywords that may be property names.""" alt_kwd = '_' + kwd if alt_kwd in kwds: return kwds.pop(alt_kwd) @@ -3117,22 +3126,23 @@ class Model(_NotEqualMixin): prop._prepare_for_put(self) @classmethod - def _check_projections(cls, projections): - """Helper to check that a list of projections is valid for this class. + def _check_properties(cls, property_names, require_indexed=True): + """Internal helper to check the given properties exist and meet specified + requirements. Called from query.py. Args: - projections: List or tuple of projections -- each being a string - giving a property name, possibly containing dots (to address - subproperties of structured properties). + property_names: List or tuple of property names -- each being a string, + possibly containing dots (to address subproperties of structured + properties). Raises: - BadProjectionError if one of the properties is invalid. + InvalidPropertyError if one of the properties is invalid. AssertionError if the argument is not a list or tuple of strings. """ - assert isinstance(projections, (list, tuple)), repr(projections) - for name in projections: + assert isinstance(property_names, (list, tuple)), repr(property_names) + for name in property_names: assert isinstance(name, basestring), repr(name) if '.' in name: name, rest = name.split('.', 1) @@ -3140,21 +3150,21 @@ class Model(_NotEqualMixin): rest = None prop = cls._properties.get(name) if prop is None: - cls._unknown_projection(name) + cls._unknown_property(name) else: - prop._check_projection(rest) + prop._check_property(rest, require_indexed=require_indexed) @classmethod - def _unknown_projection(cls, name): - """Helper to raise an exception for an unknown property name. + def _unknown_property(cls, name): + """Internal helper to raise an exception for an unknown property name. - This is called by _check_projections(). It is overridden by + This is called by _check_properties(). It is overridden by Expando, where this is a no-op. Raises: - BadProjectionError. + InvalidPropertyError. """ - raise BadProjectionError('Projecting on unknown property %s' % name) + raise InvalidPropertyError('Unknown property %s' % name) def _validate_key(self, key): """Validation for _key attribute (designed to be overridden). 
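# Illustrative sketch (not from the SDK itself): the renamed
# Model._check_properties() helper accepts dotted names so subproperties of
# structured properties can be validated. "Contact"/"Address" are made up.
from google.appengine.ext import ndb


class Address(ndb.Model):
  city = ndb.StringProperty()


class Contact(ndb.Model):
  name = ndb.StringProperty()
  address = ndb.StructuredProperty(Address)


# Both names pass silently; an unknown or unindexed name would raise
# ndb.model.InvalidPropertyError instead.
Contact._check_properties(['name', 'address.city'])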
@@ -3174,13 +3184,26 @@ class Model(_NotEqualMixin): def _query(cls, *args, **kwds): """Create a Query object for this class. - Keyword arguments are passed to the Query() constructor. If - positional arguments are given they are used to apply an initial - filter. + Args: + distinct: Optional bool, short hand for group_by = projection. + *args: Used to apply an initial filter + **kwds: are passed to the Query() constructor. Returns: A Query object. """ + # Validating distinct. + if 'distinct' in kwds: + if 'group_by' in kwds: + raise TypeError( + 'cannot use distinct= and group_by= at the same time') + projection = kwds.get('projection') + if not projection: + raise TypeError( + 'cannot use distinct= without projection=') + if kwds.pop('distinct'): + kwds['group_by'] = projection + # TODO: Disallow non-empty args and filter=. from .query import Query # Import late to avoid circular imports. qry = Query(kind=cls._get_kind(), **kwds) @@ -3457,8 +3480,8 @@ class Expando(Model): setattr(self, name, value) @classmethod - def _unknown_projection(cls, name): - # It is not an error to project on an unknown Expando property. + def _unknown_property(cls, name): + # It is not an error as the property may be a dynamic property. pass def __getattr__(self, name): diff --git a/python/google/appengine/ext/ndb/query.py b/python/google/appengine/ext/ndb/query.py index 27a93609..7dcaf7e0 100644 --- a/python/google/appengine/ext/ndb/query.py +++ b/python/google/appengine/ext/ndb/query.py @@ -765,9 +765,9 @@ class Query(object): @utils.positional(1) def __init__(self, kind=None, ancestor=None, filters=None, orders=None, - app=None, namespace=None, default_options=None): + app=None, namespace=None, default_options=None, + projection=None, group_by=None): """Constructor. - Args: kind: Optional kind string. ancestor: Optional ancestor Key. @@ -776,7 +776,14 @@ class Query(object): app: Optional app id. namespace: Optional namespace. default_options: Optional QueryOptions object. + projection: Optional list or tuple of properties to project. + group_by: Optional list or tuple of properties to group by. """ + # TODO(arfuller): Accept projection=Model.key to mean keys_only. + # TODO(arfuller): Consider adding incremental function + # group_by_property(*args) and project(*args, distinct=False). + + # Validating input. 
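# Illustrative sketch (not from the SDK itself): the new distinct= shorthand on
# Model.query() is exactly group_by = projection. "Greeting" is a made-up model.
from google.appengine.ext import ndb


class Greeting(ndb.Model):
  author = ndb.StringProperty()
  content = ndb.StringProperty()


q1 = Greeting.query(projection=[Greeting.author], distinct=True)
q2 = Greeting.query(projection=[Greeting.author], group_by=[Greeting.author])
assert q1.is_distinct and q2.is_distinct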
if ancestor is not None: if isinstance(ancestor, ParameterizedThing): if isinstance(ancestor, ParameterizedFunction): @@ -784,7 +791,7 @@ class Query(object): raise TypeError('ancestor cannot be a GQL function other than KEY') else: if not isinstance(ancestor, model.Key): - raise TypeError('ancestor must be a Key') + raise TypeError('ancestor must be a Key; received %r' % (ancestor,)) if not ancestor.id(): raise ValueError('ancestor cannot be an incomplete key') if app is not None: @@ -798,32 +805,62 @@ class Query(object): if filters is not None: if not isinstance(filters, Node): raise TypeError('filters must be a query Node or None; received %r' % - filters) + (filters,)) if orders is not None: if not isinstance(orders, datastore_query.Order): raise TypeError('orders must be an Order instance or None; received %r' - % orders) + % (orders,)) if default_options is not None: if not isinstance(default_options, datastore_rpc.BaseConfiguration): raise TypeError('default_options must be a Configuration or None; ' - 'received %r' % default_options) - self.__kind = kind # String - self.__ancestor = ancestor # Key - self.__filters = filters # None or Node subclass - self.__orders = orders # None or datastore_query.Order instance + 'received %r' % (default_options,)) + if projection is not None: + if default_options.projection is not None: + raise TypeError('cannot use projection= and ' + 'default_options.projection at the same time') + if default_options.keys_only is not None: + raise TypeError('cannot use projection= and ' + 'default_options.keys_only at the same time') + + self.__kind = kind # String. + self.__ancestor = ancestor # Key. + self.__filters = filters # None or Node subclass. + self.__orders = orders # None or datastore_query.Order instance. self.__app = app self.__namespace = namespace self.__default_options = default_options - # Check the projection in the default options. (This is done as a - # side effect of calling _fix_projection(); it's done late because - # that function expects self to be completely initialized.) - if kind is not None and default_options is not None: - default_projection = QueryOptions.projection(default_options) - if default_projection is not None: - self._fix_projection(default_projection) + + # Checked late as _check_properties depends on local state. + self.__projection = None + if projection is not None: + if not projection: + raise TypeError('projection argument cannot be empty') + if not isinstance(projection, (tuple, list)): + raise TypeError( + 'projection must be a tuple, list or None; received %r' % + (projection,)) + self._check_properties(self._to_property_names(projection)) + self.__projection = tuple(projection) + + self.__group_by = None + if group_by is not None: + if not group_by: + raise TypeError('group_by argument cannot be empty') + if not isinstance(group_by, (tuple, list)): + raise TypeError( + 'group_by must be a tuple, list or None; received %r' % (group_by,)) + self._check_properties(self._to_property_names(group_by)) + self.__group_by = tuple(group_by) def __repr__(self): args = [] + if self.app is not None: + args.append('app=%r' % self.app) + if (self.namespace is not None and + self.namespace != namespace_manager.get_namespace()): + # Only show the namespace if set and not the current namespace. + # (This is similar to what Key.__repr__() does.) 
+ args.append('namespace=%r' % self.namespace) if self.kind is not None: args.append('kind=%r' % self.kind) if self.ancestor is not None: @@ -833,13 +870,10 @@ class Query(object): if self.orders is not None: # TODO: Format orders better. args.append('orders=...') # PropertyOrder doesn't have a good repr(). - if self.app is not None: - args.append('app=%r' % self.app) - if (self.namespace is not None and - self.namespace != namespace_manager.get_namespace()): - # Only show the namespace if set and not the current namespace. - # (This is similar to what Key.__repr__() does.) - args.append('namespace=%r' % self.namespace) + if self.projection: + args.append('projection=%r' % (self._to_property_names(self.projection))) + if self.group_by: + args.append('group_by=%r' % (self._to_property_names(self.group_by))) if self.default_options is not None: args.append('default_options=%r' % self.default_options) return '%s(%s)' % (self.__class__.__name__, ', '.join(args)) @@ -858,7 +892,8 @@ class Query(object): return self.__class__(kind=self.kind, ancestor=self.ancestor, filters=self.filters, orders=self.orders, app=self.app, namespace=namespace, - default_options=self.default_options) + default_options=self.default_options, + projection=self.projection, group_by=self.group_by) def _get_query(self, connection): self.bind() # Raises an exception if there are unbound parameters. @@ -871,12 +906,16 @@ class Query(object): if filters is not None: post_filters = filters._post_filters() filters = filters._to_filter() + group_by = None + if self.group_by: + group_by = self._to_property_names(self.group_by) dsquery = datastore_query.Query(app=self.app, namespace=self.namespace, kind=kind.decode('utf-8') if kind else None, ancestor=ancestor, filter_predicate=filters, - order=self.orders) + order=self.orders, + group_by=group_by) if post_filters is not None: dsquery = datastore_query._AugmentedQuery( dsquery, @@ -945,7 +984,9 @@ class Query(object): subquery = self.__class__(kind=self.kind, ancestor=self.ancestor, filters=subfilter, orders=self.orders, app=self.app, namespace=self.namespace, - default_options=self.default_options) + default_options=self.default_options, + projection=self.projection, + group_by=self.group_by) subqueries.append(subquery) return _MultiQuery(subqueries) @@ -984,6 +1025,27 @@ class Query(object): """Accessor for the default_options (a QueryOptions instance or None).""" return self.__default_options + @property + def group_by(self): + """Accessor for the group by properties (a tuple instance or None).""" + return self.__group_by + + @property + def projection(self): + """Accessor for the projected properties (a tuple instance or None).""" + return self.__projection + + @property + def is_distinct(self): + """True if results are guaranteed to contain a unique set of property + values. + + This happens when every property in the group_by is also in the projection. 
+ """ + return bool(self.__group_by and + set(self._to_property_names(self.__group_by)) <= + set(self._to_property_names(self.__projection))) + def filter(self, *args): """Return a new Query with additional filter(s) applied.""" if not args: @@ -1005,7 +1067,8 @@ class Query(object): return self.__class__(kind=self.kind, ancestor=self.ancestor, filters=pred, orders=self.orders, app=self.app, namespace=self.namespace, - default_options=self.default_options) + default_options=self.default_options, + projection=self.projection, group_by=self.group_by) def order(self, *args): """Return a new Query with additional sort order(s) applied.""" @@ -1033,7 +1096,8 @@ class Query(object): return self.__class__(kind=self.kind, ancestor=self.ancestor, filters=self.filters, orders=orders, app=self.app, namespace=self.namespace, - default_options=self.default_options) + default_options=self.default_options, + projection=self.projection, group_by=self.group_by) # Datastore API using the default context. @@ -1310,28 +1374,44 @@ class Query(object): raise TypeError('You cannot use config= and options= at the same time') q_options['config'] = q_options.pop('options') if q_options.get('projection'): - q_options['projection'] = self._fix_projection(q_options['projection']) + try: + q_options['projection'] = self._to_property_names( + q_options['projection']) + except TypeError, e: + raise datastore_errors.BadArgumentError(e) + self._check_properties(q_options['projection']) options = QueryOptions(**q_options) + + # Populate projection if it hasn't been overridden. + if (options.keys_only is None and + options.projection is None and + self.__projection): + options = QueryOptions( + projection=self._to_property_names(self.__projection), config=options) + # Populate default options if self.default_options is not None: options = self.default_options.merge(options) + return options - def _fix_projection(self, projections): - if not isinstance(projections, (list, tuple)): - projections = [projections] # It will be type-checked below. + def _to_property_names(self, properties): + if not isinstance(properties, (list, tuple)): + properties = [properties] # It will be type-checked below. 
fixed = [] - for proj in projections: + for proj in properties: if isinstance(proj, basestring): fixed.append(proj) elif isinstance(proj, model.Property): fixed.append(proj._name) else: - raise datastore_errors.BadArgumentError( - 'Unexpected projection (%r); should be string or Property') + raise TypeError( + 'Unexpected property (%r); should be string or Property' % (proj,)) + return fixed + + def _check_properties(self, fixed, **kwargs): modelclass = model.Model._kind_map.get(self.__kind) if modelclass is not None: - modelclass._check_projections(fixed) - return fixed + modelclass._check_properties(fixed, **kwargs) def analyze(self): """Return a list giving the parameters required by a query.""" @@ -1376,7 +1456,8 @@ class Query(object): return self.__class__(kind=self.kind, ancestor=ancestor, filters=filters, orders=self.orders, app=self.app, namespace=self.namespace, - default_options=self.default_options) + default_options=self.default_options, + projection=self.projection, group_by=self.group_by) def gql(query_string, *args, **kwds): @@ -1468,14 +1549,19 @@ def _gql(query_string, query_class=Query): keys_only = gql_qry._keys_only if not keys_only: keys_only = None + options = QueryOptions(offset=offset, limit=limit, keys_only=keys_only) projection = gql_qry.projection() - options = QueryOptions(offset=offset, limit=limit, keys_only=keys_only, - projection=projection) + if gql_qry.is_distinct(): + group_by = projection + else: + group_by = None qry = query_class(kind=kind, ancestor=ancestor, filters=filters, orders=orders, - default_options=options) + default_options=options, + projection=projection, + group_by=group_by) return qry diff --git a/python/google/appengine/ext/remote_api/remote_api_services.py b/python/google/appengine/ext/remote_api/remote_api_services.py index 7f91a28a..60de5e35 100644 --- a/python/google/appengine/ext/remote_api/remote_api_services.py +++ b/python/google/appengine/ext/remote_api/remote_api_services.py @@ -88,7 +88,6 @@ SERVICE_PB_MAP = { 'Get': (datastore_pb.GetRequest, datastore_pb.GetResponse), 'Put': (datastore_pb.PutRequest, datastore_pb.PutResponse), 'Delete': (datastore_pb.DeleteRequest, datastore_pb.DeleteResponse), - 'Count': (datastore_pb.Query, api_base_pb.Integer64Proto), 'AllocateIds':(datastore_pb.AllocateIdsRequest, datastore_pb.AllocateIdsResponse), 'RunQuery': (datastore_pb.Query, @@ -280,13 +279,13 @@ SERVICE_PB_MAP = { 'PurgeQueue': (taskqueue_service_pb.TaskQueuePurgeQueueRequest, taskqueue_service_pb.TaskQueuePurgeQueueResponse), 'DeleteQueue': (taskqueue_service_pb.TaskQueueDeleteQueueRequest, - taskqueue_service_pb.TaskQueueDeleteQueueRequest), + taskqueue_service_pb.TaskQueueDeleteQueueResponse), 'DeleteGroup': (taskqueue_service_pb.TaskQueueDeleteGroupRequest, - taskqueue_service_pb.TaskQueueDeleteGroupRequest), + taskqueue_service_pb.TaskQueueDeleteGroupResponse), 'QueryTasks': (taskqueue_service_pb.TaskQueueQueryTasksRequest, taskqueue_service_pb.TaskQueueQueryTasksResponse), - 'FetchTasks': (taskqueue_service_pb.TaskQueueFetchTaskRequest, - taskqueue_service_pb.TaskQueueFetchTaskRequest), + 'FetchTask': (taskqueue_service_pb.TaskQueueFetchTaskRequest, + taskqueue_service_pb.TaskQueueFetchTaskResponse), 'QueryAndOwnTasks': ( taskqueue_service_pb.TaskQueueQueryAndOwnTasksRequest, taskqueue_service_pb.TaskQueueQueryAndOwnTasksResponse), @@ -326,7 +325,5 @@ SERVICE_PB_MAP = { channel_service_pb.CreateChannelResponse), 'SendChannelMessage': (channel_service_pb.SendMessageRequest, api_base_pb.VoidProto), - 'GetChannelPresence': 
(channel_service_pb.ChannelPresenceRequest, - channel_service_pb.ChannelPresenceResponse), }, } diff --git a/python/google/appengine/tools/appcfg.py b/python/google/appengine/tools/appcfg.py index c1bfb598..f8f0eef9 100644 --- a/python/google/appengine/tools/appcfg.py +++ b/python/google/appengine/tools/appcfg.py @@ -143,6 +143,12 @@ class Error(Exception): class OAuthNotAvailable(Error): """The appengine_rpc_httplib2 module could not be imported.""" + pass + + +class CannotStartServingError(Error): + """We could not start serving the version being uploaded.""" + pass def PrintUpdate(msg): @@ -1570,6 +1576,14 @@ class AppVersionUpload(object): result += ', version: %s' % self.version return result + @staticmethod + def _ValidateBeginYaml(resp): + """Validates the given /api/appversion/create response string.""" + response_dict = yaml.safe_load(resp) + if not response_dict or 'warnings' not in response_dict: + return False + return response_dict + def Begin(self): """Begins the transaction, returning a list of files that need uploading. @@ -1594,7 +1608,16 @@ class AppVersionUpload(object): url.static_files = os.path.join(STATIC_FILE_PREFIX, url.static_files) url.upload = os.path.join(STATIC_FILE_PREFIX, url.upload) - self.Send('/api/appversion/create', payload=config_copy.ToYAML()) + response = self.Send( + '/api/appversion/create', + payload=config_copy.ToYAML()) + + result = self._ValidateBeginYaml(response) + if result: + warnings = result.get('warnings') + for warning in warnings: + StatusUpdate('WARNING: %s' % warning) + self.in_transaction = True files_to_clone = [] @@ -1784,6 +1807,9 @@ class AppVersionUpload(object): self.in_transaction = False else: + if result == '0': + raise CannotStartServingError( + 'Another operation on this version is in progress.') success, unused_contents = RetryWithBackoff( lambda: (self.IsServing(), None), PrintRetryMessage, 1, 2, 60, 20) if not success: @@ -1853,6 +1879,17 @@ class AppVersionUpload(object): self.started = True return result + @staticmethod + def _ValidateIsServingYaml(resp): + """Validates the given /isserving YAML string. + + Returns the resulting dictionary if the response is valid. + """ + response_dict = yaml.safe_load(resp) + if 'serving' not in response_dict: + return False + return response_dict + def IsServing(self): """Check if the new app version is serving. @@ -1865,8 +1902,23 @@ class AppVersionUpload(object): assert self.started, 'StartServing() must be called before IsServing().' StatusUpdate('Checking if updated app version is serving.') + + self.params['new_serving_resp'] = '1' result = self.Send('/api/appversion/isserving') - return result == '1' + del self.params['new_serving_resp'] + if result in ['0', '1']: + return result == '1' + result = AppVersionUpload._ValidateIsServingYaml(result) + if not result: + raise CannotStartServingError( + 'Internal error: Could not parse IsServing response.') + message = result.get('message') + fatal = result.get('fatal') + if message: + StatusUpdate(message) + if fatal: + raise CannotStartServingError(fatal) + return result['serving'] def Rollback(self): """Rolls back the transaction if one is in progress.""" @@ -1988,6 +2040,11 @@ class AppVersionUpload(object): logging.info('HTTP Error (%s)', err) self.Rollback() raise + except CannotStartServingError, err: + + logging.error(err.message) + self.Rollback() + raise except: logging.exception('An unexpected error occurred. 
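# Illustrative sketch (not part of the patch itself): the two shapes
# /api/appversion/isserving can now answer with -- the legacy '0'/'1' strings or
# a YAML document with a 'serving' key -- and how the new validation treats
# them. parse_isserving() is a made-up stand-in for the appcfg logic; the YAML
# sample is invented.
import yaml


def parse_isserving(resp):
  if resp in ('0', '1'):                # legacy plain responses
    return resp == '1'
  response_dict = yaml.safe_load(resp)
  if not response_dict or 'serving' not in response_dict:
    raise ValueError('Could not parse IsServing response.')
  return response_dict['serving']


assert parse_isserving('1') is True
assert parse_isserving('serving: false\nmessage: Deploying new version.\n') is False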
Aborting.') self.Rollback() @@ -2350,6 +2407,9 @@ class AppCfgApp(object): except yaml_errors.EventListenerError, e: print >>self.error_fh, ('Error parsing yaml file:\n%s' % e) return 1 + except CannotStartServingError: + print >>self.error_fh, 'Could not start serving the given version.' + return 1 return 0 def _GetActionDescriptions(self): diff --git a/python/google/appengine/tools/dev-channel-js.js b/python/google/appengine/tools/dev-channel-js.js index 640bcb14..527f56ac 100644 --- a/python/google/appengine/tools/dev-channel-js.js +++ b/python/google/appengine/tools/dev-channel-js.js @@ -1,5 +1,12 @@ (function() { var goog = goog || {}; goog.global = this; +goog.exportPath_ = function(name, opt_object, opt_objectToExportTo) { + var parts = name.split("."), cur = opt_objectToExportTo || goog.global; + !(parts[0] in cur) && cur.execScript && cur.execScript("var " + parts[0]); + for(var part;parts.length && (part = parts.shift());) { + !parts.length && void 0 !== opt_object ? cur[part] = opt_object : cur = cur[part] ? cur[part] : cur[part] = {} + } +}; goog.define = function(name, defaultValue) { var value = defaultValue; goog.exportPath_(name, value) @@ -15,13 +22,6 @@ goog.setTestOnly = function(opt_message) { throw opt_message = opt_message || "", Error("Importing test-only code into non-debug environment" + opt_message ? ": " + opt_message : "."); } }; -goog.exportPath_ = function(name, opt_object, opt_objectToExportTo) { - var parts = name.split("."), cur = opt_objectToExportTo || goog.global; - !(parts[0] in cur) && cur.execScript && cur.execScript("var " + parts[0]); - for(var part;parts.length && (part = parts.shift());) { - !parts.length && goog.isDef(opt_object) ? cur[part] = opt_object : cur = cur[part] ? cur[part] : cur[part] = {} - } -}; goog.getObjectByName = function(name, opt_obj) { for(var parts = name.split("."), cur = opt_obj || goog.global, part;part = parts.shift();) { if(goog.isDefAndNotNull(cur[part])) { @@ -38,7 +38,15 @@ goog.globalize = function(obj, opt_global) { global[x] = obj[x] } }; -goog.addDependency = function() { +goog.addDependency = function(relPath, provides, requires) { + if(goog.DEPENDENCIES_ENABLED) { + for(var provide, require, path = relPath.replace(/\\/g, "/"), deps = goog.dependencies_, i = 0;provide = provides[i];i++) { + deps.nameToPath[provide] = path, path in deps.pathToNames || (deps.pathToNames[path] = {}), deps.pathToNames[path][provide] = !0 + } + for(var j = 0;require = requires[j];j++) { + path in deps.requires || (deps.requires[path] = {}), deps.requires[path][require] = !0 + } + } }; goog.useStrictRequires = !1; goog.ENABLE_DEBUG_LOADER = !0; @@ -63,6 +71,72 @@ goog.addSingletonGetter = function(ctor) { } }; goog.instantiatedSingletons_ = []; +goog.DEPENDENCIES_ENABLED = !1; +goog.DEPENDENCIES_ENABLED && (goog.included_ = {}, goog.dependencies_ = {pathToNames:{}, nameToPath:{}, requires:{}, visited:{}, written:{}}, goog.inHtmlDocument_ = function() { + var doc = goog.global.document; + return"undefined" != typeof doc && "write" in doc +}, goog.findBasePath_ = function() { + if(goog.global.CLOSURE_BASE_PATH) { + goog.basePath = goog.global.CLOSURE_BASE_PATH + }else { + if(goog.inHtmlDocument_()) { + for(var doc = goog.global.document, scripts = doc.getElementsByTagName("script"), i = scripts.length - 1;0 <= i;--i) { + var src = scripts[i].src, qmark = src.lastIndexOf("?"), l = -1 == qmark ? 
src.length : qmark; + if("base.js" == src.substr(l - 7, 7)) { + goog.basePath = src.substr(0, l - 7); + break + } + } + } + } +}, goog.importScript_ = function(src) { + var importScript = goog.global.CLOSURE_IMPORT_SCRIPT || goog.writeScriptTag_; + !goog.dependencies_.written[src] && importScript(src) && (goog.dependencies_.written[src] = !0) +}, goog.writeScriptTag_ = function(src) { + if(goog.inHtmlDocument_()) { + var doc = goog.global.document; + if("complete" == doc.readyState) { + var isDeps = /\bdeps.js$/.test(src); + if(isDeps) { + return!1 + } + throw Error('Cannot write "' + src + '" after document load'); + } + doc.write('