Load /Users/solydzajs/Desktop/google_appengine into
[Melange.git] / thirdparty / google_appengine / google / appengine / ext / remote_api / remote_api_stub.py
blob0b84a118056e3214a32f436231e8a21717696f2f
1 #!/usr/bin/env python
3 # Copyright 2007 Google Inc.
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
9 # http://www.apache.org/licenses/LICENSE-2.0
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
18 """An apiproxy stub that calls a remote handler via HTTP.
20 This allows easy remote access to the App Engine datastore, and potentially any
21 of the other App Engine APIs, using the same interface you use when accessing
22 the service locally.
24 An example Python script:
25 ---
26 from google.appengine.ext import db
27 from google.appengine.ext.remote_api import remote_api_stub
28 from myapp import models
29 import getpass
31 def auth_func():
32 return (raw_input('Username:'), getpass.getpass('Password:'))
34 remote_api_stub.ConfigureRemoteDatastore('my-app', '/remote_api', auth_func)
36 # Now you can access the remote datastore just as if your code was running on
37 # App Engine!
39 houses = models.House.all().fetch(100)
for a_house in houses:
41 a_house.doors += 1
42 db.put(houses)
43 ---
45 A few caveats:
46 - Where possible, avoid iterating over queries directly. Fetching as many
47 results as you will need is faster and more efficient.
48 - If you need to iterate, consider instead fetching items in batches with a sort
49 order and constructing a new query starting from where the previous one left
50 off. The __key__ pseudo-property can be used as a sort key for this purpose,
51 and does not even require a custom index if you are iterating over all
52 entities of a given type.
53 - Likewise, it's a good idea to put entities in batches. Instead of calling put
54 for each individual entity, accumulate them and put them in batches using
55 db.put(), if you can.
56 - Requests and responses are still limited to 1MB each, so if you have large
57 entities or try and fetch or put many of them at once, your requests may fail.
58 """
64 import google
65 import os
66 import pickle
67 import random
68 import sha
69 import sys
70 import thread
71 import threading
72 import yaml
74 from google.appengine.api import apiproxy_stub_map
75 from google.appengine.datastore import datastore_pb
76 from google.appengine.ext.remote_api import remote_api_pb
77 from google.appengine.runtime import apiproxy_errors
78 from google.appengine.tools import appengine_rpc
class Error(Exception):
  """Base exception type for all errors raised by this module."""
class ConfigurationError(Error):
  """Raised when remote access cannot be configured correctly."""
def GetUserAgent():
  """Determines the value of the 'User-agent' header to use for HTTP requests.

  Returns:
    String containing the 'user-agent' header value, which includes the SDK
    version, the platform information, and the version of Python;
    e.g., "remote_api/1.0.1 Darwin/9.2.0 Python/2.5.2".
  """
  python_version = ".".join(str(part) for part in sys.version_info)
  # Assemble the product tokens in one pass: SDK id, platform, Python version.
  return " ".join([
      "Google-remote_api/1.0",
      appengine_rpc.GetPlatformToken(),
      "Python/%s" % python_version,
  ])
def GetSourceName():
  """Returns the source-name token identifying this client library."""
  return "Google-remote_api-1.0"
class TransactionData(object):
  """Encapsulates data about an individual transaction."""

  def __init__(self, thread_id):
    # Thread that began the transaction; used to enforce that all later
    # operations on this transaction come from the same thread.
    self.thread_id = thread_id
    # Encoded key -> (key_pb, entity hash or None) for entities read
    # inside the transaction.
    self.preconditions = {}
    # Encoded key -> (key_pb, entity_pb or None) for buffered writes;
    # a None entity marks a delete.
    self.entities = {}
class RemoteStub(object):
  """A stub for calling services on a remote server over HTTP.

  You can use this to stub out any service that the remote server supports.
  """

  def __init__(self, server, path):
    """Constructs a new RemoteStub that communicates with the specified server.

    Args:
      server: An instance of a subclass of
        google.appengine.tools.appengine_rpc.AbstractRpcServer.
      path: The path to the handler this stub should send requests to.
    """
    self._server = server
    self._path = path

  def MakeSyncCall(self, service, call, request, response):
    """Wraps the request in a remote_api Request, sends it, and unwraps it."""
    remote_request = remote_api_pb.Request()
    remote_request.set_service_name(service)
    remote_request.set_method(call)
    remote_request.mutable_request().set_contents(request.Encode())

    wire_bytes = self._server.Send(self._path, remote_request.Encode())
    remote_response = remote_api_pb.Response()
    remote_response.ParseFromString(wire_bytes)

    if remote_response.has_exception():
      # NOTE: the server ships exceptions as pickles; unpickling here
      # inherently trusts the remote end of the connection.
      raise pickle.loads(remote_response.exception().contents())
    response.ParseFromString(remote_response.response().contents())
class RemoteDatastoreStub(RemoteStub):
  """A specialised stub for accessing the App Engine datastore remotely.

  A specialised stub is required because there are some datastore operations
  that preserve state between calls. This stub makes queries possible, and
  emulates transactions locally: reads record hash preconditions, writes are
  buffered per-transaction, and Commit submits everything to the server in a
  single atomic 'Transaction' call.
  """

  def __init__(self, server, path):
    """Constructs a new RemoteDatastoreStub.

    Args:
      server: An instance of a subclass of
        google.appengine.tools.appengine_rpc.AbstractRpcServer.
      path: The path to the handler this stub should send requests to.
    """
    super(RemoteDatastoreStub, self).__init__(server, path)
    # Local cursor id -> outstanding Query pb (None once exhausted).
    self.__queries = {}
    # Local transaction handle -> TransactionData.
    self.__transactions = {}

    self.__next_local_cursor = 1
    self.__local_cursor_lock = threading.Lock()
    self.__next_local_tx = 1
    self.__local_tx_lock = threading.Lock()

  def MakeSyncCall(self, service, call, request, response):
    """Dispatches datastore calls to a _Dynamic_* handler or the server."""
    assert service == 'datastore_v3'

    explanation = []
    assert request.IsInitialized(explanation), explanation

    handler = getattr(self, '_Dynamic_' + call, None)
    if handler:
      handler(request, response)
    else:
      super(RemoteDatastoreStub, self).MakeSyncCall(service, call, request,
                                                    response)

    assert response.IsInitialized(explanation), explanation

  def _Dynamic_RunQuery(self, query, query_result):
    # Defer the actual RPC until the first Next() call, which knows the
    # batch size; here we only allocate a local cursor id for the query.
    self.__local_cursor_lock.acquire()
    try:
      cursor_id = self.__next_local_cursor
      self.__next_local_cursor += 1
    finally:
      self.__local_cursor_lock.release()
    self.__queries[cursor_id] = query

    query_result.mutable_cursor().set_cursor(cursor_id)
    query_result.set_more_results(True)

  def _Dynamic_Next(self, next_request, query_result):
    cursor = next_request.cursor().cursor()
    if cursor not in self.__queries:
      raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
                                             'Cursor %d not found' % cursor)
    query = self.__queries[cursor]

    if query is None:
      # The query was exhausted by a previous Next() call.
      query_result.set_more_results(False)
      return

    request = datastore_pb.Query()
    request.CopyFrom(query)
    if request.has_limit():
      request.set_limit(min(request.limit(), next_request.count()))
    else:
      request.set_limit(next_request.count())

    super(RemoteDatastoreStub, self).MakeSyncCall(
        'remote_datastore', 'RunQuery', request, query_result)

    # Advance the stored query past the returned results so the next
    # Next() call resumes where this one left off.
    query.set_offset(query.offset() + query_result.result_size())
    if query.has_limit():
      query.set_limit(query.limit() - query_result.result_size())
    if not query_result.more_results():
      self.__queries[cursor] = None

  def _Dynamic_Get(self, get_request, get_response):
    txid = None
    if get_request.has_transaction():
      txid = get_request.transaction().handle()
      txdata = self.__transactions[txid]
      # BUGFIX: this assert used to wrap (condition, message) in a tuple,
      # which is always truthy, so the check never fired.
      assert txdata.thread_id == thread.get_ident(), \
          "Transactions are single-threaded."

      keys = [(k, k.Encode()) for k in get_request.key_list()]

      # Only fetch keys the transaction has not already written locally.
      new_request = datastore_pb.GetRequest()
      for key, enckey in keys:
        if enckey not in txdata.entities:
          new_request.add_key().CopyFrom(key)
    else:
      new_request = get_request

    if new_request.key_size() > 0:
      super(RemoteDatastoreStub, self).MakeSyncCall(
          'datastore_v3', 'Get', new_request, get_response)

    if txid is not None:
      # Record a hash precondition for every entity read inside the
      # transaction so Commit can detect concurrent modification.
      newkeys = new_request.key_list()
      entities = get_response.entity_list()
      for key, entity in zip(newkeys, entities):
        entity_hash = None
        if entity.has_entity():
          entity_hash = sha.new(entity.entity().Encode()).digest()
        txdata.preconditions[key.Encode()] = (key, entity_hash)

      # Merge locally-buffered writes with the fetched entities, preserving
      # the order of the originally requested keys.
      new_response = datastore_pb.GetResponse()
      it = iter(get_response.entity_list())
      for key, enckey in keys:
        if enckey in txdata.entities:
          cached_entity = txdata.entities[enckey][1]
          if cached_entity:
            new_response.add_entity().mutable_entity().CopyFrom(cached_entity)
          else:
            # Deleted inside this transaction: return an empty slot.
            new_response.add_entity()
        else:
          new_entity = it.next()
          if new_entity.has_entity():
            assert new_entity.entity().key() == key
            new_response.add_entity().CopyFrom(new_entity)
          else:
            new_response.add_entity()
      get_response.CopyFrom(new_response)

  def _Dynamic_Put(self, put_request, put_response):
    if put_request.has_transaction():
      entities = put_request.entity_list()

      # Entities whose last path element has neither an id nor a name need
      # server-allocated ids now, so the local commit buffer holds complete
      # keys and the caller sees them in the Put response.
      requires_id = lambda x: x.id() == 0 and not x.has_name()
      new_ents = [e for e in entities
                  if requires_id(e.key().path().element_list()[-1])]
      id_request = remote_api_pb.PutRequest()
      if new_ents:
        for ent in new_ents:
          e = id_request.add_entity()
          e.mutable_key().CopyFrom(ent.key())
          e.mutable_entity_group()
        id_response = datastore_pb.PutResponse()
        super(RemoteDatastoreStub, self).MakeSyncCall(
            'remote_datastore', 'GetIDs', id_request, id_response)
        assert id_request.entity_size() == id_response.key_size()
        for key, ent in zip(id_response.key_list(), new_ents):
          ent.mutable_key().CopyFrom(key)
          ent.mutable_entity_group().add_element().CopyFrom(
              key.path().element(0))

      txid = put_request.transaction().handle()
      txdata = self.__transactions[txid]
      # BUGFIX: was a tuple-assert that could never fail.
      assert txdata.thread_id == thread.get_ident(), \
          "Transactions are single-threaded."
      # Buffer the writes locally; they are sent to the server at Commit.
      for entity in entities:
        txdata.entities[entity.key().Encode()] = (entity.key(), entity)
        put_response.add_key().CopyFrom(entity.key())
    else:
      super(RemoteDatastoreStub, self).MakeSyncCall(
          'datastore_v3', 'Put', put_request, put_response)

  def _Dynamic_Delete(self, delete_request, response):
    if delete_request.has_transaction():
      txid = delete_request.transaction().handle()
      txdata = self.__transactions[txid]
      # BUGFIX: was a tuple-assert that could never fail.
      assert txdata.thread_id == thread.get_ident(), \
          "Transactions are single-threaded."
      # A None entity marks the key as deleted in the local commit buffer.
      for key in delete_request.key_list():
        txdata.entities[key.Encode()] = (key, None)
    else:
      super(RemoteDatastoreStub, self).MakeSyncCall(
          'datastore_v3', 'Delete', delete_request, response)

  def _Dynamic_BeginTransaction(self, request, transaction):
    self.__local_tx_lock.acquire()
    try:
      txid = self.__next_local_tx
      self.__transactions[txid] = TransactionData(thread.get_ident())
      self.__next_local_tx += 1
    finally:
      self.__local_tx_lock.release()
    transaction.set_handle(txid)

  def _Dynamic_Commit(self, transaction, transaction_response):
    txid = transaction.handle()
    if txid not in self.__transactions:
      raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.BAD_REQUEST,
          'Transaction %d not found.' % (txid,))

    txdata = self.__transactions[txid]
    # BUGFIX: was a tuple-assert that could never fail.
    assert txdata.thread_id == thread.get_ident(), \
        "Transactions are single-threaded."
    del self.__transactions[txid]

    tx = remote_api_pb.TransactionRequest()
    # Renamed local from 'hash' to avoid shadowing the builtin.
    for key, entity_hash in txdata.preconditions.values():
      precond = tx.add_precondition()
      precond.mutable_key().CopyFrom(key)
      if entity_hash:
        precond.set_hash(entity_hash)

    puts = tx.mutable_puts()
    deletes = tx.mutable_deletes()
    for key, entity in txdata.entities.values():
      if entity:
        puts.add_entity().CopyFrom(entity)
      else:
        deletes.add_key().CopyFrom(key)

    # Sends the buffered mutations and read preconditions atomically.
    super(RemoteDatastoreStub, self).MakeSyncCall(
        'remote_datastore', 'Transaction',
        tx, datastore_pb.PutResponse())

  def _Dynamic_Rollback(self, transaction, transaction_response):
    txid = transaction.handle()
    self.__local_tx_lock.acquire()
    try:
      if txid not in self.__transactions:
        raise apiproxy_errors.ApplicationError(
            datastore_pb.Error.BAD_REQUEST,
            'Transaction %d not found.' % (txid,))

      # BUGFIX: this used to read 'txdata[txid].thread_id', referencing an
      # undefined name (NameError) instead of checking thread ownership;
      # it was also a tuple-assert that could never fail.
      assert self.__transactions[txid].thread_id == thread.get_ident(), \
          "Transactions are single-threaded."
      del self.__transactions[txid]
    finally:
      self.__local_tx_lock.release()

  def _Dynamic_CreateIndex(self, index, id_response):
    raise apiproxy_errors.CapabilityDisabledError(
        'The remote datastore does not support index manipulation.')

  def _Dynamic_UpdateIndex(self, index, void):
    raise apiproxy_errors.CapabilityDisabledError(
        'The remote datastore does not support index manipulation.')

  def _Dynamic_DeleteIndex(self, index, void):
    raise apiproxy_errors.CapabilityDisabledError(
        'The remote datastore does not support index manipulation.')
def ConfigureRemoteDatastore(app_id,
                             path,
                             auth_func,
                             servername=None,
                             rpc_server_factory=appengine_rpc.HttpRpcServer,
                             rtok=None,
                             secure=False):
  """Does necessary setup to allow easy remote access to an AppEngine datastore.

  Either servername must be provided or app_id must not be None.  If app_id
  is None and a servername is provided, this function will send a request
  to the server to retrieve the app_id.

  Args:
    app_id: The app_id of your app, as declared in app.yaml.
    path: The path to the remote_api handler for your app
      (for example, '/remote_api').
    auth_func: A function that takes no arguments and returns a
      (username, password) tuple. This will be called if your application
      requires authentication to access the remote_api handler (it should!)
      and you do not already have a valid auth cookie.
    servername: The hostname your app is deployed on. Defaults to
      <app_id>.appspot.com.
    rpc_server_factory: A factory to construct the rpc server for the datastore.
    rtok: The validation token to send with app_id lookups. If None, a random
      token is used.
    secure: Use SSL when communicating with the server.

  Raises:
    urllib2.HTTPError: if app_id is not provided and there is an error while
      retrieving it.
    ConfigurationError: if there is an error configuring the datastore stub.
  """
  if not servername and not app_id:
    raise ConfigurationError('app_id or servername required')
  if not servername:
    servername = '%s.appspot.com' % (app_id,)
  server = rpc_server_factory(servername, auth_func, GetUserAgent(),
                              GetSourceName(), debug_data=False, secure=secure)
  if not app_id:
    if not rtok:
      # BUGFIX: this used to be str(random.randint), which stringifies the
      # bound method object itself (a constant-ish repr) rather than
      # producing a random token. Use the decimal digits of a random float.
      random.seed()
      rtok = str(random.random())[2:]
    urlargs = {'rtok': rtok}
    # Ask the remote handler for its app_id; it echoes rtok back so we can
    # validate that the response really came from our handler.
    response = server.Send(path, payload=None, **urlargs)
    if not response.startswith('{'):
      raise ConfigurationError(
          'Invalid response recieved from server: %s' % response)
    app_info = yaml.load(response)
    if not app_info or 'rtok' not in app_info or 'app_id' not in app_info:
      raise ConfigurationError('Error parsing app_id lookup response')
    if app_info['rtok'] != rtok:
      raise ConfigurationError('Token validation failed during app_id lookup.')
    app_id = app_info['app_id']

  os.environ['APPLICATION_ID'] = app_id
  # Replace the global stub map so all datastore_v3 calls route through the
  # remote stub from now on.
  apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
  stub = RemoteDatastoreStub(server, path)
  apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', stub)