Merge lp:~nchohan/appscale/GAE1.4.0-namespaces into lp:appscale

Proposed by Navraj Chohan
Status: Merged
Merged at revision: 628
Proposed branch: lp:~nchohan/appscale/GAE1.4.0-namespaces
Merge into: lp:appscale
Diff against target: 33458 lines (+20094/-9568)
98 files modified
AppController/helperfunctions.rb (+0/-2)
AppDB/appscale_server.py (+41/-62)
AppDB/cassandra/py_cassandra.py (+4/-4)
AppDB/dbconstants.py (+1/-1)
AppDB/helper_functions.py (+1/-1)
AppDB/hypertable/py_hypertable.py (+2/-2)
AppDB/soap_server.py (+13/-28)
AppServer/RELEASE_NOTES (+65/-0)
AppServer/VERSION (+2/-2)
AppServer/google/appengine/api/apiproxy_stub.py (+1/-2)
AppServer/google/appengine/api/apiproxy_stub_map.py (+5/-4)
AppServer/google/appengine/api/appinfo.py (+1/-2)
AppServer/google/appengine/api/blobstore/blobstore_stub.py (+18/-25)
AppServer/google/appengine/api/channel/channel.py (+56/-22)
AppServer/google/appengine/api/channel/channel_service_pb.py (+0/-2)
AppServer/google/appengine/api/datastore.py (+541/-888)
AppServer/google/appengine/api/datastore_distributed.py (+2/-10)
AppServer/google/appengine/api/datastore_file_stub.py (+138/-363)
AppServer/google/appengine/api/datastore_types.py (+7/-7)
AppServer/google/appengine/api/images/__init__.py (+5/-19)
AppServer/google/appengine/api/images/images_stub.py (+5/-1)
AppServer/google/appengine/api/labs/taskqueue/__init__.py (+55/-3)
AppServer/google/appengine/api/labs/taskqueue/taskqueue.py (+0/-953)
AppServer/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py (+0/-5229)
AppServer/google/appengine/api/labs/taskqueue/taskqueue_stub.py (+0/-986)
AppServer/google/appengine/api/mail_stub.py (+7/-2)
AppServer/google/appengine/api/matcher/matcher_stub.py (+1/-5)
AppServer/google/appengine/api/memcache/memcache_stub.py (+5/-1)
AppServer/google/appengine/api/memcache_distributed.py (+0/-295)
AppServer/google/appengine/api/queueinfo.py (+57/-5)
AppServer/google/appengine/api/taskqueue/__init__.py (+34/-0)
AppServer/google/appengine/api/taskqueue/taskqueue.py (+1116/-0)
AppServer/google/appengine/api/taskqueue/taskqueue_distributed.py (+321/-0)
AppServer/google/appengine/api/taskqueue/taskqueue_service_pb.py (+5397/-0)
AppServer/google/appengine/api/taskqueue/taskqueue_stub.py (+1009/-0)
AppServer/google/appengine/api/user_service_stub.py (+1/-1)
AppServer/google/appengine/api/users.py (+2/-8)
AppServer/google/appengine/datastore/datastore_pb.py (+48/-12)
AppServer/google/appengine/datastore/datastore_query.py (+1184/-0)
AppServer/google/appengine/datastore/datastore_rpc.py (+1638/-0)
AppServer/google/appengine/datastore/datastore_sqlite_stub.py (+207/-196)
AppServer/google/appengine/datastore/datastore_stub_util.py (+575/-56)
AppServer/google/appengine/ext/admin/__init__.py (+2/-1)
AppServer/google/appengine/ext/admin/templates/base.html (+8/-3)
AppServer/google/appengine/ext/admin/templates/queues.html (+3/-3)
AppServer/google/appengine/ext/appstats/sample_appengine_config.py (+2/-1)
AppServer/google/appengine/ext/appstats/static/appstats_js.js (+77/-77)
AppServer/google/appengine/ext/appstats/templates/main.html (+2/-2)
AppServer/google/appengine/ext/appstats/ui.py (+2/-2)
AppServer/google/appengine/ext/builtins/appstats/include.yaml (+0/-1)
AppServer/google/appengine/ext/bulkload/bulkloader_config.py (+5/-2)
AppServer/google/appengine/ext/datastore_admin/static/css/compiled.css (+1/-1)
AppServer/google/appengine/ext/datastore_admin/static/js/compiled.js (+1/-1)
AppServer/google/appengine/ext/datastore_admin/testutil.py (+1/-1)
AppServer/google/appengine/ext/db/__init__.py (+70/-85)
AppServer/google/appengine/ext/db/metadata.py (+190/-0)
AppServer/google/appengine/ext/deferred/deferred.py (+1/-1)
AppServer/google/appengine/ext/key_range/__init__.py (+2/-0)
AppServer/google/appengine/ext/mapreduce/handlers.py (+1/-1)
AppServer/google/appengine/ext/mapreduce/input_readers.py (+6/-2)
AppServer/google/appengine/ext/remote_api/remote_api_services.py (+1/-7)
AppServer/google/appengine/ext/remote_api/remote_api_stub.py (+1/-0)
AppServer/google/appengine/ext/search/__init__.py (+16/-15)
AppServer/google/appengine/ext/webapp/__init__.py (+4/-0)
AppServer/google/appengine/ext/webapp/template.py (+81/-19)
AppServer/google/appengine/ext/zipserve/__init__.py (+1/-1)
AppServer/google/appengine/runtime/apiproxy.py (+19/-7)
AppServer/google/appengine/runtime/apiproxy_errors.py (+3/-0)
AppServer/google/appengine/tools/appcfg.py (+9/-9)
AppServer/google/appengine/tools/dev-channel-js.js (+48/-39)
AppServer/google/appengine/tools/dev_appserver.py (+53/-19)
AppServer/google/appengine/tools/dev_appserver_blobimage.py (+5/-10)
AppServer/google/appengine/tools/dev_appserver_login.py (+5/-9)
AppServer/google/appengine/tools/dev_appserver_main.py (+54/-21)
AppServer/google/net/proto/ProtocolBuffer.py (+0/-1)
AppServer/google/net/proto/RawMessage.py (+1/-1)
AppServer/google/net/proto2/__init__.py (+16/-0)
AppServer/google/net/proto2/proto/__init__.py (+16/-0)
AppServer/google/net/proto2/proto/descriptor_pb2.py (+1581/-0)
AppServer/google/net/proto2/python/__init__.py (+16/-0)
AppServer/google/net/proto2/python/internal/__init__.py (+16/-0)
AppServer/google/net/proto2/python/internal/api_implementation.py (+34/-0)
AppServer/google/net/proto2/python/internal/containers.py (+239/-0)
AppServer/google/net/proto2/python/internal/decoder.py (+632/-0)
AppServer/google/net/proto2/python/internal/encoder.py (+722/-0)
AppServer/google/net/proto2/python/internal/message_listener.py (+64/-0)
AppServer/google/net/proto2/python/internal/python_message.py (+969/-0)
AppServer/google/net/proto2/python/internal/type_checkers.py (+253/-0)
AppServer/google/net/proto2/python/internal/wire_format.py (+234/-0)
AppServer/google/net/proto2/python/public/__init__.py (+16/-0)
AppServer/google/net/proto2/python/public/descriptor.py (+549/-0)
AppServer/google/net/proto2/python/public/message.py (+225/-0)
AppServer/google/net/proto2/python/public/reflection.py (+124/-0)
AppServer/google/net/proto2/python/public/service.py (+210/-0)
AppServer/google/net/proto2/python/public/service_reflection.py (+263/-0)
AppServer/google/net/proto2/python/public/text_format.py (+657/-0)
AppServer/lib/fancy_urllib/fancy_urllib/__init__.py (+18/-7)
AppServer/lib/webob/LICENSE (+0/-20)
To merge this branch: bzr merge lp:~nchohan/appscale/GAE1.4.0-namespaces
Reviewer: Chris Bunch (status: Approve)
Review via email: mp+44652@code.launchpad.net

Description of the change

GAE 1.4.0
Namespace support for the datastore
Fixed a Cassandra issue with improper range queries (it showed up once namespaces were part of table names)
Updated the PB server to set the app id on BeginTransaction responses
Removed AppScale versioning from the AppServer completely
Fixed the MapReduce streaming path to point to hadoop-0.20.2
Moved the taskqueue code to its new location, no longer under labs (there is an auto function for taskqueue; we should discuss this)
Ran pychecker on a few files and fixed small code issues
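
For reviewers, a minimal sketch of the new table-naming scheme the diff below introduces in AppDB/appscale_server.py: the per-app appscale_version suffix is gone and the GAE namespace takes its place, so every (app, kind, namespace) triple gets its own table. The function names match the diff; JOURNAL_TABLE's concrete value is a placeholder here.

    JOURNAL_TABLE = "JOURNAL"  # placeholder; the real constant lives in AppDB

    def getTableName(app_id, kind, namespace):
      # entity tables are now partitioned by namespace, not appscale_version
      return app_id + "___" + kind + "___" + namespace

    def getJournalTable(app_id, namespace):
      return JOURNAL_TABLE + "___" + app_id + "___" + namespace

    # e.g. getTableName("guestbook", "Greeting", "")     == "guestbook___Greeting___"
    #      getTableName("guestbook", "Greeting", "prod") == "guestbook___Greeting___prod"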

628. By Navraj Chohan

GAE 1.4.0; namespace support for the datastore; fixed a Cassandra issue with improper range queries (showed up for namespaces); updated the PB server to set the app id on BeginTransaction responses; removed AppScale versioning from the AppServer completely; fixed the MapReduce streaming path to point to hadoop-0.20.2; moved the taskqueue code out of labs (there is an auto function for taskqueue; we should discuss this); ran pychecker on a few files and fixed small code issues

Revision history for this message
Chris Bunch (cgb-cs) wrote :

Approved - also added new sample apps to tools

review: Approve

Preview Diff

=== modified file 'AppController/helperfunctions.rb'
--- AppController/helperfunctions.rb 2010-12-11 18:01:33 +0000
+++ AppController/helperfunctions.rb 2010-12-24 09:11:16 +0000
@@ -225,10 +225,8 @@
 #          "--address=#{public_ip}",
 #          "--port=#{port}",
 #          "--datastore_path=#{db_location}",
-          "--appscale_version=#{app_version}",
           "/var/apps/#{app_name}/app",
           "-a #{public_ip}",
-          "--appscale_version #{app_version}",
           ">> /var/apps/#{app_name}/log/server.log 2>&1 &"]
     start_app = cmd.join(" ")
     Djinn.log_debug(start_app)
 
=== modified file 'AppDB/appscale_server.py'
--- AppDB/appscale_server.py 2010-12-10 23:34:17 +0000
+++ AppDB/appscale_server.py 2010-12-24 09:11:16 +0000
@@ -127,8 +127,8 @@
 
 
 
-def getTableName(app_id, kind, version):
-  return app_id + "___" + kind + "___" + version
+def getTableName(app_id, kind, namespace):
+  return app_id + "___" + kind + "___" + namespace
 
 def getRowKey(app_id, ancestor_list):
   if ancestor_list == None:
@@ -217,8 +217,8 @@
   return key
 
 
-def getJournalTable(app_id, appscale_version):
-  return JOURNAL_TABLE + "___" + app_id + "___" + str(appscale_version)
+def getJournalTable(app_id, namespace):
+  return JOURNAL_TABLE + "___" + app_id + "___" + namespace
 
 # isChild is None if False
 # if isChild is None, root is ignored
@@ -296,7 +296,7 @@
 
   # remote api request
   # sends back a response
-  def remote_request(self, app_id, appscale_version, http_request_data):
+  def remote_request(self, app_id, http_request_data):
     apirequest = remote_api_pb.Request(http_request_data)
     apiresponse = remote_api_pb.Response()
     response = None
@@ -318,35 +318,27 @@
     http_request_data = request_data.contents()
     if method == "Put":
       response, errcode, errdetail = self.put_request(app_id,
-                                                      appscale_version,
                                                       http_request_data)
     elif method == "Get":
       response, errcode, errdetail = self.get_request(app_id,
-                                                      appscale_version,
                                                       http_request_data)
     elif method == "Delete":
       response, errcode, errdetail = self.delete_request(app_id,
-                                                         appscale_version,
                                                          http_request_data)
     elif method == "RunQuery":
      response, errcode, errdetail = self.run_query(app_id,
-                                                   appscale_version,
                                                    http_request_data)
     elif method == "BeginTransaction":
       response, errcode, errdetail = self.begin_transaction_request(app_id,
-                                                                    appscale_version,
                                                                     http_request_data)
     elif method == "Commit":
       response, errcode, errdetail = self.commit_transaction_request(app_id,
-                                                                     appscale_version,
                                                                      http_request_data)
     elif method == "Rollback":
       response, errcode, errdetail = self.rollback_transaction_request(app_id,
-                                                                       appscale_version,
                                                                        http_request_data)
     elif method == "AllocateIds":
       response, errcode, errdetail = self.allocate_ids_request(app_id,
-                                                               appscale_version,
                                                                http_request_data)
     elif method == "CreateIndex":
       errcode = datastore_pb.Error.PERMISSION_DENIED
@@ -405,9 +397,10 @@
     print "errdetail:",errdetail
     self.write( apiresponse.Encode() )
 
-  def run_query(self, app_id, appscale_version, http_request_data):
+  def run_query(self, app_id, http_request_data):
     global app_datastore
     query = datastore_pb.Query(http_request_data)
+    namespace = query.name_space()
     #logger.debug("QUERY:%s" % query)
     results = []
     if query.has_transaction():
@@ -441,7 +434,7 @@
     else:
       kind = query.kind()
       # Fetch query from the datastore #
-      table_name = getTableName(app_id, kind, appscale_version)
+      table_name = getTableName(app_id, kind, namespace)
       r = app_datastore.get_table( table_name, ENTITY_TABLE_SCHEMA)
       err = r[0]
       if err not in ERROR_CODES:
@@ -494,7 +487,7 @@
       elif prev_version != long(ii):
         # if the versions don't match, a valid version must be fetched
         journal_key = getJournalKey(row_key, prev_version)
-        journal_table = getJournalTable(app_id, appscale_version)
+        journal_table = getJournalTable(app_id, namespace)
         journal_result = app_datastore.get_entity( journal_table,
                                                    journal_key,
                                                    JOURNAL_SCHEMA )
@@ -616,14 +609,15 @@
     return (clone_qr_pb.Encode(), 0, "")
 
 
-  def begin_transaction_request(self, app_id, appscale_version, http_request_data):
+  def begin_transaction_request(self, app_id, http_request_data):
     transaction_pb = datastore_pb.Transaction()
     # handle = zk.getTransactionID(app_id)
     handle = zoo_keeper.getTransactionID(app_id)
     transaction_pb.set_handle(handle)
+    transaction_pb.set_app(app_id)
     return (transaction_pb.Encode(), 0, "")
 
-  def commit_transaction_request(self, app_id, appscale_version, http_request_data):
+  def commit_transaction_request(self, app_id, http_request_data):
     txn = datastore_pb.Transaction(http_request_data)
     commitres_pb = datastore_pb.CommitResponse()
 
@@ -635,13 +629,13 @@
                 datastore_pb.Error.INTERNAL_ERROR,
                 "Unable to release lock")
 
-  def rollback_transaction_request(self, app_id, appscale_version, http_request_data):
+  def rollback_transaction_request(self, app_id, http_request_data):
     txn = datastore_pb.Transaction(http_request_data)
     zoo_keeper.notifyFailedTransaction(app_id, txn.handle())
     return (api_base_pb.VoidProto().Encode(), 0, "")
 
 
-  def allocate_ids_request(self, app_id, appscale_version, http_request_data):
+  def allocate_ids_request(self, app_id, http_request_data):
     return (api_base_pb.VoidProto().Encode(),
             datastore_pb.Error.PERMISSION_DENIED,
             'Allocation of block ids not implemented.')
@@ -714,7 +708,6 @@
       rollback_req = datastore_pb.Transaction()
       rollback_req.set_handle(internal_txn)
       self.rollback_transaction_request(app_id,
-                                        "version",
                                         rollback_req.Encode())
 
   """ Transaction algorithm for single puts:
@@ -732,7 +725,7 @@
      -Commit the transaction
      -Release the lock from ZK
   """
-  def put_request(self, app_id, appscale_version, http_request_data):
+  def put_request(self, app_id, http_request_data):
     global app_datastore
     global keySecret
     global tableHashTable
@@ -841,7 +834,6 @@
     if not putreq_pb.has_transaction():
       begintime = time.time()
       txn, err, errcode = self.begin_transaction_request(app_id,
-                                                         appscale_version,
                                                          http_request_data)
 
     # parse from contents
@@ -870,7 +862,8 @@
       # Notify Soap Server of any new tables
       #######################################
       # insert key
-      table_name = getTableName(app_id, kind, appscale_version)
+      namespace = e.key().name_space()
+      table_name = getTableName(app_id, kind, namespace)
       #print "Put Using table name:",table_name
       # Notify Users/Apps table if a new class is being added
       if table_name not in tableHashTable:
@@ -879,7 +872,7 @@
         # This function is reentrant
         # If the class was deleted, and added a second time there is no
         # notifying the users/app server of its creation
-        if tableServer.add_class(app_id, kind, keySecret) == "true":
+        if tableServer.add_class(app_id, kind, namespace, keySecret) == "true":
           tableHashTable[table_name] = 1
 
       # Store One Entity #
@@ -933,7 +926,7 @@
                 "Timeout: Unable to update ZooKeeper on change set for transaction")
       journalPut = putThread()
       journal_key = getJournalKey(row_key, txn.handle())
-      journal_table = getJournalTable(app_id, appscale_version)
+      journal_table = getJournalTable(app_id, namespace)
       journalPut.setup(app_datastore,
                        journal_table,
                        journal_key,
@@ -968,7 +961,6 @@
     if not putreq_pb.has_transaction():
       committime = time.time()
       com_res, errcode, errdetail = self.commit_transaction_request(app_id,
-                                                                    appscale_version,
                                                                     txn.Encode())
       if PROFILE: appscale_log.write("COMMIT %d %f\n"%(txn.handle(), time.time() - committime))
 
@@ -982,7 +974,7 @@
     return (putresp_pb.Encode(), 0, "")
 
 
-  def get_request(self, app_id, appscale_version, http_request_data):
+  def get_request(self, app_id, http_request_data):
     global app_datastore
     getreq_pb = datastore_pb.GetRequest(http_request_data)
     #logger.debug("GET_REQUEST: %s" % getreq_pb)
@@ -1018,7 +1010,8 @@
       if last_path.has_type():
         kind = last_path.type()
       #logger.debug("get: %s___%s___%s %s" % (app_id, kind, appscale_version, str(entity_id)))
-      table_name = getTableName(app_id, kind, appscale_version)
+      namespace = key.name_space()
+      table_name = getTableName(app_id, kind, namespace)
       row_key = getRowKey(app_id,key.path().element_list())
       #print "get row key:" + str(row_key)
       #print "table_name:" + str(table_name)
@@ -1048,7 +1041,7 @@
       if prev_version == long(NONEXISTANT_TRANSACTION):
         entity = None
       else:
-        journal_table = getJournalTable(app_id, appscale_version)
+        journal_table = getJournalTable(app_id, namespace)
         journal_key = getJournalKey(row_key, prev_version)
         r = app_datastore.get_entity(journal_table, journal_key, ENTITY_TABLE_SCHEMA[:1] )
         err = r[0]
@@ -1075,7 +1068,7 @@
      rollback to know which entity group a possible failed
      transaction belongs to.
   """
-  def delete_request(self, app_id, appscale_version, http_request_data):
+  def delete_request(self, app_id, http_request_data):
     global app_datastore
     root_key = None
     txn = None
@@ -1104,14 +1097,13 @@
       last_path = key.path().element_list()[-1]
       if last_path.has_type():
         kind = last_path.type()
-
+      namespace = key.name_space()
       row_key = getRowKey(app_id, key.path().element_list())
 
       # All deletes are transactional and per entity if
       # not already wrapped in a transaction
       if not delreq_pb.has_transaction():
         txn, err, errcode = self.begin_transaction_request(app_id,
-                                                           appscale_version,
                                                            http_request_data)
         # parse from contents
         txn = datastore_pb.Transaction(txn)
@@ -1140,13 +1132,13 @@
       ##########################
       # Get the previous version
       ##########################
-      table_name = getTableName(app_id, kind, appscale_version)
+      table_name = getTableName(app_id, kind, namespace)
       field_name_list = ENTITY_TABLE_SCHEMA[1:]
       r = app_datastore.get_entity( table_name, row_key, field_name_list )
       err = r[0]
       if err not in ERROR_CODES:
         # the table does not exist, hence, the previous value was null
-        # TODO, make its not because the database is down
+        # TODO, make sure its not because the database is down
         r = ["DB_ERROR:", NONEXISTANT_TRANSACTION] #
       if len(r) == 1:
         r.append(NONEXISTANT_TRANSACTION)
@@ -1174,7 +1166,7 @@
                 "Timeout: Unable to update ZooKeeper on change set for transaction")
 
       encoded_delete = DELETED + "/" + row_key
-      journal_table = getJournalTable(app_id, appscale_version)
+      journal_table = getJournalTable(app_id, namespace)
       journal_key = getJournalKey(row_key, txn.handle())
 
       field_name_list = JOURNAL_SCHEMA
@@ -1191,7 +1183,7 @@
                 datastore_pb.Error.INTERNAL_ERROR,
                 err + ", Unable to write to journal")
 
-      table_name = getTableName(app_id, kind, appscale_version)
+      table_name = getTableName(app_id, kind, namespace)
       field_name_list = ENTITY_TABLE_SCHEMA
       field_value_list = [encoded_delete, str(txn.handle())]
       err, res = app_datastore.put_entity( table_name,
@@ -1210,7 +1202,6 @@
 
     if not delreq_pb.has_transaction():
       com_res, errcode, errdetail = self.commit_transaction_request(app_id,
-                                                                    appscale_version,
                                                                     txn.Encode())
       if errcode != 0:
         return (delresp_pb.Encode(), errcode, errdetail)
@@ -1218,20 +1209,20 @@
     return (delresp_pb.Encode(), 0, "")
 
 
-  def optimized_delete_request(self, app_id, appscale_version, http_request_data):
+  def optimized_delete_request(self, app_id, http_request_data):
     pass
-  def run_optimized_query(self, app_id, appscale_version, http_request_data):
+  def run_optimized_query(self, app_id, http_request_data):
     return
-  def optimized_put_request(self, app_id, appscale_version, http_request_data):
+  def optimized_put_request(self, app_id, http_request_data):
     pass
 
-  def void_proto(self, app_id, appscale_version, http_request_data):
+  def void_proto(self, app_id, http_request_data):
     resp_pb = api_base_pb.VoidProto()
     print "Got void"
     #logger.debug("VOID_RESPONSE: %s to void" % resp_pb)
     return (resp_pb.Encode(), 0, "" )
 
-  def str_proto(self, app_id, appscale_version, http_request_data):
+  def str_proto(self, app_id, http_request_data):
     str_pb = api_base_pb.StringProto( http_request_data )
     composite_pb = datastore_pb.CompositeIndices()
     print "Got a string proto"
@@ -1240,7 +1231,7 @@
     #logger.debug("CompositeIndex response to string: %s" % composite_pb)
     return (composite_pb.Encode(), 0, "" )
 
-  def int64_proto(self, app_id, appscale_version, http_request_data):
+  def int64_proto(self, app_id, http_request_data):
     int64_pb = api_base_pb.Integer64Proto( http_request_data )
     resp_pb = api_base_pb.VoidProto()
     print "Got a int 64"
@@ -1249,7 +1240,7 @@
     #logger.debug("VOID_RESPONSE to int64: %s" % resp_pb)
     return (resp_pb.Encode(), 0, "")
 
-  def compositeindex_proto(self, app_id, appscale_version, http_request_data):
+  def compositeindex_proto(self, app_id, http_request_data):
     compindex_pb = entity_pb.CompositeIndex( http_request_data)
     resp_pb = api_base_pb.VoidProto()
     print "Got Composite Index"
@@ -1300,14 +1291,14 @@
   ##############
   # OTHER TYPE #
   ##############
-  def unknown_request(self, app_id, appscale_version, http_request_data, pb_type):
+  def unknown_request(self, app_id, http_request_data, pb_type):
     #logger.debug("Received Unknown Protocol Buffer %s" % pb_type )
     print "ERROR: Received Unknown Protocol Buffer <" + pb_type +">.",
     print "Nothing has been implemented to handle this Protocol Buffer type."
     print "http request data:"
    print http_request_data
    print "http done"
-    self.void_proto(app_id, appscale_version, http_request_data)
+    self.void_proto(app_id, http_request_data)
 
 
   #########################
@@ -1322,27 +1313,15 @@
     app_data = app_data.split(':')
     #logger.debug("POST len: %d" % len(app_data))
 
-    if len(app_data) == 5:
-      app_id, user_email, nick_name, auth_domain, appscale_version = app_data
-      os.environ['AUTH_DOMAIN'] = auth_domain
-      os.environ['USER_EMAIL'] = user_email
-      os.environ['USER_NICKNAME'] = nick_name
-      os.environ['APPLICATION_ID'] = app_id
-    elif len(app_data) == 4:
+    if len(app_data) == 4:
       app_id, user_email, nick_name, auth_domain = app_data
       os.environ['AUTH_DOMAIN'] = auth_domain
       os.environ['USER_EMAIL'] = user_email
       os.environ['USER_NICKNAME'] = nick_name
       os.environ['APPLICATION_ID'] = app_id
-      appscale_version = "1"
-    elif len(app_data) == 2:
-      app_id, appscale_version = app_data
-      app_id = app_data[0]
-      os.environ['APPLICATION_ID'] = app_id
     elif len(app_data) == 1:
       app_id = app_data[0]
       os.environ['APPLICATION_ID'] = app_id
-      appscale_version = "1"
     else:
       #logger.debug("UNABLE TO EXTRACT APPLICATION DATA")
       return
@@ -1352,9 +1331,9 @@
     #logger.debug("For app version: " + appscale_version)
 
     if pb_type == "Request":
-      self.remote_request(app_id, appscale_version, http_request_data)
+      self.remote_request(app_id, http_request_data)
     else:
-      self.unknown_request(app_id, appscale_version, http_request_data, pb_type)
+      self.unknown_request(app_id, http_request_data, pb_type)
     self.finish()
 def usage():
   print "AppScale Server"
 
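
The appscale_version parameter could be dropped from every handler above because the namespace now arrives inside the datastore protocol buffers themselves rather than in the AppServer-to-PB-server header. A hedged sketch of the two extraction points the diff uses (the name_space() accessors are the real datastore_pb ones; the wrapper functions are illustrative only):

    from google.appengine.datastore import datastore_pb

    def namespace_for_query(http_request_data):
      # RunQuery: the namespace rides on the Query proto itself.
      query = datastore_pb.Query(http_request_data)
      return query.name_space()

    def namespace_for_key(key):
      # Get/Put/Delete: each key (Reference proto) carries its namespace.
      return key.name_space()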
=== modified file 'AppDB/cassandra/py_cassandra.py'
--- AppDB/cassandra/py_cassandra.py 2010-10-04 21:01:14 +0000
+++ AppDB/cassandra/py_cassandra.py 2010-12-24 09:11:16 +0000
@@ -134,8 +134,8 @@
     keyslices = []
     column_parent = ColumnParent(column_family="Standard1")
     predicate = SlicePredicate(column_names=column_names)
-    start_key = table_name
-    end_key = table_name + '~'
+    start_key = table_name + "/"
+    end_key = table_name + '/~'
     try:
       client = self.__setup_connection()
       keyslices = client.get_range_slice(MAIN_TABLE,
@@ -211,8 +211,8 @@
     predicate = SlicePredicate(column_names=[])
     curtime = self.timestamp()
     path = ColumnPath(COLUMN_FAMILY)
-    start_key = table_name
-    end_key = table_name + '~'
+    start_key = table_name + "/"
+    end_key = table_name + '/~'
     try:
       client = self.__setup_connection()
       keyslices = client.get_range_slice(MAIN_TABLE,
 
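
The two-character change above is the whole Cassandra fix. A scan from table_name to table_name + '~' also swallows rows of any other table whose name begins with the same prefix, and namespaces made that common: the empty-namespace table app___Kind___ is a prefix of app___Kind___prod. Pinning both bounds to the "/" delimiter confines the scan. A self-contained illustration (pure Python, no Thrift; it assumes keys sort lexicographically, as with Cassandra's order-preserving partitioner, and that rows are keyed table_name + "/" + row_key, which the new bounds imply):

    def scan(keys, start, end):
      # mimic get_range_slice over lexicographically ordered keys
      return [k for k in sorted(keys) if start <= k <= end]

    keys = ["app___Kind___/row1", "app___Kind___prod/row1"]

    # old bounds: the empty-namespace scan leaks the "prod" row
    assert scan(keys, "app___Kind___", "app___Kind___~") == keys

    # new bounds: only the intended table's rows come back
    assert scan(keys, "app___Kind___/", "app___Kind___/~") == ["app___Kind___/row1"]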
=== modified file 'AppDB/dbconstants.py'
--- AppDB/dbconstants.py 2010-05-10 19:52:37 +0000
+++ AppDB/dbconstants.py 2010-12-24 09:11:16 +0000
@@ -1,6 +1,6 @@
 # Constants
 
-import os,sys
+import os
 APPSCALE_HOME=os.environ.get("APPSCALE_HOME")
 
 LOG_DIR = "%s/AppDB/logs" % APPSCALE_HOME
 
=== modified file 'AppDB/helper_functions.py'
--- AppDB/helper_functions.py 2010-06-23 07:11:04 +0000
+++ AppDB/helper_functions.py 2010-12-24 09:11:16 +0000
@@ -52,7 +52,7 @@
     self.loggingOn = True
 
   def debug(self, string):
-    if self.loggingOn == True:
+    if self.loggingOn:
       self.log_logger.info(string)
 
 def randomString(length):
 
=== modified file 'AppDB/hypertable/py_hypertable.py'
--- AppDB/hypertable/py_hypertable.py 2010-11-16 04:25:22 +0000
+++ AppDB/hypertable/py_hypertable.py 2010-12-24 09:11:16 +0000
@@ -76,8 +76,8 @@
     self.lock.acquire()
     self.conn = ThriftClient(self.get_local_ip(), THRIFT_PORT)
     self.ns = self.conn.open_namespace(NS)
-    if PROFILING:
-      self.logger.debug("HT InitConnection: %s"%str(endtime - starttime))
+    #if PROFILING:
+    #  self.logger.debug("HT InitConnection: %s"%str(endtime - starttime))
     return self.conn
 
   def __closeConnection(self, conn):
 
=== modified file 'AppDB/soap_server.py'
--- AppDB/soap_server.py 2010-10-04 21:01:14 +0000
+++ AppDB/soap_server.py 2010-12-24 09:11:16 +0000
@@ -2,28 +2,20 @@
 # 2nd major revision: No longer are tables being cached in memory
 # See LICENSE file
 
-import string, cgi
-import sys
-import os
 
 # we don't use PYTHONPATH now.
 #PYTHON_PATH = os.environ.get("PYTHONPATH")
 #print "Python path: ",PYTHON_PATH
-print sys.path
+#print sys.path
 
+import sys
 import SOAPpy
 import time
-import socket
 import datetime
 import re
-import cgitb; #cgitb.enable()
-import getopt
-import logging
-import logging.handlers
 from dbconstants import *
 import appscale_datastore
 import appscale_logger
-import pickle
 from M2Crypto import SSL
 
 logger = appscale_logger.getLogger("soap_server")
@@ -323,7 +315,7 @@
   if secret != super_secret:
     #logger.error("commit_new_user: bad secret")
     return "Error: bad secret"
-  if DEBUG: "Commiting a new user %s"%user
+  if DEBUG: print "Commiting a new user %s"%user
   error = "Error: username should be an email"
   # look for the @ and . in the email
   if user.find("@") == -1 or user.find(".") == -1:
@@ -368,7 +360,7 @@
 
   error = "Error: appname/language can only be alpha numeric"
 
-  if language.isalnum() == False:
+  if not language.isalnum():
     #logger.error("language %s is not alpha numeric" % language)
     if DEBUG: print error
     return error
@@ -411,7 +403,7 @@
     else:
       #logger.error("creating a new app: %s failed %s" % (appname, result[0]))
       return "false"
-    return "true"
+    return ret
   else:
     error = "Error: User not found"
     #logger.error(error)
@@ -426,7 +418,6 @@
     #logger.debug("get_tar: bad secret")
     return "Error: bad secret"
   if DEBUG: print "get_tar: entry"
-  error = "Error: unable to find application tar ball. "
   result = db.get_entity(APP_TABLE, app_name, ["tar_ball"])
   if result[0] in ERROR_CODES and len(result) == 2:
     #logger.info("get_tar app:%s length of tar %s" % (app_name, str(len(result[1]))) )
@@ -443,7 +434,7 @@
   if DEBUG: print "commit_tar: entry"
 
   #logger.info("commit_tar app:%s, secret:%s" % (app_name, secret))
-  if DEBUG: "Committing a tar for %s"%app_name
+  if DEBUG: print "Committing a tar for %s"%app_name
   if secret != super_secret:
     #logger.error("commit_tar: bad secret")
     return "Error: bad secret"
@@ -625,7 +616,7 @@
     return "false"
   return "true"
 
-def add_class(appname, classname, secret):
+def add_class(appname, classname, namespace, secret):
   global db
   global super_secret
   global app_schema
@@ -653,7 +644,7 @@
     # already in classes list
     return "true"
 
-  classes += [str(classname)]
+  classes += [str(classname+"___"+namespace)]
   classes = ':'.join(classes)
 
  result = db.put_entity(APP_TABLE, appname, columns, [classes])
@@ -673,8 +664,8 @@
   if result[0] not in ERROR_CODES or len(result) == 1:
     #logger.error("delete_app: Unable to get entity for app %s" %appname)
     return "false: unable to get entity for app"
+  """
   owner = result[1]
-  """
   result = db.get_entity(USER_TABLE, owner, ['applications'])
   if result[0] not in ERROR_CODES and len(result) == 1:
     logger.error("delete_app: Unable to get entity for app %s" %appname)
@@ -697,7 +688,7 @@
     return "false: unable to put for user modified app list"
   """
   # look up all the class tables of this app and delete their tables
-  result = db.get_entity(APP_TABLE, appname, ["classes", "version"])
+  result = db.get_entity(APP_TABLE, appname, ["classes"])
   if result[0] not in ERROR_CODES or len(result) == 1:
     #logger.error("delete_app: Unable to get classes for app %s"%appname)
     return "false: unable to get classes for app"
@@ -706,19 +697,13 @@
     classes = result[0].split(':')
   else:
     classes = []
-  if result[1]:
-    appscale_version = result[1]
-  else:
-    appscale_version = "1"
-    #logger.error("delete_app: Unable to get version number for app %s"%appname)
-
   result = db.put_entity(APP_TABLE, appname, ["host", "port"], ["", ""])
   if result[0] not in ERROR_CODES:
     #logger.error("delete_app: Unable to delete instances for app %s"%appname)
     return "false: unable to delete instances"
 
   for classname in classes:
-    table_name = appname + "___" + classname + "___" + appscale_version
+    table_name = appname + "___" + classname
     db.delete_table(table_name)
     #logger.error("delete_app: removed %s"%table_name)
 
@@ -791,8 +776,8 @@
     return "Error: User does not exist"
 
   result = result[1:]
-  appdrop_rem_token = result[0]
-  appdrop_rem_token_exp = result[1]
+  #appdrop_rem_token = result[0]
+  #appdrop_rem_token_exp = result[1]
   t = datetime.datetime.now()
   date_change = str(time.mktime(t.timetuple()))
 
 
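
With add_class now taking a namespace, the soap server's per-app classes column records kind and namespace together, which is why delete_app above no longer needs a version column to reconstruct table names. A schematic of the bookkeeping, under the diff's kind___namespace convention (the colon-joined column format comes from the code above; the helper names are illustrative):

    def class_entry(classname, namespace):
      # what add_class appends to the app's colon-joined 'classes' column
      return str(classname + "___" + namespace)

    def tables_to_delete(appname, classes_column):
      # delete_app: each stored entry already ends in its namespace, so the
      # full table name is appname___<entry>, with no version suffix
      return [appname + "___" + c for c in classes_column.split(":") if c]

    assert tables_to_delete("guestbook", "Greeting___:Greeting___prod") == \
        ["guestbook___Greeting___", "guestbook___Greeting___prod"]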
=== modified file 'AppServer/BUGS' (properties changed: -x to +x)
=== modified file 'AppServer/LICENSE' (properties changed: -x to +x)
=== modified file 'AppServer/README' (properties changed: -x to +x)
=== modified file 'AppServer/RELEASE_NOTES' (properties changed: -x to +x)
--- AppServer/RELEASE_NOTES 2010-11-30 10:37:25 +0000
+++ AppServer/RELEASE_NOTES 2010-12-24 09:11:16 +0000
@@ -3,6 +3,71 @@
 
 App Engine Python SDK - Release Notes
 
+Version 1.4.0
+================================
+- The Always On feature allows applications to pay and keep 3 instances of their
+  application always running, which can significantly reduce application
+  latency.
+- Developers can now enable Warmup Requests. By specifying a handler in an
+  app's app.yaml, App Engine will attempt to send a Warmup Request to initialize
+  new instances before a user interacts with it. This can reduce the latency an
+  end-user sees for initializing your application.
+- The Channel API is now available for all users.
+- Task Queue has been officially released, and is no longer an experimental
+  feature. The API import paths that use 'labs' have been deprecated. Task queue
+  storage will count towards an application's overall storage quota, and will
+  thus be charged for.
+- The deadline for Task Queue and Cron requests has been raised to 10 minutes.
+  Datastore and API deadlines within those requests remain unchanged.
+- For the Task Queue, developers can specify task retry_parameters in their
+  queue.yaml.
+- Apps that have enabled billing are allowed up to 100 queues with the Task
+  Queue API.
+- Metadata Queries on the datastore for datastore kinds, namespaces, and entity
+  properties are available.
+- URLFetch allowed response size has been increased, up to 32 MB. Request size
+  is still limited to 1 MB.
+- The request and response sizes for the Images API have been increased to
+  32 MB.
+- The total size of Memcache batch operations is increased to 32 MB. The 1 MB
+  limit on individual Memcache objects still applies.
+- The attachment size for outgoing emails has been increased from 1 MB to 10 MB.
+  The size limit for incoming emails is still 10 MB.
+- Size and quantity limits on datastore batch get/put/delete operations have
+  been removed. Individual entities are still limited to 1 MB, but your app may
+  batch as many entities together for get/put/delete calls as the overall
+  datastore deadline will allow for.
+- When iterating over query results, the datastore will now asynchronously
+  prefetch results, reducing latency in many cases by 10-15%.
+- The Admin Console Blacklist page lists the top blacklist rejected visitors.
+- The automatic image thumbnailing service supports arbitrary crop sizes up to
+  1600px.
+- Overall average instance latency in the Admin Console is now a weighted
+  average over QPS per instance.
+- The developer who uploaded an app version can download that version's code
+  using the appcfg.py download_app command. This feature can be disabled on
+  a per application basis in the admin console, under the 'Permissions' tab.
+  Once disabled, code download for the application CANNOT be re-enabled.
+- Fixed an issue where custom Admin Console pages did not work for Google
+  Apps for your Domain users.
+- In the Python runtime, an instance is killed and restarted when a request
+  handler hits DeadlineExceededError. This should fix an issue related to
+  intermittent SystemErrors using Django.
+  http://code.google.com/p/googleappengine/issues/detail?id=772
+- Allow Django initialization to be moved to appengine_config.py to avoid
+  Django version conflicts when mixing webapp.template with pure Django.
+  http://code.google.com/p/googleappengine/issues/detail?id=1758
+- Fixed an issue with OpenId over SSL.
+  http://code.google.com/p/googleappengine/issues/detail?id=3393
+- Fixed an issue on the dev_appserver where login/logout code didn't work using
+  Python 2.6.
+  http://code.google.com/p/googleappengine/issues/detail?id=3566
+- Fixed an issue in the dev_appserver where get_serving_url did not work
+  for transparent, cropped PNGs:
+  http://code.google.com/p/googleappengine/issues/detail?id=3887
+- Fixed an issue with the DatastoreFileStub.
+  http://code.google.com/p/googleappengine/issues/detail?id=3895
+
 Version 1.3.8
 ==================================
 - Builtin app.yaml handlers are available for common application functions,
 
=== modified file 'AppServer/VERSION' (properties changed: -x to +x)
--- AppServer/VERSION 2010-11-30 10:37:25 +0000
+++ AppServer/VERSION 2010-12-24 09:11:16 +0000
@@ -1,3 +1,3 @@
-release: "1.3.8"
-timestamp: 1284157741
+release: "1.4.0"
+timestamp: 1287687253
 api_versions: ['1']
 
=== modified file 'AppServer/demos/guestbook/app.yaml' (properties changed: -x to +x)
=== modified file 'AppServer/google/appengine/api/apiproxy_stub.py'
--- AppServer/google/appengine/api/apiproxy_stub.py 2010-11-30 10:37:25 +0000
+++ AppServer/google/appengine/api/apiproxy_stub.py 2010-12-24 09:11:16 +0000
@@ -27,7 +27,6 @@
 import time
 
 
-
 MAX_REQUEST_SIZE = 1 << 20
 DS_STAT_LEVEL = 31
 logging.addLevelName(DS_STAT_LEVEL, "DS_STAT")
@@ -81,6 +80,7 @@
     messages = []
     assert request.IsInitialized(messages), messages
     start = time.time()
+
     method = getattr(self, '_Dynamic_' + call)
     method(request, response)
     end = time.time()
@@ -88,4 +88,3 @@
     if service == "datastore_v3":
       logging.log(DS_STAT_LEVEL,"qtype %s time %s" % (call, (end-start)))
 
-
 
=== modified file 'AppServer/google/appengine/api/apiproxy_stub_map.py'
--- AppServer/google/appengine/api/apiproxy_stub_map.py 2010-12-10 23:34:17 +0000
+++ AppServer/google/appengine/api/apiproxy_stub_map.py 2010-12-24 09:11:16 +0000
@@ -237,7 +237,6 @@
       service: string
       stub: stub
     """
-    # Changes made to allow changing stubs dynamically
     #assert not self.__stub_map.has_key(service), repr(service)
     self.__stub_map[service] = stub
 
@@ -370,6 +369,8 @@
     self.__rpc.callback = self.__internal_callback
     self.callback = callback
 
+    self.__class__.__local.may_interrupt_wait = False
+
   def __internal_callback(self):
     """This is the callback set on the low-level RPC object.
 
@@ -589,9 +590,9 @@
       cls.__local.may_interrupt_wait = True
       try:
         running.__rpc.Wait()
-      except apiproxy_errors.InterruptedError:
-        running.__rpc._RPC__exception = None
-        running.__rpc._RPC__traceback = None
+      except apiproxy_errors.InterruptedError, err:
+        err.rpc._RPC__exception = None
+        err.rpc._RPC__traceback = None
       finally:
         cls.__local.may_interrupt_wait = False
     finished, runnning = cls.__check_one(rpcs)
 
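
The wait_any fix above stops scrubbing state on whichever RPC happened to be waited on (running.__rpc) and instead scrubs the RPC the interruption actually came from, which the 1.4.0 InterruptedError exposes as err.rpc. A sketch of the pattern in isolation (the mangled _RPC__exception/_RPC__traceback names are exactly what the diff touches; the wrapper function is illustrative):

    from google.appengine.runtime import apiproxy_errors

    def wait_clearing_interrupt(rpc):
      try:
        rpc.Wait()
      except apiproxy_errors.InterruptedError, err:
        # clear the interrupted RPC's recorded failure so a later
        # check_success() on it does not re-raise
        err.rpc._RPC__exception = None
        err.rpc._RPC__traceback = None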
=== modified file 'AppServer/google/appengine/api/appinfo.py'
--- AppServer/google/appengine/api/appinfo.py 2010-11-30 10:37:25 +0000
+++ AppServer/google/appengine/api/appinfo.py 2010-12-24 09:11:16 +0000
@@ -34,14 +34,13 @@
 from google.appengine.api import yaml_listener
 from google.appengine.api import yaml_object
 
-
 _URL_REGEX = r'(?!\^)/|\.|(\(.).*(?!\$).'
 _FILES_REGEX = r'(?!\^).*(?!\$).'
 
 _DELTA_REGEX = r'([0-9]+)([DdHhMm]|[sS]?)'
 _EXPIRATION_REGEX = r'\s*(%s)(\s+%s)*\s*' % (_DELTA_REGEX, _DELTA_REGEX)
 
-_SERVICE_RE_STRING = r'(mail|xmpp_message|rest|startup)'
+_SERVICE_RE_STRING = r'(mail|xmpp_message|xmpp_subscribe|xmpp_presence|rest|warmup)'
 
 _PAGE_NAME_REGEX = r'^.+$'
 
 
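
The _SERVICE_RE_STRING change above is what lets 1.4.0 app.yaml files declare the new inbound services: startup is gone, and warmup plus the finer-grained XMPP services are now accepted. A quick check against the new pattern:

    import re

    _SERVICE_RE_STRING = r'(mail|xmpp_message|xmpp_subscribe|xmpp_presence|rest|warmup)'

    for service in ('warmup', 'xmpp_presence', 'mail'):
      assert re.match(_SERVICE_RE_STRING, service)
    assert not re.match(_SERVICE_RE_STRING, 'startup')  # dropped in this release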
=== modified file 'AppServer/google/appengine/api/blobstore/__init__.py' (properties changed: -x to +x)
=== modified file 'AppServer/google/appengine/api/blobstore/blobstore_stub.py'
--- AppServer/google/appengine/api/blobstore/blobstore_stub.py 2010-12-13 07:51:16 +0000
+++ AppServer/google/appengine/api/blobstore/blobstore_stub.py 2010-12-24 09:11:16 +0000
@@ -17,8 +17,9 @@
1717
18"""18"""
19Modifications for AppScale by Navraj Chohan19Modifications for AppScale by Navraj Chohan
20
20Datastore backed Blobstore API stub.21Datastore backed Blobstore API stub.
21 22
22Class:23Class:
23 BlobstoreServiceStub: BlobstoreService stub backed by datastore.24 BlobstoreServiceStub: BlobstoreService stub backed by datastore.
24"""25"""
@@ -30,6 +31,7 @@
3031
31import os32import os
32import time33import time
34
33from google.appengine.api import apiproxy_stub35from google.appengine.api import apiproxy_stub
34from google.appengine.api import datastore36from google.appengine.api import datastore
35from google.appengine.api import datastore_errors37from google.appengine.api import datastore_errors
@@ -39,7 +41,6 @@
39from google.appengine.api.blobstore import blobstore_service_pb41from google.appengine.api.blobstore import blobstore_service_pb
40from google.appengine.runtime import apiproxy_errors42from google.appengine.runtime import apiproxy_errors
4143
42import logging
4344
44__all__ = ['BlobStorage',45__all__ = ['BlobStorage',
45 'BlobstoreServiceStub',46 'BlobstoreServiceStub',
@@ -47,7 +48,6 @@
47 'CreateUploadSession',48 'CreateUploadSession',
48 'Error',49 'Error',
49 ]50 ]
50
51BLOB_PORT = "6106"51BLOB_PORT = "6106"
5252
53class Error(Exception):53class Error(Exception):
@@ -84,6 +84,7 @@
84 'success_path': path,84 'success_path': path,
85 'user': user,85 'user': user,
86 'state': 'init'})86 'state': 'init'})
87
87 datastore.Put(entity)88 datastore.Put(entity)
88 return str(entity.key())89 return str(entity.key())
8990
@@ -169,8 +170,6 @@
169 self.__time_function = time_function170 self.__time_function = time_function
170 self.__next_session_id = 1171 self.__next_session_id = 1
171 self.__uploader_path = uploader_path172 self.__uploader_path = uploader_path
172 self.__block_cache = ""
173 self.__block_key_cache = ""
174173
175 @property174 @property
176 def storage(self):175 def storage(self):
@@ -225,10 +224,7 @@
225 """224 """
226 session = self._CreateSession(request.success_path(),225 session = self._CreateSession(request.success_path(),
227 users.get_current_user())226 users.get_current_user())
228 logging.info("bsstub: %s"%(self.__storage._app_id))
229
230 response.set_url('http://%s:%s/%s%s/%s' % (self._GetEnviron('SERVER_NAME'),227 response.set_url('http://%s:%s/%s%s/%s' % (self._GetEnviron('SERVER_NAME'),
231 #self._GetEnviron('NGINX_PORT'),
232 BLOB_PORT,228 BLOB_PORT,
233 self.__uploader_path,229 self.__uploader_path,
234 self.__storage._app_id,230 self.__storage._app_id,
@@ -245,6 +241,11 @@
245 response: Not used but should be a VoidProto.241 response: Not used but should be a VoidProto.
246 """242 """
247 for blob_key in request.blob_key_list():243 for blob_key in request.blob_key_list():
244 key = datastore_types.Key.from_path(blobstore.BLOB_INFO_KIND,
245 str(blob_key),
246 namespace='')
247
248 datastore.Delete(key)
248 self.__storage.DeleteBlob(blob_key)249 self.__storage.DeleteBlob(blob_key)
249250
250 def _Dynamic_FetchData(self, request, response):251 def _Dynamic_FetchData(self, request, response):
@@ -282,23 +283,14 @@
282 blobstore_service_pb.BlobstoreServiceError.BLOB_FETCH_SIZE_TOO_LARGE)283 blobstore_service_pb.BlobstoreServiceError.BLOB_FETCH_SIZE_TOO_LARGE)
283284
284 blob_key = request.blob_key()285 blob_key = request.blob_key()
285 blob_info_key = datastore.Key.from_path(blobstore.BLOB_INFO_KIND,286 #blob_info_key = datastore.Key.from_path(blobstore.BLOB_INFO_KIND,
286 blob_key,287 # blob_key,
287 namespace='')288 # namespace='')
288 try:
289 datastore.Get(blob_info_key)
290 except datastore_errors.EntityNotFoundError, err:
291 raise apiproxy_errors.ApplicationError(
292 blobstore_service_pb.BlobstoreServiceError.BLOB_NOT_FOUND)
293
294 # Find out the block number from the size
295 # Append that key to the info key and fename=tch the data
296 # Must deal with over lapping boundaries
297 block_count = int(start_index/blobstore.MAX_BLOB_FETCH_SIZE)289 block_count = int(start_index/blobstore.MAX_BLOB_FETCH_SIZE)
298 block_modulo = int(start_index%blobstore.MAX_BLOB_FETCH_SIZE)290 block_modulo = int(start_index%blobstore.MAX_BLOB_FETCH_SIZE)
299291
300 block_count_end = int(end_index/blobstore.MAX_BLOB_FETCH_SIZE)292 block_count_end = int(end_index/blobstore.MAX_BLOB_FETCH_SIZE)
301 block_modulo_end = int(end_index%blobstore.MAX_BLOB_FETCH_SIZE)293 #block_modulo_end = int(end_index%blobstore.MAX_BLOB_FETCH_SIZE)
302294
303 block_key = str(blob_key) + "__" + str(block_count)295 block_key = str(blob_key) + "__" + str(block_count)
304 block_key = datastore.Key.from_path("__BlobChunk__",296 block_key = datastore.Key.from_path("__BlobChunk__",
@@ -308,12 +300,13 @@
308 if self.__block_key_cache != str(block_key):300 if self.__block_key_cache != str(block_key):
309 try:301 try:
310 block = datastore.Get(block_key)302 block = datastore.Get(block_key)
311 except datastore_errors.EntityNotFoundError, err:303 except datastore_errors.EntityNotFoundError:
312 raise apiproxy_errors.ApplicationError(304 raise apiproxy_errors.ApplicationError(
313 blobstore_service_pb.BlobstoreServiceError.BLOB_NOT_FOUND)305 blobstore_service_pb.BlobstoreServiceError.BLOB_NOT_FOUND)
314306
315 self.__block_cache = block["block"]307 self.__block_cache = block["block"]
316 self.__block_key_cache = str(block_key)308 self.__block_key_cache = str(block_key)
309
317 # Matching boundaries, start and end are within one fetch310 # Matching boundaries, start and end are within one fetch
318 if block_count_end == block_count:311 if block_count_end == block_count:
319 # Is there enough data to satisfy fetch_size bytes?312 # Is there enough data to satisfy fetch_size bytes?
@@ -337,12 +330,12 @@
337 namespace='')330 namespace='')
338 try:331 try:
339 block = datastore.Get(block_key)332 block = datastore.Get(block_key)
340 except datastore_errors.EntityNotFoundError, err:333 except datastore_errors.EntityNotFoundError:
341 raise apiproxy_errors.ApplicationError(334 raise apiproxy_errors.ApplicationError(
342 blobstore_service_pb.BlobstoreServiceError.BLOB_NOT_FOUND)335 blobstore_service_pb.BlobstoreServiceError.BLOB_NOT_FOUND)
343336
344 self.__block_cache = block["block"]337 self.__block_cache = block["block"]
345 self.__block_key_cache = str(block_key)338 self.__block_key_cache = str(block_key)
346 data.append(self.__block_cache[0,fetch_size - data_size]) 339 data.append(self.__block_cache[0,fetch_size - data_size])
347 response.set_data(data)340 response.set_data(data)
348 341
349342
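FetchData addresses blob contents as fixed-size __BlobChunk__ entities named '<blob key>__<chunk number>'. A self-contained sketch of the index arithmetic used above; the 1 MB constant stands in for blobstore.MAX_BLOB_FETCH_SIZE and both helpers are illustrative:

    MAX_BLOB_FETCH_SIZE = 1 << 20  # stand-in for blobstore.MAX_BLOB_FETCH_SIZE

    def chunk_coordinates(start_index, end_index):
        # Chunk holding the first byte, that byte's offset within the
        # chunk, and the chunk holding the last byte. When the two chunk
        # numbers differ the read straddles a boundary and two Gets occur.
        block_count = int(start_index / MAX_BLOB_FETCH_SIZE)
        block_modulo = int(start_index % MAX_BLOB_FETCH_SIZE)
        block_count_end = int(end_index / MAX_BLOB_FETCH_SIZE)
        return block_count, block_modulo, block_count_end

    def chunk_key_name(blob_key, block_count):
        return str(blob_key) + "__" + str(block_count)

    assert chunk_coordinates(0, 10) == (0, 0, 0)
    assert chunk_coordinates((1 << 20) + 5, (1 << 20) + 9) == (1, 5, 1)
    assert chunk_key_name('myblob', 1) == 'myblob__1'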
=== modified file 'AppServer/google/appengine/api/channel/channel.py'
--- AppServer/google/appengine/api/channel/channel.py 2010-11-30 10:40:47 +0000
+++ AppServer/google/appengine/api/channel/channel.py 2010-12-24 09:11:16 +0000
@@ -32,29 +32,24 @@
32from google.appengine.api.channel import channel_service_pb32from google.appengine.api.channel import channel_service_pb
33from google.appengine.runtime import apiproxy_errors33from google.appengine.runtime import apiproxy_errors
3434
35MAX_DURATION = 60 * 60 * 435
3636MAXIMUM_CLIENT_ID_LENGTH = 64
37MAX_SIMULTANEOUS_CONNECTIONS = 1037
38MAXIMUM_MESSAGE_LENGTH = 32767
3839
3940
40class Error(Exception):41class Error(Exception):
41 """Base error class for this module."""42 """Base error class for this module."""
4243
4344
44class InvalidChannelKeyError(Error):45class InvalidChannelClientIdError(Error):
45 """Error that indicates a bad channel id."""46 """Error that indicates a bad client id."""
4647
47class InvalidChannelKeyError(Error):
48 """Error that indicates a bad channel key."""
4948
50class InvalidMessageError(Error):49class InvalidMessageError(Error):
51 """Error that indicates a message is malformed."""50 """Error that indicates a message is malformed."""
5251
5352
54class ChannelTimeoutError(Error):
55 """Error that indicates the given channel has timed out."""
56
57
58def _ToChannelError(error):53def _ToChannelError(error):
59 """Translate an application error to a channel Error, if possible.54 """Translate an application error to a channel Error, if possible.
6055
@@ -67,11 +62,9 @@
67 """62 """
68 error_map = {63 error_map = {
69 channel_service_pb.ChannelServiceError.INVALID_CHANNEL_KEY:64 channel_service_pb.ChannelServiceError.INVALID_CHANNEL_KEY:
70 InvalidChannelKeyError,65 InvalidChannelClientIdError,
71 channel_service_pb.ChannelServiceError.BAD_MESSAGE:66 channel_service_pb.ChannelServiceError.BAD_MESSAGE:
72 InvalidMessageError,67 InvalidMessageError,
73 channel_service_pb.ChannelServiceError.CHANNEL_TIMEOUT:
74 ChannelTimeoutError
75 }68 }
7669
77 if error.application_error in error_map:70 if error.application_error in error_map:
@@ -88,24 +81,52 @@
88 return 'xmpp'81 return 'xmpp'
8982
9083
91def create_channel(application_key):84def _ValidateClientId(client_id):
85 """Valides a client id.
86
87 Args:
88 client_id: The client id provided by the application.
89
90 Returns:
91 If the client id is of type str, returns the original client id.
92 If the client id is of type unicode, returns the id encoded to utf-8.
93
94 Raises:
95 InvalidChannelClientIdError: if client id is not an instance of str or
96 unicode, or if the (utf-8 encoded) string is longer than 64 characters.
97 """
98 if isinstance(client_id, unicode):
99 client_id = client_id.encode('utf-8')
100 elif not isinstance(client_id, str):
101 raise InvalidChannelClientIdError
102
103 if len(client_id) > MAXIMUM_CLIENT_ID_LENGTH:
104 raise InvalidChannelClientIdError
105
106 return client_id
107
108
109def create_channel(client_id):
92 """Create a channel.110 """Create a channel.
93111
94 Args:112 Args:
95 application_key: A key to identify this channel on the server side.113 client_id: A string to identify this channel on the server side.
96114
97 Returns:115 Returns:
98 A string id that the client can use to connect to the channel.116 A token that the client can use to connect to the channel.
99117
100 Raises:118 Raises:
101 InvalidChannelTimeoutError: if the specified timeout is invalid.119 InvalidChannelClientIdError: if client_id is not an instance of str or
120 unicode, or if the (utf-8 encoded) string is longer than 64 characters.
102 Other errors returned by _ToChannelError121 Other errors returned by _ToChannelError
103 """122 """
104123
124 client_id = _ValidateClientId(client_id)
125
105 request = channel_service_pb.CreateChannelRequest()126 request = channel_service_pb.CreateChannelRequest()
106 response = channel_service_pb.CreateChannelResponse()127 response = channel_service_pb.CreateChannelResponse()
107128
108 request.set_application_key(application_key)129 request.set_application_key(client_id)
109130
110 try:131 try:
111 apiproxy_stub_map.MakeSyncCall(_GetService(),132 apiproxy_stub_map.MakeSyncCall(_GetService(),
@@ -118,20 +139,33 @@
118 return response.client_id()139 return response.client_id()
119140
120141
121def send_message(application_key, message):142def send_message(client_id, message):
122 """Send a message to a channel.143 """Send a message to a channel.
123144
124 Args:145 Args:
125 application_key: The key passed to create_channel.146 client_id: The client id passed to create_channel.
126 message: A string representing the message to send.147 message: A string representing the message to send.
127148
128 Raises:149 Raises:
150 InvalidChannelClientIdError: if client_id is not an instance of str or
151 unicode, or if the (utf-8 encoded) string is longer than 64 characters.
152 InvalidMessageError: if the message isn't a string or is too long.
129 Errors returned by _ToChannelError153 Errors returned by _ToChannelError
130 """154 """
155 client_id = _ValidateClientId(client_id)
156
157 if isinstance(message, unicode):
158 message = message.encode('utf-8')
159 elif not isinstance(message, str):
160 raise InvalidMessageError
161
162 if len(message) > MAXIMUM_MESSAGE_LENGTH:
163 raise InvalidMessageError
164
131 request = channel_service_pb.SendMessageRequest()165 request = channel_service_pb.SendMessageRequest()
132 response = api_base_pb.VoidProto()166 response = api_base_pb.VoidProto()
133167
134 request.set_application_key(application_key)168 request.set_application_key(client_id)
135 request.set_message(message)169 request.set_message(message)
136170
137 try:171 try:
138172
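The new client id rules are small enough to restate standalone. A sketch mirroring _ValidateClientId; the real code raises InvalidChannelClientIdError, for which ValueError stands in here:

    MAXIMUM_CLIENT_ID_LENGTH = 64

    def validate_client_id(client_id):
        # unicode ids are utf-8 encoded before the length check, so a
        # multi-byte id can be rejected at fewer than 64 characters.
        if isinstance(client_id, unicode):
            client_id = client_id.encode('utf-8')
        elif not isinstance(client_id, str):
            raise ValueError('client id must be str or unicode')
        if len(client_id) > MAXIMUM_CLIENT_ID_LENGTH:
            raise ValueError('client id exceeds %d bytes'
                             % MAXIMUM_CLIENT_ID_LENGTH)
        return client_id

    assert validate_client_id(u'room-42') == 'room-42'

send_message applies the same encoding rule to the message body against the 32767-byte MAXIMUM_MESSAGE_LENGTH.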
=== modified file 'AppServer/google/appengine/api/channel/channel_service_pb.py'
--- AppServer/google/appengine/api/channel/channel_service_pb.py 2010-11-30 10:40:47 +0000
+++ AppServer/google/appengine/api/channel/channel_service_pb.py 2010-12-24 09:11:16 +0000
@@ -30,14 +30,12 @@
30 INTERNAL_ERROR = 130 INTERNAL_ERROR = 1
31 INVALID_CHANNEL_KEY = 231 INVALID_CHANNEL_KEY = 2
32 BAD_MESSAGE = 332 BAD_MESSAGE = 3
33 CHANNEL_TIMEOUT = 4
3433
35 _ErrorCode_NAMES = {34 _ErrorCode_NAMES = {
36 0: "OK",35 0: "OK",
37 1: "INTERNAL_ERROR",36 1: "INTERNAL_ERROR",
38 2: "INVALID_CHANNEL_KEY",37 2: "INVALID_CHANNEL_KEY",
39 3: "BAD_MESSAGE",38 3: "BAD_MESSAGE",
40 4: "CHANNEL_TIMEOUT",
41 }39 }
4240
43 def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")41 def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
4442
=== modified file 'AppServer/google/appengine/api/datastore.py'
--- AppServer/google/appengine/api/datastore.py 2010-11-30 10:37:25 +0000
+++ AppServer/google/appengine/api/datastore.py 2010-12-24 09:11:16 +0000
@@ -31,34 +31,26 @@
3131
3232
3333
34
34import heapq35import heapq
35import itertools36import itertools
36import logging37import logging
37import os38import os
38import re39import re
39import string
40import sys40import sys
41import threading
41import traceback42import traceback
42from xml.sax import saxutils43from xml.sax import saxutils
4344
44from google.appengine.api import api_base_pb
45from google.appengine.api import apiproxy_rpc
46from google.appengine.api import apiproxy_stub_map45from google.appengine.api import apiproxy_stub_map
47from google.appengine.api import capabilities46from google.appengine.api import capabilities
48from google.appengine.api import datastore_errors47from google.appengine.api import datastore_errors
49from google.appengine.api import datastore_types48from google.appengine.api import datastore_types
50from google.appengine.datastore import datastore_index
51from google.appengine.datastore import datastore_pb49from google.appengine.datastore import datastore_pb
52from google.appengine.runtime import apiproxy_errors50from google.appengine.datastore import datastore_rpc
51from google.appengine.datastore import datastore_query
53from google.appengine.datastore import entity_pb52from google.appengine.datastore import entity_pb
5453
55try:
56 __import__('google.appengine.api.labs.taskqueue.taskqueue_service_pb')
57 taskqueue_service_pb = sys.modules.get(
58 'google.appengine.api.labs.taskqueue.taskqueue_service_pb')
59except ImportError:
60 from google.appengine.api.taskqueue import taskqueue_service_pb
61
62MAX_ALLOWABLE_QUERIES = 3054MAX_ALLOWABLE_QUERIES = 30
6355
64MAXIMUM_RESULTS = 100056MAXIMUM_RESULTS = 1000
@@ -72,22 +64,15 @@
7264
73_MAX_INDEXED_PROPERTIES = 500065_MAX_INDEXED_PROPERTIES = 5000
7466
75_MAX_ID_BATCH_SIZE = 1000 * 1000 * 100067_MAX_ID_BATCH_SIZE = datastore_rpc._MAX_ID_BATCH_SIZE
7668
77Key = datastore_types.Key69Key = datastore_types.Key
78typename = datastore_types.typename70typename = datastore_types.typename
7971
80_txes = {}72_ALLOWED_API_KWARGS = frozenset(['rpc', 'config'])
8173
82_ALLOWED_API_KWARGS = frozenset(['rpc'])74STRONG_CONSISTENCY = datastore_rpc.Configuration.STRONG_CONSISTENCY
8375EVENTUAL_CONSISTENCY = datastore_rpc.Configuration.EVENTUAL_CONSISTENCY
84_ALLOWED_FAILOVER_READ_METHODS = set(
85 ('Get', 'RunQuery', 'RunCompiledQuery', 'Count', 'Next'))
86
87ARBITRARY_FAILOVER_MS = -1
88
89STRONG_CONSISTENCY = 0
90EVENTUAL_CONSISTENCY = 1
9176
92_MAX_INT_32 = 2**31-177_MAX_INT_32 = 2**31-1
9378
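With STRONG_CONSISTENCY and EVENTUAL_CONSISTENCY now aliasing datastore_rpc.Configuration values, read policy travels in a config object instead of a custom RPC subclass. A hedged usage sketch (CreateConfig is defined further down in this diff; eventual_get is illustrative and needs a registered datastore stub):

    from google.appengine.api import datastore

    def eventual_get(key):
        # Eventually consistent reads may be served from an older version
        # of the datastore; the policy is only valid on read operations.
        config = datastore.CreateConfig(
            read_policy=datastore.EVENTUAL_CONSISTENCY)
        return datastore.Get(key, config=config)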
@@ -161,24 +146,101 @@
161 return (keys, multiple)146 return (keys, multiple)
162147
163148
164def GetRpcFromKwargs(kwargs):149def _GetConfigFromKwargs(kwargs):
150 """Get a Configuration object from the keyword arguments.
151
152 This is purely an internal helper for the various public APIs below
153 such as Get().
154
155 Args:
156 kwargs: A dict containing the keyword arguments passed to a public API.
157
158 Returns:
159 A UserRPC instance, or a Configuration instance, or None.
160
161 Raises:
162 BadArgumentError if unexpected keyword arguments are present.
163 """
165 if not kwargs:164 if not kwargs:
166 return None165 return None
167 args_diff = set(kwargs) - _ALLOWED_API_KWARGS166 args_diff = set(kwargs) - _ALLOWED_API_KWARGS
168 if args_diff:167 if args_diff:
169 raise TypeError('Invalid arguments: %s' % ', '.join(args_diff))168 raise datastore_errors.BadArgumentError(
170 return kwargs.get('rpc')169 'Unexpected keyword arguments: %s' % ', '.join(args_diff))
171170 rpc = kwargs.get('rpc')
172171 config = kwargs.get('config')
173def _MakeSyncCall(service, call, request, response, rpc=None):172 if rpc is not None:
173 if config is not None:
174 raise datastore_errors.BadArgumentError(
175 'Expected rpc= or config= argument but not both')
176 if isinstance(rpc, (apiproxy_stub_map.UserRPC,
177 datastore_rpc.Configuration)):
178 return rpc
179 raise datastore_errors.BadArgumentError(
180 'rpc= argument should be None or a UserRPC instance')
181 if config is not None:
182 if not isinstance(config, (datastore_rpc.Configuration,
183 apiproxy_stub_map.UserRPC)):
184 raise datastore_errors.BadArgumentError(
185 'config= argument should be None or a Configuration instance')
186 return config
187
188
189class DatastoreAdapter(datastore_rpc.AbstractAdapter):
190 """Adapter between datatypes defined here (Entity etc.) and protobufs.
191
192 See the base class in datastore_rpc.py for more docs.
193 """
194
195 def key_to_pb(self, key):
196 return key._Key__reference
197
198 def pb_to_key(self, pb):
199 return Key._FromPb(pb)
200
201 def entity_to_pb(self, entity):
202 return entity._ToPb()
203
204 def pb_to_entity(self, pb):
205 return Entity._FromPb(pb)
206
207
208_adapter = DatastoreAdapter()
209_thread_local = threading.local()
210
211_ENV_KEY = '__DATASTORE_CONNECTION_INITIALIZED__'
212
213
214def _GetConnection():
215 """Retrieve a datastore connection local to the thread."""
216 connection = None
217 if os.getenv(_ENV_KEY):
218 try:
219 connection = _thread_local.connection
220 except AttributeError:
221 pass
222 if connection is None:
223 connection = datastore_rpc.Connection(adapter=_adapter)
224 _SetConnection(connection)
225 return connection
226
227
228def _SetConnection(connection):
229 """Sets the datastore connection local to the thread."""
230 _thread_local.connection = connection
231 os.environ[_ENV_KEY] = '1'
232
233
234
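The connection cache pairs a threading.local with an os.environ flag, so a runtime that wipes os.environ between requests also invalidates any connection cached by an earlier request. A self-contained sketch of the pattern; FakeConnection stands in for datastore_rpc.Connection:

    import os
    import threading

    _ENV_KEY = '__DATASTORE_CONNECTION_INITIALIZED__'
    _local = threading.local()

    class FakeConnection(object):
        pass

    def get_connection():
        connection = None
        # The flag lives in os.environ: clearing the environment (as the
        # runtime does per request) forces a fresh connection.
        if os.getenv(_ENV_KEY):
            connection = getattr(_local, 'connection', None)
        if connection is None:
            connection = FakeConnection()
            _local.connection = connection
            os.environ[_ENV_KEY] = '1'
        return connection

    assert get_connection() is get_connection()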
235def _MakeSyncCall(service, call, request, response, config=None):
174 """The APIProxy entry point for a synchronous API call.236 """The APIProxy entry point for a synchronous API call.
175237
176 Args:238 Args:
177 service: string representing which service to call239 service: For backwards compatibility, must be 'datastore_v3'.
178 call: string representing which function to call240 call: String representing which function to call.
179 request: protocol buffer for the request241 request: Protocol buffer for the request.
180 response: protocol buffer for the response242 response: Protocol buffer for the response.
181 rpc: datastore.DatastoreRPC to use for this request.243 config: Optional Configuration to use for this request.
182244
183 Returns:245 Returns:
184 Response protocol buffer. Caller should always use returned value246 Response protocol buffer. Caller should always use returned value
@@ -187,67 +249,105 @@
187 Raises:249 Raises:
188 apiproxy_errors.Error or a subclass.250 apiproxy_errors.Error or a subclass.
189 """251 """
190 if not rpc:252 conn = _GetConnection()
191 rpc = CreateRPC(service)253 if isinstance(request, datastore_pb.Query):
192254 conn._set_request_read_policy(request, config)
193 rpc.make_call(call, request, response)255 conn._set_request_transaction(request)
194 rpc.wait()256 rpc = conn.make_rpc_call(config, call, request, response)
195 rpc.check_success()257 conn.check_rpc_success(rpc)
196 return response258 return response
197259
198260
199def CreateRPC(service='datastore_v3', deadline=None, callback=None,261def CreateRPC(service='datastore_v3',
200 read_policy=STRONG_CONSISTENCY):262 deadline=None, callback=None, read_policy=None):
201 """Create an rpc for use in configuring datastore calls.263 """Create an rpc for use in configuring datastore calls.
202264
203 Args:265 NOTE: This function exists for backwards compatibility. Please use
204 deadline: float, deadline for calls in seconds.266 CreateConfig() instead. NOTE: the latter uses 'on_completion',
205 callback: callable, a callback triggered when this rpc completes,267 which is a function taking an argument, whereas CreateRPC uses
206 accepts one argument: the returned rpc.268 'callback' which is a function without arguments.
207 read_policy: flag, set to EVENTUAL_CONSISTENCY to enable eventually269
208 consistent reads270 Args:
209271 service: Optional string; for backwards compatibility, must be
210 Returns:272 'datastore_v3'.
211 A datastore.DatastoreRPC instance.273 deadline: Optional int or float, deadline for calls in seconds.
212 """274 callback: Optional callable, a callback triggered when this rpc
213 return DatastoreRPC(service, deadline, callback, read_policy)275 completes; takes no arguments.
214276 read_policy: Optional read policy; set to EVENTUAL_CONSISTENCY to
215277 enable eventually consistent reads (i.e. reads that may be
216class DatastoreRPC(apiproxy_stub_map.UserRPC):278 satisfied from an older version of the datastore in some cases).
217 """Specialized RPC for the datastore.279 The default read policy may have to wait until in-flight
218280 transactions are committed.
219 Wraps the default RPC class and sets appropriate values for use by the281
220 datastore.282 Returns:
221283 A UserRPC instance.
222 This class or a subclass of it is intended to be instantiated by284 """
223 developers interested in setting specific request parameters, such as285 assert service == 'datastore_v3'
224 deadline, on API calls. It will be used to make the actual call.286 conn = _GetConnection()
225 """287 config = None
226288 if deadline is not None:
227 def __init__(self, service='datastore_v3', deadline=None, callback=None,289 config = datastore_rpc.Configuration(deadline=deadline)
228 read_policy=STRONG_CONSISTENCY):290 rpc = conn.create_rpc(config)
229 super(DatastoreRPC, self).__init__(service, deadline, callback)291 rpc.callback = callback
230 self.read_policy = read_policy292 if read_policy is not None:
231293 rpc.read_policy = read_policy
232 def make_call(self, call, request, response):294 return rpc
233 if self.read_policy == EVENTUAL_CONSISTENCY:295
234 if call not in _ALLOWED_FAILOVER_READ_METHODS:296
235 raise datastore_errors.BadRequestError(297def CreateConfig(**kwds):
236 'read_policy is only supported on read operations.')298 """Create a Configuration object for use in configuring datastore calls.
237 if call != 'Next':299
238 request.set_failover_ms(ARBITRARY_FAILOVER_MS)300 This configuration can be passed to most datastore calls using the
239 super(DatastoreRPC, self).make_call(call, request, response)301 'config=...' argument.
240302
241 def clone(self):303 Args:
242 """Make a shallow copy of this instance.304 deadline: Optional deadline; default None (which means the
243305 system default deadline will be used, typically 5 seconds).
244 This is usually used when an RPC has been specified with some configuration306 on_completion: Optional callback function; default None. If
245 options and is being used as a template for multiple RPCs outside of a307 specified, it will be called with a UserRPC object as argument
246 developer's easy control.308 when an RPC completes.
247 """309 read_policy: Optional read policy; set to EVENTUAL_CONSISTENCY to
248 assert self.state == apiproxy_rpc.RPC.IDLE310 enable eventually consistent reads (i.e. reads that may be
249 return self.__class__(311 satisfied from an older version of the datastore in some cases).
250 self.service, self.deadline, self.callback, self.read_policy)312 The default read policy may have to wait until in-flight
313 transactions are committed.
314 **kwds: Other keyword arguments as long as they are supported by
315 datastore_rpc.Configuration().
316
317 Returns:
318 A datastore_rpc.Configuration instance.
319 """
320 return datastore_rpc.Configuration(**kwds)
321
322
323def _Rpc2Config(rpc):
324 """Internal helper to construct a Configuration from a UserRPC object.
325
326 If the argument is a UserRPC object, it returns a Configuration
327 object constructed using the same deadline and read_policy;
328 otherwise it returns the argument unchanged.
329
330 NOTE: If the argument is a UserRPC object, its callback is *not*
331 transferred to the Configuration object; the Configuration's
332 on_completion attribute is set to None. This is done because (a)
333 the signature of on_completion differs from the callback signature;
334 (b) the caller probably doesn't expect the callback to be called
335 more than once; and (c) the callback, being argument-less, wouldn't
336 know which UserRPC object was actually completing. But yes,
337 technically, this is a backwards incompatibility.
338
339 Args:
340 rpc: None, a UserRPC object, or a datastore_rpc.Configuration object.
341
342 Returns:
343 None or a datastore_rpc.Configuration object.
344 """
345 if rpc is None or isinstance(rpc, datastore_rpc.Configuration):
346 return rpc
347 read_policy = getattr(rpc, 'read_policy', None)
348 return datastore_rpc.Configuration(deadline=rpc.deadline,
349 read_policy=read_policy,
350 config=_GetConnection().config)
251351
252352
253def Put(entities, **kwargs):353def Put(entities, **kwargs):
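Both calling conventions stay accepted on the public functions below: a Configuration via config=, or a legacy UserRPC via rpc=, which _Rpc2Config converts (dropping its callback). A usage sketch, assuming a registered datastore stub; both helpers are illustrative:

    from google.appengine.api import datastore

    def get_with_deadline(keys):
        # New style: deadline (and read policy) travel in a Configuration.
        config = datastore.CreateConfig(deadline=2.0)
        return datastore.Get(keys, config=config)

    def get_with_legacy_rpc(keys):
        # Old style: a UserRPC built by CreateRPC is still understood.
        rpc = datastore.CreateRPC(deadline=2.0)
        return datastore.Get(keys, rpc=rpc)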
@@ -261,7 +361,7 @@
261361
262 Args:362 Args:
263 entities: Entity or list of Entities363 entities: Entity or list of Entities
264 rpc: datastore.RPC to use for this request.364 config: Optional Configuration to use for this request.
265365
266 Returns:366 Returns:
267 Key or list of Keys367 Key or list of Keys
@@ -269,7 +369,10 @@
269 Raises:369 Raises:
270 TransactionFailedError, if the Put could not be committed.370 TransactionFailedError, if the Put could not be committed.
271 """371 """
272 rpc = GetRpcFromKwargs(kwargs)372 config = _GetConfigFromKwargs(kwargs)
373 if getattr(config, 'read_policy', None) == EVENTUAL_CONSISTENCY:
374 raise datastore_errors.BadRequestError(
375 'read_policy is only supported on read operations.')
273 entities, multiple = NormalizeAndTypeCheck(entities, Entity)376 entities, multiple = NormalizeAndTypeCheck(entities, Entity)
274377
275 if multiple and not entities:378 if multiple and not entities:
@@ -280,36 +383,25 @@
280 raise datastore_errors.BadRequestError(383 raise datastore_errors.BadRequestError(
281 'App and kind must not be empty, in entity: %s' % entity)384 'App and kind must not be empty, in entity: %s' % entity)
282385
283 req = datastore_pb.PutRequest()386 def extra_hook(keys):
284 req.entity_list().extend([e._ToPb() for e in entities])387 num_keys = len(keys)
285388 num_entities = len(entities)
286 keys = [e.key() for e in entities]389 if num_keys != num_entities:
287 tx = _MaybeSetupTransaction(req, keys)390 raise datastore_errors.InternalError(
288391 'Put accepted %d entities but returned %d keys.' %
289 try:392 (num_entities, num_keys))
290 resp = _MakeSyncCall(393
291 'datastore_v3', 'Put', req, datastore_pb.PutResponse(), rpc)394 for entity, key in zip(entities, keys):
292 except apiproxy_errors.ApplicationError, err:395 if entity._Entity__key._Key__reference != key._Key__reference:
293 raise _ToDatastoreError(err)396 assert not entity._Entity__key.has_id_or_name()
294397 entity._Entity__key._Key__reference.CopyFrom(key._Key__reference)
295 keys = resp.key_list()398
296 num_keys = len(keys)399 if multiple:
297 num_entities = len(entities)400 return keys
298 if num_keys != num_entities:401 else:
299 raise datastore_errors.InternalError(402 return keys[0]
300 'Put accepted %d entities but returned %d keys.' %403
301 (num_entities, num_keys))404 return _GetConnection().async_put(config, entities, extra_hook).get_result()
302
303 for entity, key in zip(entities, keys):
304 entity._Entity__key._Key__reference.CopyFrom(key)
305
306 if tx:
307 tx.entity_group = entities[0].entity_group()
308
309 if multiple:
310 return [Key._FromPb(k) for k in keys]
311 else:
312 return Key._FromPb(resp.key(0))
313405
314406
315def Get(keys, **kwargs):407def Get(keys, **kwargs):
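The rewritten Put defers to the connection's async_put; its extra_hook verifies the key count and copies server-assigned ids back into the entities. The sketch below observes that copy-back; put_incomplete is illustrative and needs a registered datastore stub to run:

    from google.appengine.api import datastore

    def put_incomplete(kind='Greeting'):
        entity = datastore.Entity(kind)   # key has no id or name yet
        key = datastore.Put(entity)
        # extra_hook copied the returned reference into the entity, so
        # the entity's key and the returned key now agree.
        assert str(entity.key()) == str(key)
        return key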
@@ -329,39 +421,26 @@
329 Args:421 Args:
330 # the primary key(s) of the entity(ies) to retrieve422 # the primary key(s) of the entity(ies) to retrieve
331 keys: Key or string or list of Keys or strings423 keys: Key or string or list of Keys or strings
332 rpc: datastore.RPC to use for this request.424 config: Optional Configuration to use for this request.
333425
334 Returns:426 Returns:
335 Entity or list of Entity objects427 Entity or list of Entity objects
336 """428 """
337 rpc = GetRpcFromKwargs(kwargs)429 config = _GetConfigFromKwargs(kwargs)
338 keys, multiple = NormalizeAndTypeCheckKeys(keys)430 keys, multiple = NormalizeAndTypeCheckKeys(keys)
339431
340 if multiple and not keys:432 if multiple and not keys:
341 return []433 return []
342 req = datastore_pb.GetRequest()434
343 req.key_list().extend([key._Key__reference for key in keys])435 def extra_hook(entities):
344 _MaybeSetupTransaction(req, keys)436 if multiple:
345437 return entities
346 try:
347 resp = _MakeSyncCall(
348 'datastore_v3', 'Get', req, datastore_pb.GetResponse(), rpc)
349 except apiproxy_errors.ApplicationError, err:
350 raise _ToDatastoreError(err)
351
352 entities = []
353 for group in resp.entity_list():
354 if group.has_entity():
355 entities.append(Entity._FromPb(group.entity()))
356 else:438 else:
357 entities.append(None)439 if entities[0] is None:
440 raise datastore_errors.EntityNotFoundError()
441 return entities[0]
358442
359 if multiple:443 return _GetConnection().async_get(config, keys, extra_hook).get_result()
360 return entities
361 else:
362 if entities[0] is None:
363 raise datastore_errors.EntityNotFoundError()
364 return entities[0]
365444
366445
367def Delete(keys, **kwargs):446def Delete(keys, **kwargs):
@@ -374,27 +453,21 @@
374 Args:453 Args:
375 # the primary key(s) of the entity(ies) to delete454 # the primary key(s) of the entity(ies) to delete
376 keys: Key or string or list of Keys or strings455 keys: Key or string or list of Keys or strings
377 rpc: datastore.RPC to use for this request.456 config: Optional Configuration to use for this request.
378457
379 Raises:458 Raises:
380 TransactionFailedError, if the Delete could not be committed.459 TransactionFailedError, if the Delete could not be committed.
381 """460 """
382 rpc = GetRpcFromKwargs(kwargs)461 config = _GetConfigFromKwargs(kwargs)
462 if getattr(config, 'read_policy', None) == EVENTUAL_CONSISTENCY:
463 raise datastore_errors.BadRequestError(
464 'read_policy is only supported on read operations.')
383 keys, multiple = NormalizeAndTypeCheckKeys(keys)465 keys, multiple = NormalizeAndTypeCheckKeys(keys)
384466
385 if multiple and not keys:467 if multiple and not keys:
386 return468 return
387469
388 req = datastore_pb.DeleteRequest()470 _GetConnection().async_delete(config, keys).get_result()
389 req.key_list().extend([key._Key__reference for key in keys])
390
391 tx = _MaybeSetupTransaction(req, keys)
392
393 try:
394 _MakeSyncCall(
395 'datastore_v3', 'Delete', req, datastore_pb.DeleteResponse(), rpc)
396 except apiproxy_errors.ApplicationError, err:
397 raise _ToDatastoreError(err)
398471
399472
400class Entity(dict):473class Entity(dict):
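Put, Delete and AllocateIds now share the same fail-fast guard against read policies on mutations. Restated standalone, with stand-ins for the Configuration constant and the datastore_errors exception:

    EVENTUAL_CONSISTENCY = 1            # stand-in for the Configuration value

    class BadRequestError(Exception):   # stand-in for datastore_errors'
        pass

    def check_mutation_config(config):
        # Mirrors the guard at the top of the write paths above: a read
        # policy makes no sense on a mutation, so reject it up front.
        if getattr(config, 'read_policy', None) == EVENTUAL_CONSISTENCY:
            raise BadRequestError(
                'read_policy is only supported on read operations.')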
@@ -704,7 +777,7 @@
704 return pb777 return pb
705778
706 @staticmethod779 @staticmethod
707 def FromPb(pb):780 def FromPb(pb, validate_reserved_properties=True):
708 """Static factory method. Returns the Entity representation of the781 """Static factory method. Returns the Entity representation of the
709 given protocol buffer (datastore_pb.Entity).782 given protocol buffer (datastore_pb.Entity).
710783
@@ -719,10 +792,12 @@
719 real_pb.ParseFromString(pb)792 real_pb.ParseFromString(pb)
720 pb = real_pb793 pb = real_pb
721794
722 return Entity._FromPb(pb, require_valid_key=False)795 return Entity._FromPb(
796 pb, require_valid_key=False,
797 validate_reserved_properties=validate_reserved_properties)
723798
724 @staticmethod799 @staticmethod
725 def _FromPb(pb, require_valid_key=True):800 def _FromPb(pb, require_valid_key=True, validate_reserved_properties=True):
726 """Static factory method. Returns the Entity representation of the801 """Static factory method. Returns the Entity representation of the
727 given protocol buffer (datastore_pb.Entity). Not intended to be used by802 given protocol buffer (datastore_pb.Entity). Not intended to be used by
728 application developers.803 application developers.
@@ -790,7 +865,8 @@
790 for name, value in temporary_values.iteritems():865 for name, value in temporary_values.iteritems():
791 decoded_name = unicode(name.decode('utf-8'))866 decoded_name = unicode(name.decode('utf-8'))
792867
793 datastore_types.ValidateReadProperty(decoded_name, value)868 datastore_types.ValidateReadProperty(
869 decoded_name, value, read_only=(not validate_reserved_properties))
794870
795 dict.__setitem__(e, decoded_name, value)871 dict.__setitem__(e, decoded_name, value)
796872
@@ -874,33 +950,29 @@
874 the query. The returned count is cached; successive Count() calls will not950 the query. The returned count is cached; successive Count() calls will not
875 re-scan the datastore unless the query is changed.951 re-scan the datastore unless the query is changed.
876 """952 """
877 ASCENDING = datastore_pb.Query_Order.ASCENDING953 ASCENDING = datastore_query.PropertyOrder.ASCENDING
878 DESCENDING = datastore_pb.Query_Order.DESCENDING954 DESCENDING = datastore_query.PropertyOrder.DESCENDING
879955
880 ORDER_FIRST = datastore_pb.Query.ORDER_FIRST956 ORDER_FIRST = datastore_query.QueryOptions.ORDER_FIRST
881 ANCESTOR_FIRST = datastore_pb.Query.ANCESTOR_FIRST957 ANCESTOR_FIRST = datastore_query.QueryOptions.ANCESTOR_FIRST
882 FILTER_FIRST = datastore_pb.Query.FILTER_FIRST958 FILTER_FIRST = datastore_query.QueryOptions.FILTER_FIRST
883959
884 OPERATORS = {'<': datastore_pb.Query_Filter.LESS_THAN,960 OPERATORS = {'==': datastore_query.PropertyFilter._OPERATORS['=']}
885 '<=': datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL,961 OPERATORS.update(datastore_query.PropertyFilter._OPERATORS)
886 '>': datastore_pb.Query_Filter.GREATER_THAN,962
887 '>=': datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL,963 INEQUALITY_OPERATORS = datastore_query.PropertyFilter._INEQUALITY_OPERATORS
888 '=': datastore_pb.Query_Filter.EQUAL,964
889 '==': datastore_pb.Query_Filter.EQUAL,
890 }
891 INEQUALITY_OPERATORS = frozenset(['<', '<=', '>', '>='])
892 UPPERBOUND_INEQUALITY_OPERATORS = frozenset(['<', '<='])965 UPPERBOUND_INEQUALITY_OPERATORS = frozenset(['<', '<='])
893 FILTER_REGEX = re.compile(966 FILTER_REGEX = re.compile(
894 '^\s*([^\s]+)(\s+(%s)\s*)?$' % '|'.join(OPERATORS.keys()),967 '^\s*([^\s]+)(\s+(%s)\s*)?$' % '|'.join(OPERATORS),
895 re.IGNORECASE | re.UNICODE)968 re.IGNORECASE | re.UNICODE)
896969
897 __kind = None970 __kind = None
898 __app = None971 __app = None
899 __namespace = None972 __namespace = None
900 __orderings = None973 __orderings = None
901 __cached_count = None
902 __hint = None974 __hint = None
903 __ancestor = None975 __ancestor_pb = None
904 __compile = None976 __compile = None
905977
906 __cursor = None978 __cursor = None
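The operator table is now derived from datastore_query.PropertyFilter, keeping '==' as a public alias for '='. A standalone illustration of the aliasing; the numeric codes are stand-ins for the real operator constants:

    _FILTER_OPERATORS = {'<': 1, '<=': 2, '>': 3, '>=': 4, '=': 5}

    OPERATORS = {'==': _FILTER_OPERATORS['=']}
    OPERATORS.update(_FILTER_OPERATORS)

    assert OPERATORS['=='] == OPERATORS['=']
    assert set(OPERATORS) == set(['<', '<=', '>', '>=', '=', '=='])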
@@ -1099,33 +1171,114 @@
1099 # this query1171 # this query
1100 Query1172 Query
1101 """1173 """
1102 self.__ancestor = _GetCompleteKeyOrError(ancestor)1174 self.__ancestor_pb = _GetCompleteKeyOrError(ancestor)._ToPb()
1103 return self1175 return self
11041176
1105 def IsKeysOnly(self):1177 def IsKeysOnly(self):
1106 """Returns True if this query is keys only, false otherwise."""1178 """Returns True if this query is keys only, false otherwise."""
1107 return self.__keys_only1179 return self.__keys_only
11081180
1109 def GetCompiledCursor(self):1181 def GetQueryOptions(self):
1182 """Returns a datastore_query.QueryOptions for the current instance."""
1183 return datastore_query.QueryOptions(keys_only=self.__keys_only,
1184 produce_cursors=self.__compile,
1185 start_cursor=self.__cursor,
1186 end_cursor=self.__end_cursor,
1187 hint=self.__hint)
1188
1189 def GetQuery(self):
1190 """Returns a datastore_query.Query for the current instance."""
1191 return datastore_query.Query(app=self.__app,
1192 namespace=self.__namespace,
1193 kind=self.__kind,
1194 ancestor=self.__ancestor_pb,
1195 filter_predicate=self.GetFilterPredicate(),
1196 order=self.GetOrder())
1197
1198 def GetOrder(self):
1199 """Gets a datastore_query.Order for the current instance.
1200
1201 Returns:
1202 datastore_query.Order or None if there are no sort orders set on the
1203 current Query.
1204 """
1205
1206 orders = [datastore_query.PropertyOrder(property, direction)
1207 for property, direction in self.__orderings]
1208 if orders:
1209 return datastore_query.CompositeOrder(orders)
1210 return None
1211
1212 def GetFilterPredicate(self):
1213 """Returns a datastore_query.FilterPredicate for the current instance.
1214
1215 Returns:
1216 datastore_query.FilterPredicate or None if no filters are set on the
1217 current Query.
1218 """
1219 ordered_filters = [(i, f) for f, i in self.__filter_order.iteritems()]
1220 ordered_filters.sort()
1221
1222 property_filters = []
1223 for _, filter_str in ordered_filters:
1224 if filter_str not in self:
1225 continue
1226
1227 values = self[filter_str]
1228 match = self._CheckFilter(filter_str, values)
1229 name = match.group(1)
1230
1231 op = match.group(3)
1232 if op is None or op == '==':
1233 op = '='
1234
1235 property_filters.append(datastore_query.make_filter(name, op, values))
1236
1237 if property_filters:
1238 return datastore_query.CompositeFilter(
1239 datastore_query.CompositeFilter.AND,
1240 property_filters)
1241 return None
1242
1243 def GetCursor(self):
1244 """Get the cursor from the last run of this query.
1245
1246 The source of this cursor varies depending on what the last call was:
1247 - Run: A cursor that points immediately after the last result pulled off
1248 the returned iterator.
1249 - Get: A cursor that points immediately after the last result in the
1250 returned list.
1251 - Count: A cursor that points immediately after the last result counted.
1252
1253 Returns:
1254 A datastore_query.Cursor object that can be used in subsequent query
1255 requests.
1256 """
1110 try:1257 try:
1111 compiled_cursor = self.__last_iterator.GetCompiledCursor(self)1258 cursor = self.__cursor_source()
1112 if not compiled_cursor:1259 if not cursor:
1113 raise AttributeError()1260 raise AttributeError()
1114 except AttributeError:1261 except AttributeError:
1115 raise AssertionError('No cursor available, either this query has not '1262 raise AssertionError('No cursor available, either this query has not '
1116 'been executed or there is no compilation '1263 'been executed or there is no compilation '
1117 'available for this kind of query')1264 'available for this kind of query')
1118 return compiled_cursor1265 return cursor
11191266
1120 def GetCompiledQuery(self):1267 def GetBatcher(self, config=None):
1121 try:1268 """Runs this query and returns a datastore_query.Batcher.
1122 if not self.__compiled_query:1269
1123 raise AttributeError()1270 This is not intended to be used by application developers. Use Get()
1124 except AttributeError:1271 instead!
1125 raise AssertionError('No compiled query available, either this query has '1272
1126 'not been executed or there is no compilation '1273 Args:
1127 'available for this kind of query')1274 config: Optional Configuration to use for this request.
1128 return self.__compiled_query1275
1276 Returns:
1277 # an iterator that provides access to the query results
1278 Iterator
1279 """
1280 query_options = self.GetQueryOptions().merge(config)
1281 return self.GetQuery().run(_GetConnection(), query_options)
11291282
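GetQuery, GetQueryOptions and GetBatcher decompose what the old monolithic _ToPb built in one shot, with the per-call config merged over the query's own options. A sketch of that composition; run_query is an illustrative helper taking a datastore_rpc.Connection:

    def run_query(q, conn, config=None):
        # q: a datastore.Query. Per-call options override the query's own.
        options = q.GetQueryOptions().merge(config)
        # GetQuery() captures app, namespace, kind, ancestor, filters and
        # order; run() returns a batcher that yields result batches.
        return q.GetQuery().run(conn, options)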
1130 def Run(self, **kwargs):1283 def Run(self, **kwargs):
1131 """Runs this query.1284 """Runs this query.
@@ -1142,80 +1295,17 @@
1142 offset: integer, offset for the query.1295 offset: integer, offset for the query.
1143 prefetch_count: integer, number of results to return in the first query.1296 prefetch_count: integer, number of results to return in the first query.
1144 next_count: number of results to return in subsequent next queries.1297 next_count: number of results to return in subsequent next queries.
1145 rpc: datastore.RPC to use for this request.1298 config: Optional Configuration to use for this request.
11461299
1147 Returns:1300 Returns:
1148 # an iterator that provides access to the query results1301 # an iterator that provides access to the query results
1149 Iterator1302 Iterator
1150 """1303 """
1151 return self._Run(**kwargs)1304 config = _Rpc2Config(_GetConfigFromKwargs(kwargs))
11521305 itr = Iterator(self.GetBatcher(config=config))
1153 def _Run(self, limit=None, offset=None,1306 self.__cursor_source = itr.cursor
1154 prefetch_count=None, next_count=None, **kwargs):1307 self.__compiled_query_source = itr._compiled_query
1155 """Runs this query, with an optional result limit and an optional offset.1308 return itr
1156
1157 Identical to Run, with the extra optional limit, offset, prefetch_count,
1158 next_count parameters. These parameters must be integers >= 0.
1159
1160 This is not intended to be used by application developers. Use Get()
1161 instead!
1162
1163 Args:
1164 limit: integer, limit for the query.
1165 offset: integer, offset for the query.
1166 prefetch_count: integer, number of results to return in the first query.
1167 next_count: number of results to return in subsequent next queries.
1168 rpc: datastore.RPC to use for this request.
1169
1170 Returns:
1171 # an iterator that provides access to the query results
1172 Iterator
1173 """
1174 rpc = GetRpcFromKwargs(kwargs)
1175 self.__last_iterator, self.__compiled_query = Query._RunInternal(
1176 self._ToPb(limit, offset, prefetch_count),
1177 next_count=next_count,
1178 rpc=rpc)
1179
1180 return self.__last_iterator
1181
1182 @staticmethod
1183 def _RunInternal(request, next_count=None, rpc=None):
1184 """Runs the given request and wraps the result in an iterator.
1185
1186 Args:
1187 request: datastore_pb.query, the request to run.
1188 next_count: number of results to return in subsequent next queries.
1189 rpc: datastore.RPC to use for this request.
1190
1191 Returns:
1192 (Iterator, datastore_pb.CompiledQuery), the iterator and compiled query
1193 that result from running the given request.
1194 """
1195
1196 if rpc:
1197 rpc_clone = rpc.clone()
1198 else:
1199 rpc_clone = None
1200
1201 try:
1202 result = _MakeSyncCall('datastore_v3', 'RunQuery', request,
1203 datastore_pb.QueryResult(), rpc)
1204 except apiproxy_errors.ApplicationError, err:
1205 try:
1206 raise _ToDatastoreError(err)
1207 except datastore_errors.NeedIndexError, exc:
1208 yaml = datastore_index.IndexYamlForQuery(
1209 *datastore_index.CompositeIndexForQuery(request)[1:-1])
1210 raise datastore_errors.NeedIndexError(
1211 str(exc) + '\nThis query needs this index:\n' + yaml)
1212
1213 iterator = Iterator(result, query_request_pb=request, batch_size=next_count,
1214 rpc=rpc_clone)
1215 if result.has_compiled_query():
1216 return iterator, result.compiled_query()
1217 else:
1218 return iterator, None
12191309
1220 def Get(self, limit, offset=0, **kwargs):1310 def Get(self, limit, offset=0, **kwargs):
1221 """Fetches and returns a maximum number of results from the query.1311 """Fetches and returns a maximum number of results from the query.
@@ -1249,54 +1339,64 @@
1249 int or long1339 int or long
1250 # the number of entities to skip1340 # the number of entities to skip
1251 int or long1341 int or long
1252 rpc: datastore.RPC to use for this request.1342 config: Optional Configuration to use for this request. If limit and
1343 offset are specified in the config, they are ignored.
12531344
1254 Returns:1345 Returns:
1255 # a list of entities1346 # a list of entities
1256 [Entity, ...]1347 [Entity, ...]
1257 """1348 """
1258 if not isinstance(limit, (int, long)) or limit < 0:1349 config = _Rpc2Config(_GetConfigFromKwargs(kwargs))
1259 raise datastore_errors.BadArgumentError(1350 batcher = self.GetBatcher(datastore_query.QueryOptions(
1260 'Argument to Get named \'limit\' must be an int greater than or '1351 config=config, limit=limit, offset=offset, prefetch_size=limit))
1261 'equal to 0; received %s (a %s)' % (limit, typename(limit)))1352
12621353 if limit is None:
1263 if not isinstance(offset, (int, long)) or offset < 0:1354 batch = batcher.next_batch(_MAX_INT_32)
1264 raise datastore_errors.BadArgumentError(1355 else:
1265 'Argument to Get named \'offset\' must be an int greater than or '1356 batch = batcher.next_batch(limit)
1266 'equal to 0; received %s (a %s)' % (offset, typename(offset)))1357 self.__cursor_source = lambda: batch.end_cursor
12671358 self.__compiled_query_source = lambda: batch._compiled_query
1268 return self._Run(1359 return batch.results
1269 limit=limit, offset=offset, prefetch_count=limit, **kwargs)._Get(limit)
12701360
1271 def Count(self, limit=1000, **kwargs):1361 def Count(self, limit=1000, **kwargs):
1272 """Returns the number of entities that this query matches. The returned1362 """Returns the number of entities that this query matches.
1273 count is cached; successive Count() calls will not re-scan the datastore
1274 unless the query is changed.
12751363
1276 Args:1364 Args:
1277 limit, a number or None. If there are more results than this, stop short1365 limit, a number or None. If there are more results than this, stop short
1278 and just return this number. Providing this argument makes the count1366 and just return this number. Providing this argument makes the count
1279 operation more efficient.1367 operation more efficient.
1280 rpc: datastore.RPC to use for this request.1368 config: Optional Configuration to use for this request.
12811369
1282 Returns:1370 Returns:
1283 The number of results.1371 The number of results.
1284 """1372 """
1285 if not self.__cached_count:1373 if limit is None:
1286 if limit is None:1374 offset = _MAX_INT_32
1287 offset = _MAX_INT_321375 else:
1288 else:1376 offset = limit
1289 offset = limit1377
12901378 config = datastore_query.QueryOptions(
1291 iterator = self._Run(limit=0, offset=offset, **kwargs)1379 config=_Rpc2Config(_GetConfigFromKwargs(kwargs)),
1292 self.__cached_count = iterator._SkippedResults()1380 limit=0,
12931381 offset=offset)
1294 return self.__cached_count1382
1383 batch = self.GetBatcher(config=config).next()
1384 self.__cursor_source = lambda: batch.cursor(0)
1385 self.__compiled_query_source = lambda: batch._compiled_query
1386 return batch.skipped_results
12951387
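Count drops its result cache and instead issues the query with limit=0 and an offset equal to the count cap, reading the backend's skipped-result tally. A sketch of the trick; count_via_skip is illustrative and needs a registered datastore stub:

    from google.appengine.datastore import datastore_query

    _MAX_INT_32 = 2**31 - 1

    def count_via_skip(query, limit=1000):
        # Ask for zero results but skip up to `limit`; the number the
        # backend reports as skipped is the (capped) match count.
        offset = _MAX_INT_32 if limit is None else limit
        options = datastore_query.QueryOptions(limit=0, offset=offset)
        batch = query.GetBatcher(config=options).next()
        return batch.skipped_results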
1296 def __iter__(self):1388 def __iter__(self):
1297 raise NotImplementedError(1389 raise NotImplementedError(
1298 'Query objects should not be used as iterators. Call Run() first.')1390 'Query objects should not be used as iterators. Call Run() first.')
12991391
1392 def __getstate__(self):
1393 state = self.__dict__.copy()
1394 if '_Query__cursor_source' in state:
1395 del state['_Query__cursor_source']
1396 if '_Query__compiled_query_source' in state:
1397 del state['_Query__compiled_query_source']
1398 return state
1399
1300 def __setitem__(self, filter, value):1400 def __setitem__(self, filter, value):
1301 """Implements the [] operator. Used to set filters.1401 """Implements the [] operator. Used to set filters.
13021402
@@ -1325,8 +1425,6 @@
1325 self.__filter_order[filter] = self.__filter_counter1425 self.__filter_order[filter] = self.__filter_counter
1326 self.__filter_counter += 11426 self.__filter_counter += 1
13271427
1328 self.__cached_count = None
1329
1330 def setdefault(self, filter, value):1428 def setdefault(self, filter, value):
1331 """If the filter exists, returns its value. Otherwise sets it to value.1429 """If the filter exists, returns its value. Otherwise sets it to value.
13321430
@@ -1336,7 +1434,6 @@
1336 """1434 """
1337 datastore_types.ValidateProperty(' ', value)1435 datastore_types.ValidateProperty(' ', value)
1338 self._CheckFilter(filter, value)1436 self._CheckFilter(filter, value)
1339 self.__cached_count = None
1340 return dict.setdefault(self, filter, value)1437 return dict.setdefault(self, filter, value)
13411438
1342 def __delitem__(self, filter):1439 def __delitem__(self, filter):
@@ -1344,7 +1441,6 @@
1344 """1441 """
1345 dict.__delitem__(self, filter)1442 dict.__delitem__(self, filter)
1346 del self.__filter_order[filter]1443 del self.__filter_order[filter]
1347 self.__cached_count = None
13481444
1349 match = Query.FILTER_REGEX.match(filter)1445 match = Query.FILTER_REGEX.match(filter)
1350 property = match.group(1)1446 property = match.group(1)
@@ -1447,92 +1543,42 @@
14471543
1448 return match1544 return match
14491545
1546 def _Run(self, limit=None, offset=None,
1547 prefetch_count=None, next_count=None, **kwargs):
1548 """Deprecated, use .Run instead."""
1549 config = _Rpc2Config(_GetConfigFromKwargs(kwargs))
1550 return self.Run(config=datastore_query.QueryOptions(
1551 config=config,
1552 limit=limit,
1553 offset=offset,
1554 prefetch_size=prefetch_count,
1555 batch_size=next_count))
1556
1450 def _ToPb(self, limit=None, offset=None, count=None):1557 def _ToPb(self, limit=None, offset=None, count=None):
1451 """Converts this Query to its protocol buffer representation. Not1558 query_options = datastore_query.QueryOptions(
1452 intended to be used by application developers. Enforced by hiding the1559 config=self.GetQueryOptions(),
1453 datastore_pb classes.1560 limit=limit,
14541561 offset=offset,
1455 Args:1562 batch_size=count)
1456 # an upper bound on the number of results returned by the query.1563 return self.GetQuery()._to_pb(_GetConnection(), query_options)
1457 limit: int1564
1458 # number of results that match the query to skip. limit is applied1565 def _GetCompiledQuery(self):
1459 # after the offset is fulfilled1566 """Returns the internal-only pb representation of the last query run.
1460 offset: int1567
1461 # the requested initial batch size1568 Do not use.
1462 count: int
1463
1464 Returns:
1465 # the PB representation of this Query
1466 datastore_pb.Query
1467
1468 Raises:
1469 BadRequestError if called inside a transaction and the query does not
1470 include an ancestor.
1471 """1569 """
14721570 try:
1473 if not self.__ancestor and IsInTransaction():1571 compiled_query = self.__compiled_query_source()
1474 raise datastore_errors.BadRequestError(1572 if not compiled_query:
1475 'Only ancestor queries are allowed inside transactions.')1573 raise AttributeError()
14761574 except AttributeError:
1477 pb = datastore_pb.Query()1575 raise AssertionError('No compiled query available, either this query has '
1478 _MaybeSetupTransaction(pb, [self.__ancestor])1576 'not been executed or there is no compilation '
14791577 'available for this kind of query')
1480 if self.__kind is not None:1578 return compiled_query
1481 pb.set_kind(self.__kind.encode('utf-8'))1579
1482 pb.set_keys_only(bool(self.__keys_only))1580 GetCompiledQuery = _GetCompiledQuery
1483 if self.__app:1581 GetCompiledCursor = GetCursor
1484 pb.set_app(self.__app.encode('utf-8'))
1485 datastore_types.SetNamespace(pb, self.__namespace)
1486 if self.__compile:
1487 pb.set_compile(True)
1488 if limit is not None:
1489 pb.set_limit(limit)
1490 if offset is not None:
1491 pb.set_offset(offset)
1492 if count is not None:
1493 pb.set_count(count)
1494 if self.__ancestor:
1495 pb.mutable_ancestor().CopyFrom(self.__ancestor._Key__reference)
1496
1497 if ((self.__hint == self.ORDER_FIRST and self.__orderings) or
1498 (self.__hint == self.ANCESTOR_FIRST and self.__ancestor) or
1499 (self.__hint == self.FILTER_FIRST and len(self) > 0)):
1500 pb.set_hint(self.__hint)
1501
1502 ordered_filters = [(i, f) for f, i in self.__filter_order.iteritems()]
1503 ordered_filters.sort()
1504
1505 for i, filter_str in ordered_filters:
1506 if filter_str not in self:
1507 continue
1508
1509 values = self[filter_str]
1510 match = self._CheckFilter(filter_str, values)
1511 name = match.group(1)
1512
1513 props = datastore_types.ToPropertyPb(name, values)
1514 if not isinstance(props, list):
1515 props = [props]
1516
1517 op = match.group(3)
1518 if op is None:
1519 op = '='
1520
1521 for prop in props:
1522 filter = pb.add_filter()
1523 filter.set_op(self.OPERATORS[op])
1524 filter.add_property().CopyFrom(prop)
1525
1526 for property, direction in self.__orderings:
1527 order = pb.add_order()
1528 order.set_property(property.encode('utf-8'))
1529 order.set_direction(direction)
1530
1531 if self.__cursor:
1532 pb.mutable_compiled_cursor().CopyFrom(self.__cursor)
1533 if self.__end_cursor:
1534 pb.mutable_end_compiled_cursor().CopyFrom(self.__end_cursor)
1535 return pb
15361582
15371583
1538def AllocateIds(model_key, size=None, **kwargs):1584def AllocateIds(model_key, size=None, **kwargs):
@@ -1557,48 +1603,24 @@
1557 in which to allocate IDs1603 in which to allocate IDs
1558 size: integer, number of IDs to allocate.1604 size: integer, number of IDs to allocate.
1559 max: integer, upper bound of the range of IDs to allocate.1605 max: integer, upper bound of the range of IDs to allocate.
1560 rpc: datastore.RPC to use for this request.1606 config: Optional Configuration to use for this request.
15611607
1562 Returns:1608 Returns:
1563 (start, end) of the allocated range, inclusive.1609 (start, end) of the allocated range, inclusive.
1564 """1610 """
1565 max = kwargs.pop('max', None)1611 max = kwargs.pop('max', None)
1566 rpc = GetRpcFromKwargs(kwargs)1612 config = _GetConfigFromKwargs(kwargs)
1613 if getattr(config, 'read_policy', None) == EVENTUAL_CONSISTENCY:
1614 raise datastore_errors.BadRequestError(
1615 'read_policy is only supported on read operations.')
1567 keys, _ = NormalizeAndTypeCheckKeys(model_key)1616 keys, _ = NormalizeAndTypeCheckKeys(model_key)
15681617
1569 if len(keys) > 1:1618 if len(keys) > 1:
1570 raise datastore_errors.BadArgumentError(1619 raise datastore_errors.BadArgumentError(
1571 'Cannot allocate IDs for more than one model key at a time')1620 'Cannot allocate IDs for more than one model key at a time')
15721621
1573 req = datastore_pb.AllocateIdsRequest()1622 rpc = _GetConnection().async_allocate_ids(config, keys[0], size, max)
1574 if size is not None:1623 return rpc.get_result()
1575 if max is not None:
1576 raise datastore_errors.BadArgumentError(
1577 'Cannot allocate ids using both size and max')
1578 if size > _MAX_ID_BATCH_SIZE:
1579 raise datastore_errors.BadArgumentError(
1580 'Cannot allocate more than %s ids at a time; received %s'
1581 % (_MAX_ID_BATCH_SIZE, size))
1582 if size <= 0:
1583 raise datastore_errors.BadArgumentError(
1584 'Cannot allocate less than 1 id; received %s' % size)
1585 req.set_size(size)
1586 if max:
1587 if max < 0:
1588 raise datastore_errors.BadArgumentError(
1589 'Cannot allocate a range with a max less than 0 id; received %s' %
1590 size)
1591 req.set_max(max)
1592
1593 req.mutable_model_key().CopyFrom(keys[0]._ToPb())
1594
1595 try:
1596 resp = _MakeSyncCall('datastore_v3', 'AllocateIds', req,
1597 datastore_pb.AllocateIdsResponse(), rpc)
1598 except apiproxy_errors.ApplicationError, err:
1599 raise _ToDatastoreError(err)
1600
1601 return resp.start(), resp.end()
16021624
16031625
1604class MultiQuery(Query):1626class MultiQuery(Query):
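AllocateIds keeps its public contract while the size/max validation moves into datastore_rpc; either way an inclusive range comes back. Usage sketch (reserve_ids is illustrative and needs a registered datastore stub):

    from google.appengine.api import datastore

    def reserve_ids(model_key, n):
        # Reserves n ids in model_key's path; (start, end) is inclusive,
        # so the range holds exactly end - start + 1 == n ids that the
        # datastore will never assign automatically.
        start, end = datastore.AllocateIds(model_key, size=n)
        assert end - start + 1 == n
        return start, end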
@@ -1642,17 +1664,17 @@
1642 limit: maximum number of values to return.1664 limit: maximum number of values to return.
1643 offset: offset requested -- if nonzero, this will override the offset in1665 offset: offset requested -- if nonzero, this will override the offset in
1644 the original query1666 the original query
1645 rpc: datastore.RPC to use for this request.1667 config: Optional Configuration to use for this request.
16461668
1647 Returns:1669 Returns:
1648 A list of entities with at most "limit" entries (less if the query1670 A list of entities with at most "limit" entries (less if the query
1649 completes before reading limit values).1671 completes before reading limit values).
1650 """1672 """
1651 rpc = GetRpcFromKwargs(kwargs)1673 config = _GetConfigFromKwargs(kwargs)
1652 count = 11674 count = 1
1653 result = []1675 result = []
16541676
1655 iterator = self.Run(rpc=rpc)1677 iterator = self.Run(config=config)
16561678
1657 try:1679 try:
1658 for i in xrange(offset):1680 for i in xrange(offset):
@@ -1782,17 +1804,14 @@
1782 Merge sort the results. First create a list of iterators, then walk1804 Merge sort the results. First create a list of iterators, then walk
1783 though them and yield results in order.1805 though them and yield results in order.
1784 """1806 """
1785 rpc = GetRpcFromKwargs(kwargs)1807 config = _GetConfigFromKwargs(kwargs)
1808 config = _Rpc2Config(config)
1786 results = []1809 results = []
1787 count = 11810 count = 1
1788 log_level = logging.DEBUG - 11811 log_level = logging.DEBUG - 1
1789 for bound_query in self.__bound_queries:1812 for bound_query in self.__bound_queries:
1790 logging.log(log_level, 'Running query #%i' % count)1813 logging.log(log_level, 'Running query #%i' % count)
1791 if rpc:1814 results.append(bound_query.Run(config=config))
1792 rpc_clone = rpc.clone()
1793 else:
1794 rpc_clone = None
1795 results.append(bound_query.Run(rpc=rpc_clone))
1796 count += 11815 count += 1
17971816
1798 def IterateResults(results):1817 def IterateResults(results):
@@ -1852,25 +1871,27 @@
1852 Args:1871 Args:
1853 limit: maximum number of entries to count (for any result > limit, return1872 limit: maximum number of entries to count (for any result > limit, return
1854 limit).1873 limit).
1855 rpc: datastore.RPC to use for this request.1874 config: Optional Configuration to use for this request.
18561875
1857 Returns:1876 Returns:
1858 count of the number of entries returned.1877 count of the number of entries returned.
1859 """1878 """
1860 rpc = GetRpcFromKwargs(kwargs)1879 config = _GetConfigFromKwargs(kwargs)
1861 if limit is None:1880 if limit is None:
1862 count = 01881 count = 0
1863 for i in self.Run(rpc=rpc):1882 for _ in self.Run(config=config):
1864 count += 11883 count += 1
1865 return count1884 return count
1866 else:1885 else:
1867 return len(self.Get(limit, rpc=rpc))1886 return len(self.Get(limit, config=config))
18681887
1869 def GetCompiledCursor(self):1888 def GetCursor(self):
1870 raise AssertionError('No cursor available for a MultiQuery (queries '1889 raise AssertionError('No cursor available for a MultiQuery (queries '
1871 'using "IN" or "!=" operators)')1890 'using "IN" or "!=" operators)')
18721891
1873 def GetCompiledQuery(self):1892
1893 def _GetCompiledQuery(self):
1894 """Internal only, do not use."""
1874 raise AssertionError('No compilation available for a MultiQuery (queries '1895 raise AssertionError('No compilation available for a MultiQuery (queries '
1875 'using "IN" or "!=" operators)')1896 'using "IN" or "!=" operators)')
18761897
@@ -1934,253 +1955,8 @@
1934 def __iter__(self):1955 def __iter__(self):
1935 return iter(self.__bound_queries)1956 return iter(self.__bound_queries)
19361957
19371958 GetCompiledCursor = GetCursor
19381959 GetCompiledQuery = _GetCompiledQuery
1939class Iterator(object):
1940 """An iterator over the results of a datastore query.
1941
1942 Iterators are used to access the results of a Query. An iterator is
1943 obtained by building a Query, then calling Run() on it.
1944
1945 Iterator implements Python's iterator protocol, so results can be accessed
1946 with the for and in statements:
1947
1948 > it = Query('Person').Run()
1949 > for person in it:
1950 > print 'Hi, %s!' % person['name']
1951 """
1952 def __init__(self, query_result_pb, batch_size=None, rpc=None,
1953 query_request_pb=None):
1954 """Constructor.
1955
1956 kwargs gets stored and passed on to Next calls made by this iterator.
1957 """
1958 self.__cursor = query_result_pb.cursor()
1959 self.__keys_only = query_result_pb.keys_only()
1960 self.__batch_size = batch_size
1961 self.__rpc = rpc
1962 self.__skipped_results = 0
1963
1964 self.__results_since_prev = 0
1965 self.__prev_compiled_cursor = None
1966 self.__next_compiled_cursor = None
1967
1968 if query_request_pb:
1969 self.__remaining_offset = query_request_pb.offset()
1970 else:
1971 self.__remaining_offset = 0
1972
1973 if query_request_pb and query_result_pb.has_compiled_cursor():
1974 if query_request_pb.has_compiled_cursor():
1975 self.__next_compiled_cursor = query_request_pb.compiled_cursor()
1976 else:
1977 self.__next_compiled_cursor = datastore_pb.CompiledCursor()
1978 self.__buffer = self._ProcessQueryResult(query_result_pb)
1979 self.__results_since_prev = query_request_pb.offset()
1980 else:
1981 self.__buffer = self._ProcessQueryResult(query_result_pb)
1982
1983 def _Get(self, count):
1984 """Gets the next count result(s) of the query.
1985
1986 Not intended to be used by application developers. Use the python
1987 iterator protocol instead.
1988
1989 This method uses _Next to return the next entities or keys from the list of
1990 matching results. If the query specified a sort order, results are returned
1991 in that order. Otherwise, the order is undefined.
1992
1993 The argument, count, specifies the number of results to return. However, the
1994 length of the returned list may be smaller than count. This is the case only
1995 if count is greater than the number of remaining results.
1996
1997 The results are always returned as a list. If there are no results left,
1998 an empty list is returned.
1999
2000 Args:
2001 # the number of results to return; must be >= 1
2002 count: int or long
2003
2004 Returns:
2005 # a list of entities or keys
2006 [Entity or Key, ...]
2007 """
2008 entity_list = self._Next(count)
2009 while len(entity_list) < count and self.__more_results:
2010 entity_list += self._Next(count - len(entity_list))
 2011 return entity_list
2012
2013 def _Next(self, count=None):
2014 """Returns the next batch of results.
2015
2016 Not intended to be used by application developers. Use the python
2017 iterator protocol instead.
2018
 2019 Values are returned in the order they are received from the datastore.
2020
2021 If there are values in the internal buffer they are returned, otherwise a
2022 single RPC is run in an attempt to fulfill the request.
2023
2024 The optional argument, count, specifies the number of results to return.
2025 However, the length of the returned list may be smaller than count. This is
2026 the case if:
2027 - the local buffer has results and count is greater than the number of
2028 results in the buffer.
2029 - count is greater than the number of remaining results
2030 - the size of the remaining results exceeds the RPC buffer limit
2031 Use _Get to ensure all possible entities are retrieved.
2032
2033 When count is None, if there are items in the local buffer, they are
2034 all returned, otherwise the datastore backend is allowed to decide how many
2035 entities to send.
2036
2037 The internal buffer is also used by the next() method so it is best not to
2038 mix _Next() and next().
2039
2040 The results are always returned as a list. If there are results left, at
2041 least one result will be returned in this list. If there are no results
2042 left, an empty list is returned.
2043
2044 Args:
2045 # the number of results to return; must be >= 1
2046 count: int or long or None
2047
2048 Returns:
2049 # a list of entities or keys
2050 [Entity or Key, ...]
2051 """
2052 if count is not None and (not isinstance(count, (int, long)) or count < 0):
2053 raise datastore_errors.BadArgumentError(
2054 'Argument to _Next must be an int greater than or equal to 0; received '
2055 '%s (a %s)' % (count, typename(count)))
2056
2057 if self.__buffer:
2058 if count is None:
2059 entity_list = self.__buffer
2060 self.__buffer = []
2061 elif count <= len(self.__buffer):
2062 entity_list = self.__buffer[:count]
2063 del self.__buffer[:count]
2064 else:
2065 entity_list = self.__buffer
2066 self.__buffer = []
2067 self.__results_since_prev += len(entity_list)
2068 return entity_list
2069
2070
2071 if not self.__more_results:
2072 return []
2073
2074 req = datastore_pb.NextRequest()
2075 if self.__remaining_offset:
2076 req.set_offset(self.__remaining_offset)
2077 if count is not None:
2078 req.set_count(count)
2079 if self.__next_compiled_cursor:
2080 req.set_compile(True)
2081 req.mutable_cursor().CopyFrom(self.__cursor)
2082 try:
2083 rpc = self.__rpc
2084 if rpc:
2085 self.__rpc = rpc.clone()
2086
2087 result = _MakeSyncCall('datastore_v3', 'Next', req,
2088 datastore_pb.QueryResult(), rpc)
2089 except apiproxy_errors.ApplicationError, err:
2090 raise _ToDatastoreError(err)
2091
2092 new_batch = self._ProcessQueryResult(result)
2093 if not self.__has_advanced:
2094 self.__more_results = False
2095 return new_batch
2096
2097 def _ProcessQueryResult(self, result):
2098 """Returns all results from datastore_pb.QueryResult and updates
2099 self.__more_results
2100
2101 Not intended to be used by application developers. Use the python
2102 iterator protocol instead.
2103
2104 The results are always returned as a list. If there are no results left,
2105 an empty list is returned.
2106
2107 Args:
2108 # the instance of datastore_pb.QueryResult to be stored
2109 result: datastore_pb.QueryResult
2110
2111 Returns:
2112 # a list of entities or keys
2113 [Entity or Key, ...]
2114 """
2115 if self.__next_compiled_cursor and result.has_compiled_cursor():
2116 self.__prev_compiled_cursor = self.__next_compiled_cursor
2117 self.__next_compiled_cursor = result.compiled_cursor()
2118 self.__results_since_prev = 0
2119
2120 self.__more_results = result.more_results()
2121 if result.skipped_results():
2122 self.__has_advanced = True
2123 self.__skipped_results += result.skipped_results()
2124 self.__remaining_offset -= result.skipped_results()
2125 else:
2126 self.__has_advanced = result.result_size() > 0
2127
2128 if self.__keys_only:
2129 return [Key._FromPb(e.key()) for e in result.result_list()]
2130 else:
2131 return [Entity._FromPb(e) for e in result.result_list()]
2132
2133 def _SkippedResults(self):
2134 self.__PrepBuffer()
2135 return self.__skipped_results
2136
2137 def GetCompiledCursor(self, query):
2138 if not self.__buffer:
2139 return self.__next_compiled_cursor
2140 elif not self.__results_since_prev:
2141 return self.__prev_compiled_cursor
2142 elif self.__prev_compiled_cursor:
2143 return Query._RunInternal(query._ToPb(limit=0,
2144 offset=self.__results_since_prev),
2145 rpc=self.__rpc)[0].GetCompiledCursor(query)
2146 else:
2147 return None
2148
2149 def next(self):
2150 self.__PrepBuffer()
2151 try:
2152 result = self.__buffer.pop(0)
2153 except IndexError:
2154 raise StopIteration
2155 self.__results_since_prev += 1
2156 return result
2157
2158 def __PrepBuffer(self):
2159 """Loads the next set of values into the local buffer if needed."""
2160 while not self.__buffer and self.__more_results:
2161 self.__buffer = self._Next(self.__batch_size)
2162
2163 def __iter__(self): return self
2164
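
Reviewer note: the Iterator class removed above implements a buffered-batch pattern — serve results out of a local buffer, refilling it one Next RPC at a time via __PrepBuffer(). A minimal, self-contained sketch of the same pattern, assuming only a hypothetical backend object with a fetch(count) method (not part of the SDK):

    class BatchedIterator(object):
      """Buffers batches from a backend and serves them one item at a time."""

      def __init__(self, backend, batch_size=20):
        self._backend = backend
        self._batch_size = batch_size
        self._buffer = []
        self._more = True

      def next(self):
        # Refill the buffer with one backend round trip when it runs dry.
        while not self._buffer and self._more:
          batch = self._backend.fetch(self._batch_size)
          self._more = bool(batch)
          self._buffer.extend(batch)
        if not self._buffer:
          raise StopIteration
        return self._buffer.pop(0)

      def __iter__(self):
        return self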
2165class _Transaction(object):
2166 """Encapsulates a transaction currently in progress.
2167
2168 If we've sent a BeginTransaction call, then handle will be a
2169 datastore_pb.Transaction that holds the transaction handle.
2170
2171 If we know the entity group for this transaction, it's stored in the
2172 entity_group attribute, which is set by RunInTransaction().
2173
2174 modified_keys is a set containing the Keys of all entities modified (ie put
2175 or deleted) in this transaction. If an entity is modified more than once, a
2176 BadRequestError is raised.
2177 """
2178 def __init__(self):
2179 """Initializes modified_keys to the empty set."""
2180 self.handle = None
2181 self.entity_group = None
2182 self.modified_keys = None
2183 self.modified_keys = set()
21841960
21851961
2186def RunInTransaction(function, *args, **kwargs):1962def RunInTransaction(function, *args, **kwargs):
@@ -2211,7 +1987,8 @@
2211 Runs the user-provided function inside a full-featured, ACID datastore1987 Runs the user-provided function inside a full-featured, ACID datastore
2212 transaction. Every Put, Get, and Delete call in the function is made within1988 transaction. Every Put, Get, and Delete call in the function is made within
2213 the transaction. All entities involved in these calls must belong to the1989 the transaction. All entities involved in these calls must belong to the
2214 same entity group. Queries are not supported.1990 same entity group. Queries are supported as long as they specify an
1991 ancestor belonging to the same entity group.
22151992
2216 The trailing arguments are passed to the function as positional arguments.1993 The trailing arguments are passed to the function as positional arguments.
2217 If the function returns a value, that value will be returned by1994 If the function returns a value, that value will be returned by
@@ -2260,7 +2037,7 @@
2260 Nested transactions are not supported.2037 Nested transactions are not supported.
22612038
2262 Args:2039 Args:
2263 # number of retries2040 # number of retries (not counting the initial try)
2264 retries: integer2041 retries: integer
2265 # a function to be run inside the transaction2042 # a function to be run inside the transaction
2266 function: callable2043 function: callable
@@ -2274,142 +2051,78 @@
2274 TransactionFailedError, if the transaction could not be committed.2051 TransactionFailedError, if the transaction could not be committed.
2275 """2052 """
22762053
2277 if _CurrentTransactionKey():
2278 raise datastore_errors.BadRequestError(
2279 'Nested transactions are not supported.')
2280
2281 if retries < 0:2054 if retries < 0:
2282 raise datastore_errors.BadRequestError(2055 raise datastore_errors.BadRequestError(
 2283 'Number of retries should be a non-negative number.')2056 'Number of retries should be a non-negative number.')
22842057
2285 tx_key = None2058 if IsInTransaction():
2059 raise datastore_errors.BadRequestError(
2060 'Nested transactions are not supported.')
2061
2062 old_connection = _GetConnection()
2063 for i in range(0, retries + 1):
2064 new_connection = old_connection.new_transaction()
2065 _SetConnection(new_connection)
2066 try:
2067 ok, result = _DoOneTry(new_connection, function, args, kwargs)
2068 if ok:
2069 return result
2070 finally:
2071 _SetConnection(old_connection)
2072
2073 raise datastore_errors.TransactionFailedError(
2074 'The transaction could not be committed. Please try again.')
2075
2076
2077def _DoOneTry(new_connection, function, args, kwargs):
2078 """Helper to call a function in a transaction, once.
2079
2080 Args:
2081 new_connection: The new, transactional, connection object.
2082 function: The function to call.
2083 args: Tuple of positional arguments.
2084 kwargs: Dict of keyword arguments.
2085 """
22862086
2287 try:2087 try:
2288 tx_key = _NewTransactionKey()2088 result = function(*args, **kwargs)
2289 tx = _Transaction()2089
2290 _txes[tx_key] = tx2090 except:
22912091 original_exception = sys.exc_info()
2292 for i in range(0, retries + 1):2092
2293 tx.modified_keys.clear()2093 try:
22942094 new_connection.rollback()
2295 try:2095 except Exception:
2296 result = function(*args, **kwargs)2096 logging.exception('Exception sending Rollback:')
2297 except:2097
2298 original_exception = sys.exc_info()2098 type, value, trace = original_exception
22992099 if isinstance(value, datastore_errors.Rollback):
2300 if tx.handle:2100 return True, None
2301 try:2101 else:
2302 _MakeSyncCall('datastore_v3', 'Rollback',2102 raise type, value, trace
2303 tx.handle, api_base_pb.VoidProto())2103
2304 except:2104 else:
2305 logging.info('Exception sending Rollback:\n' +2105 if new_connection.commit():
2306 traceback.format_exc())2106 return True, result
23072107 else:
2308 type, value, trace = original_exception2108 logging.warning('Transaction collision. Retrying... %s', '')
2309 if type is datastore_errors.Rollback:2109 return False, None
2310 return
2311 else:
2312 raise type, value, trace
2313
2314 if tx.handle:
2315 try:
2316 _MakeSyncCall('datastore_v3', 'Commit',
2317 tx.handle, datastore_pb.CommitResponse())
2318 except apiproxy_errors.ApplicationError, err:
2319 if (err.application_error ==
2320 datastore_pb.Error.CONCURRENT_TRANSACTION):
2321 logging.warning('Transaction collision for entity group with '
2322 'key %r. Retrying...', tx.entity_group)
2323 tx.handle = None
2324 tx.entity_group = None
2325 continue
2326 else:
2327 raise _ToDatastoreError(err)
2328
2329 return result
2330
2331 raise datastore_errors.TransactionFailedError(
2332 'The transaction could not be committed. Please try again.')
2333
2334 finally:
2335 if tx_key in _txes:
2336 del _txes[tx_key]
2337 del tx_key
23382110
23392111
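
Reviewer note: for reference, a hedged usage sketch of RunInTransaction under the new connection-based retry loop above. The counter kind, its 'remaining' property, and counter_key are hypothetical, and this assumes a running App Engine environment:

    from google.appengine.api import datastore
    from google.appengine.api import datastore_errors

    def decrement(counter_key):
      # Every Get/Put here runs inside one transaction; on a commit
      # collision, RunInTransaction invokes this function again.
      counter = datastore.Get(counter_key)
      if counter['remaining'] <= 0:
        # Raising Rollback aborts the transaction; the caller gets None
        # back instead of an exception.
        raise datastore_errors.Rollback()
      counter['remaining'] -= 1
      datastore.Put(counter)
      return counter['remaining']

    remaining = datastore.RunInTransaction(decrement, counter_key)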
2340def _MaybeSetupTransaction(request, keys):2112def _MaybeSetupTransaction(request, keys):
2341 """Begins a transaction, if necessary, and populates it in the request.2113 """Begin a transaction, if necessary, and populate it in the request.
23422114
2343 If we're currently inside a transaction, this records the entity group,2115 This API exists for internal backwards compatibility, primarily with
2344 checks that the keys are all in that entity group, creates the transaction2116 api/taskqueue/taskqueue.py.
2345 PB, and sends the BeginTransaction. It then populates the transaction handle
2346 in the request.
2347
2348 Raises BadRequestError if the entity has a different entity group than the
2349 current transaction.
23502117
2351 Args:2118 Args:
2352 request: GetRequest, PutRequest, DeleteRequest, or Query2119 request: A protobuf with a mutable_transaction() method.
2353 keys: sequence of Keys2120 keys: Unused.
23542121
2355 Returns:2122 Returns:
2356 _Transaction if we're inside a transaction, otherwise None2123 A transaction if we're inside a transaction, otherwise None
2357 """2124 """
2358 assert isinstance(request, (datastore_pb.GetRequest, datastore_pb.PutRequest,2125 return _GetConnection()._set_request_transaction(request)
2359 datastore_pb.DeleteRequest, datastore_pb.Query,
2360 taskqueue_service_pb.TaskQueueAddRequest,
2361 )), request.__class__
2362 tx_key = None
2363
2364 try:
2365 tx_key = _CurrentTransactionKey()
2366 if tx_key:
2367 tx = _txes[tx_key]
2368
2369 groups = [k.entity_group() for k in keys]
2370 if tx.entity_group:
2371 expected_group = tx.entity_group
2372 elif groups:
2373 expected_group = groups[0]
2374 else:
2375 expected_group = None
2376
2377 for group in groups:
2378 if (group != expected_group or
2379
2380
2381
2382
2383
2384
2385
2386 (not group.has_id_or_name() and group is not expected_group)):
2387 raise _DifferentEntityGroupError(expected_group, group)
2388
2389 if not tx.entity_group and group.has_id_or_name():
2390 tx.entity_group = group
2391
2392 if not tx.handle:
2393 req = datastore_pb.BeginTransactionRequest()
2394 if keys:
2395 req.set_app(keys[0].app())
2396 else:
2397 assert isinstance(request, taskqueue_service_pb.TaskQueueAddRequest)
2398 req.set_app(os.environ['APPLICATION_ID'])
2399 assert req.app()
2400
2401 tx.handle = _MakeSyncCall('datastore_v3', 'BeginTransaction',
2402 req, datastore_pb.Transaction())
2403
2404 if not tx.handle.app():
2405 tx.handle.set_app(req.app())
2406
2407 request.mutable_transaction().CopyFrom(tx.handle)
2408
2409 return tx
2410
2411 finally:
2412 del tx_key
24132126
24142127
2415def IsInTransaction():2128def IsInTransaction():
@@ -2418,52 +2131,7 @@
2418 Returns:2131 Returns:
2419 True if already running in transaction, else False.2132 True if already running in transaction, else False.
2420 """2133 """
2421 return bool(_CurrentTransactionKey())2134 return isinstance(_GetConnection(), datastore_rpc.TransactionalConnection)
2422
2423
2424def _DifferentEntityGroupError(a, b):
2425 """Raises a BadRequestError that says the given entity groups are different.
2426
2427 Includes the two entity groups in the message, formatted more clearly and
2428 concisely than repr(Key).
2429
2430 Args:
2431 a, b are both Keys that represent entity groups.
2432 """
2433 def id_or_name(key):
2434 if key.name():
2435 return 'name=%r' % key.name()
2436 else:
2437 return 'id=%r' % key.id()
2438
2439 raise datastore_errors.BadRequestError(
2440 'Cannot operate on different entity groups in a transaction: '
2441 '(kind=%r, %s) and (kind=%r, %s).' % (a.kind(), id_or_name(a),
2442 b.kind(), id_or_name(b)))
2443
2444
2445def _FindTransactionFrameInStack():
2446 """Walks the stack to find a RunInTransaction() call.
2447
2448 Returns:
2449 # this is the RunInTransactionCustomRetries() frame record, if found
2450 frame record or None
2451 """
2452 frame = sys._getframe()
2453 filename = frame.f_code.co_filename
2454
2455 frame = frame.f_back.f_back
2456 while frame:
2457 if (frame.f_code.co_filename == filename and
2458 frame.f_code.co_name == 'RunInTransactionCustomRetries'):
2459 return frame
2460 frame = frame.f_back
2461
2462 return None
2463
2464_CurrentTransactionKey = _FindTransactionFrameInStack
2465
2466_NewTransactionKey = sys._getframe
24672135
24682136
2469def _GetCompleteKeyOrError(arg):2137def _GetCompleteKeyOrError(arg):
@@ -2541,44 +2209,29 @@
2541 dictionary[key] = value2209 dictionary[key] = value
25422210
25432211
2544def _ToDatastoreError(err):2212class Iterator(datastore_query.ResultsIterator):
2545 """Converts an apiproxy.ApplicationError to an error in datastore_errors.2213 """Thin wrapper of datastore_query.ResultsIterator.
25462214
 2547 Args:2215 Deprecated, do not use, only for backwards compatibility.
2548 err: apiproxy.ApplicationError2216 """
25492217 def _Next(self, count=None):
2550 Returns:2218 if count is None:
2551 a subclass of datastore_errors.Error2219 count = 20
2552 """2220 result = []
2553 return _DatastoreExceptionFromErrorCodeAndDetail(err.application_error,2221 for r in self:
2554 err.error_detail)2222 if len(result) >= count:
25552223 break
25562224 result.append(r)
2557def _DatastoreExceptionFromErrorCodeAndDetail(error, detail):2225 return result
2558 """Converts a datastore_pb.Error into a datastore_errors.Error.2226
25592227 def GetCompiledCursor(self, query):
2560 Args:2228 return self.cursor()
2561 error: A member of the datastore_pb.Error enumeration.2229
2562 detail: A string providing extra details about the error.2230 _Get = _Next
25632231
2564 Returns:2232
2565 A subclass of datastore_errors.Error.2233DatastoreRPC = apiproxy_stub_map.UserRPC
2566 """2234GetRpcFromKwargs = _GetConfigFromKwargs
2567 exception_class = {2235_CurrentTransactionKey = IsInTransaction
2568 datastore_pb.Error.BAD_REQUEST: datastore_errors.BadRequestError,2236_ToDatastoreError = datastore_rpc._ToDatastoreError
2569 datastore_pb.Error.CONCURRENT_TRANSACTION:2237_DatastoreExceptionFromErrorCodeAndDetail = datastore_rpc._DatastoreExceptionFromErrorCodeAndDetail
2570 datastore_errors.TransactionFailedError,
2571 datastore_pb.Error.INTERNAL_ERROR: datastore_errors.InternalError,
2572 datastore_pb.Error.NEED_INDEX: datastore_errors.NeedIndexError,
2573 datastore_pb.Error.TIMEOUT: datastore_errors.Timeout,
2574 datastore_pb.Error.BIGTABLE_ERROR: datastore_errors.Timeout,
2575 datastore_pb.Error.COMMITTED_BUT_STILL_APPLYING:
2576 datastore_errors.CommittedButStillApplying,
2577 datastore_pb.Error.CAPABILITY_DISABLED:
2578 apiproxy_errors.CapabilityDisabledError,
2579 }.get(error, datastore_errors.Error)
2580
2581 if detail is None:
2582 return exception_class()
2583 else:
2584 return exception_class(detail)
25852238
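
Reviewer note: the error-mapping helper deleted above lives on in datastore_rpc (see the alias at the end of the file). The underlying pattern is a dict dispatch from error code to exception class with a base-class fallback; a self-contained sketch with illustrative codes and classes (not the real datastore_pb values):

    class Error(Exception): pass
    class BadRequestError(Error): pass
    class TimeoutError(Error): pass

    _ERROR_CLASSES = {1: BadRequestError, 7: TimeoutError}

    def exception_for(code, detail=None):
      # Unknown codes fall back to the generic Error base class.
      cls = _ERROR_CLASSES.get(code, Error)
      if detail is None:
        return cls()
      return cls(detail)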
=== modified file 'AppServer/google/appengine/api/datastore_distributed.py'
--- AppServer/google/appengine/api/datastore_distributed.py 2010-12-17 22:47:53 +0000
+++ AppServer/google/appengine/api/datastore_distributed.py 2010-12-24 09:11:16 +0000
@@ -58,9 +58,9 @@
5858
59SSL_DEFAULT_PORT = 844359SSL_DEFAULT_PORT = 8443
60try:60try:
61 __import__('google.appengine.api.labs.taskqueue.taskqueue_service_pb')61 __import__('google.appengine.api.taskqueue.taskqueue_service_pb')
62 taskqueue_service_pb = sys.modules.get(62 taskqueue_service_pb = sys.modules.get(
63 'google.appengine.api.labs.taskqueue.taskqueue_service_pb')63 'google.appengine.api.taskqueue.taskqueue_service_pb')
64except ImportError:64except ImportError:
65 from google.appengine.api.taskqueue import taskqueue_service_pb65 from google.appengine.api.taskqueue import taskqueue_service_pb
6666
@@ -528,18 +528,10 @@
528 def _RemoteSend(self, request, response, method):528 def _RemoteSend(self, request, response, method):
529 tag = self.__app_id529 tag = self.__app_id
530 user = users.GetCurrentUser()530 user = users.GetCurrentUser()
531 APPSCALE_VERSION = '1'
532 try:
533 APPSCALE_VERSION = os.environ['APPSCALE_VERSION']
534 except Exception, e:
535 logging.info("WARNING: Appscale version secret not set")
536
537 if user != None:531 if user != None:
538 tag += ":" + user.email()532 tag += ":" + user.email()
539 tag += ":" + user.nickname()533 tag += ":" + user.nickname()
540 tag += ":" + user.auth_domain()534 tag += ":" + user.auth_domain()
541 if APPSCALE_VERSION:
542 tag += ":" + APPSCALE_VERSION
543 api_request = remote_api_pb.Request()535 api_request = remote_api_pb.Request()
544 api_request.set_method(method)536 api_request.set_method(method)
545 api_request.set_service_name("datastore_v3")537 api_request.set_service_name("datastore_v3")
546538
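
Reviewer note: after this hunk the request tag is just the app id plus the signed-in user's fields; the APPSCALE_VERSION suffix is dropped. A sketch of the resulting tag assembly (build_tag is a hypothetical helper; user mirrors the object returned by users.GetCurrentUser()):

    def build_tag(app_id, user=None):
      # Tag identifies the app, plus the current user when one is signed in.
      tag = app_id
      if user is not None:
        tag += ':' + user.email()
        tag += ':' + user.nickname()
        tag += ':' + user.auth_domain()
      return tag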
=== modified file 'AppServer/google/appengine/api/datastore_file_stub.py'
--- AppServer/google/appengine/api/datastore_file_stub.py 2010-11-30 10:37:25 +0000
+++ AppServer/google/appengine/api/datastore_file_stub.py 2010-12-24 09:11:16 +0000
@@ -42,7 +42,6 @@
42import sys42import sys
43import tempfile43import tempfile
44import threading44import threading
45import warnings
4645
47import cPickle as pickle46import cPickle as pickle
4847
@@ -53,6 +52,7 @@
53from google.appengine.api import datastore_errors52from google.appengine.api import datastore_errors
54from google.appengine.api import datastore_types53from google.appengine.api import datastore_types
55from google.appengine.api import users54from google.appengine.api import users
55from google.appengine.api.taskqueue import taskqueue_service_pb
56from google.appengine.datastore import datastore_pb56from google.appengine.datastore import datastore_pb
57from google.appengine.datastore import datastore_index57from google.appengine.datastore import datastore_index
58from google.appengine.datastore import datastore_stub_util58from google.appengine.datastore import datastore_stub_util
@@ -60,24 +60,12 @@
60from google.net.proto import ProtocolBuffer60from google.net.proto import ProtocolBuffer
61from google.appengine.datastore import entity_pb61from google.appengine.datastore import entity_pb
6262
63try:
64 __import__('google.appengine.api.labs.taskqueue.taskqueue_service_pb')
65 taskqueue_service_pb = sys.modules.get(
66 'google.appengine.api.labs.taskqueue.taskqueue_service_pb')
67except ImportError:
68 from google.appengine.api.taskqueue import taskqueue_service_pb
6963
70entity_pb.Reference.__hash__ = lambda self: hash(self.Encode())64entity_pb.Reference.__hash__ = lambda self: hash(self.Encode())
71datastore_pb.Query.__hash__ = lambda self: hash(self.Encode())65datastore_pb.Query.__hash__ = lambda self: hash(self.Encode())
72datastore_pb.Transaction.__hash__ = lambda self: hash(self.Encode())66datastore_pb.Transaction.__hash__ = lambda self: hash(self.Encode())
7367
7468
75_MAXIMUM_RESULTS = 1000
76
77
78_MAX_QUERY_OFFSET = 1000
79
80
81_MAX_QUERY_COMPONENTS = 10069_MAX_QUERY_COMPONENTS = 100
8270
8371
@@ -87,9 +75,6 @@
87_MAX_ACTIONS_PER_TXN = 575_MAX_ACTIONS_PER_TXN = 5
8876
8977
90_CURSOR_CONCAT_STR = '!CURSOR!'
91
92
93class _StoredEntity(object):78class _StoredEntity(object):
94 """Simple wrapper around an entity stored by the stub.79 """Simple wrapper around an entity stored by the stub.
9580
@@ -109,266 +94,8 @@
10994
110 self.encoded_protobuf = entity.Encode()95 self.encoded_protobuf = entity.Encode()
11196
112 self.native = datastore.Entity._FromPb(entity)97 self.native = datastore.Entity._FromPb(entity,
11398 validate_reserved_properties=False)
114
115class _Cursor(object):
116 """A query cursor.
117
118 Public properties:
119 cursor: the integer cursor
120 count: the original total number of results
121 keys_only: whether the query is keys_only
122 app: the app for which this cursor was created
123
124 Class attributes:
125 _next_cursor: the next cursor to allocate
126 _next_cursor_lock: protects _next_cursor
127 """
128 _next_cursor = 1
129 _next_cursor_lock = threading.Lock()
130
131 def __init__(self, query, results, order_compare_entities):
132 """Constructor.
133
134 Args:
135 query: the query request proto
136 # the query results, in order, such that results[self.offset+1] is
137 # the next result
138 results: list of datastore.Entity
139 order_compare_entities: a __cmp__ function for datastore.Entity that
140 follows sort order as specified by the query
141 """
142
143 if query.has_compiled_cursor() and query.compiled_cursor().position_list():
144 (self.__last_result, inclusive) = self._DecodeCompiledCursor(
145 query, query.compiled_cursor())
146 start_cursor_position = _Cursor._GetCursorOffset(results,
147 self.__last_result,
148 inclusive,
149 order_compare_entities)
150 else:
151 self.__last_result = None
152 start_cursor_position = 0
153
154 if query.has_end_compiled_cursor():
155 (end_cursor_entity, inclusive) = self._DecodeCompiledCursor(
156 query, query.end_compiled_cursor())
157 end_cursor_position = _Cursor._GetCursorOffset(results,
158 end_cursor_entity,
159 inclusive,
160 order_compare_entities)
161 else:
162 end_cursor_position = len(results)
163
164 results = results[start_cursor_position:end_cursor_position]
165
166 if query.has_limit():
167 limit = query.limit()
168 if query.offset():
169 limit += query.offset()
170 if limit > 0 and limit < len(results):
171 results = results[:limit]
172
173 self.__results = results
174 self.__query = query
175 self.__offset = 0
176
177 self.app = query.app()
178 self.keys_only = query.keys_only()
179 self.count = len(self.__results)
180 self.cursor = self._AcquireCursorID()
181
182 def _AcquireCursorID(self):
183 """Acquires the next cursor id in a thread safe manner.
184 """
185 self._next_cursor_lock.acquire()
186 try:
187 cursor_id = _Cursor._next_cursor
188 _Cursor._next_cursor += 1
189 finally:
190 self._next_cursor_lock.release()
191 return cursor_id
192
193 @staticmethod
194 def _GetCursorOffset(results, cursor_entity, inclusive, compare):
195 """Converts a cursor entity into a offset into the result set even if the
196 cursor_entity no longer exists.
197
198 Args:
199 cursor_entity: the decoded datastore.Entity from the compiled query
200 inclusive: boolean that specifies if to offset past the cursor_entity
201 compare: a function that takes two datastore.Entity and compares them
202 Returns:
203 the integer offset
204 """
205 lo = 0
206 hi = len(results)
207 if inclusive:
208 while lo < hi:
209 mid = (lo + hi) // 2
210 if compare(results[mid], cursor_entity) < 0:
211 lo = mid + 1
212 else:
213 hi = mid
214 else:
215 while lo < hi:
216 mid = (lo + hi) // 2
217 if compare(cursor_entity, results[mid]) < 0:
218 hi = mid
219 else:
220 lo = mid + 1
221 return lo
222
223 def _ValidateQuery(self, query, query_info):
224 """Ensure that the given query matches the query_info.
225
226 Args:
 227 query: datastore_pb.Query instance we are checking
228 query_info: datastore_pb.Query instance we want to match
229
230 Raises BadRequestError on failure.
231 """
232 error_msg = 'Cursor does not match query: %s'
233 exc = datastore_errors.BadRequestError
234 if query_info.filter_list() != query.filter_list():
235 raise exc(error_msg % 'filters do not match')
236 if query_info.order_list() != query.order_list():
237 raise exc(error_msg % 'orders do not match')
238
239 for attr in ('ancestor', 'kind', 'name_space', 'search_query'):
240 query_info_has_attr = getattr(query_info, 'has_%s' % attr)
241 query_info_attr = getattr(query_info, attr)
242 query_has_attr = getattr(query, 'has_%s' % attr)
243 query_attr = getattr(query, attr)
244 if query_info_has_attr():
245 if not query_has_attr() or query_info_attr() != query_attr():
246 raise exc(error_msg % ('%s does not match' % attr))
247 elif query_has_attr():
248 raise exc(error_msg % ('%s does not match' % attr))
249
250 def _MinimalQueryInfo(self, query):
251 """Extract the minimal set of information for query matching.
252
253 Args:
254 query: datastore_pb.Query instance from which to extract info.
255
256 Returns:
257 datastore_pb.Query instance suitable for matching against when
258 validating cursors.
259 """
260 query_info = datastore_pb.Query()
261 query_info.set_app(query.app())
262
263 for filter in query.filter_list():
264 query_info.filter_list().append(filter)
265 for order in query.order_list():
266 query_info.order_list().append(order)
267
268 if query.has_ancestor():
269 query_info.mutable_ancestor().CopyFrom(query.ancestor())
270
271 for attr in ('kind', 'name_space', 'search_query'):
272 query_has_attr = getattr(query, 'has_%s' % attr)
273 query_attr = getattr(query, attr)
274 query_info_set_attr = getattr(query_info, 'set_%s' % attr)
275 if query_has_attr():
276 query_info_set_attr(query_attr())
277
278 return query_info
279
280 def _MinimalEntityInfo(self, entity_proto, query):
281 """Extract the minimal set of information that preserves entity order.
282
283 Args:
284 entity_proto: datastore_pb.EntityProto instance from which to extract
285 information
286 query: datastore_pb.Query instance for which ordering must be preserved.
287
288 Returns:
289 datastore_pb.EntityProto instance suitable for matching against a list of
290 results when finding cursor positions.
291 """
 292 entity_info = datastore_pb.EntityProto()
293 order_names = [o.property() for o in query.order_list()]
294 entity_info.mutable_key().MergeFrom(entity_proto.key())
295 entity_info.mutable_entity_group().MergeFrom(entity_proto.entity_group())
296 for prop in entity_proto.property_list():
297 if prop.name() in order_names:
298 entity_info.add_property().MergeFrom(prop)
 299 return entity_info
300
301 def _DecodeCompiledCursor(self, query, compiled_cursor):
302 """Converts a compiled_cursor into a cursor_entity.
303
304 Returns:
305 (cursor_entity, inclusive): a datastore.Entity and if it should be
306 included in the result set.
307 """
308 assert len(compiled_cursor.position_list()) == 1
309
310 position = compiled_cursor.position(0)
311 entity_pb = datastore_pb.EntityProto()
312 (query_info_encoded, entity_encoded) = position.start_key().split(
313 _CURSOR_CONCAT_STR, 1)
314 query_info_pb = datastore_pb.Query()
315 query_info_pb.ParseFromString(query_info_encoded)
316 self._ValidateQuery(query, query_info_pb)
317
318 entity_pb.ParseFromString(entity_encoded)
319 return (datastore.Entity._FromPb(entity_pb, True),
320 position.start_inclusive())
321
322 def _EncodeCompiledCursor(self, query, compiled_cursor):
323 """Converts the current state of the cursor into a compiled_cursor
324
325 Args:
326 query: the datastore_pb.Query this cursor is related to
 327 compiled_cursor: an empty datastore_pb.CompiledCursor
328 """
329 if self.__last_result is not None:
330 position = compiled_cursor.add_position()
331 query_info = self._MinimalQueryInfo(query)
332 entity_info = self._MinimalEntityInfo(self.__last_result.ToPb(), query)
333 start_key = _CURSOR_CONCAT_STR.join((
334 query_info.Encode(),
335 entity_info.Encode()))
336 position.set_start_key(str(start_key))
337 position.set_start_inclusive(False)
338
339 def PopulateQueryResult(self, result, count, offset, compile=False):
340 """Populates a QueryResult with this cursor and the given number of results.
341
342 Args:
343 result: datastore_pb.QueryResult
344 count: integer of how many results to return
345 offset: integer of how many results to skip
346 compile: boolean, whether we are compiling this query
347 """
348 offset = min(offset, self.count - self.__offset)
349 limited_offset = min(offset, _MAX_QUERY_OFFSET)
350 if limited_offset:
351 self.__offset += limited_offset
352 result.set_skipped_results(limited_offset)
353
354 if offset == limited_offset and count:
355 if count > _MAXIMUM_RESULTS:
356 count = _MAXIMUM_RESULTS
357 results = self.__results[self.__offset:self.__offset + count]
358 count = len(results)
359 self.__offset += count
360 result.result_list().extend(r._ToPb() for r in results)
361
362 if self.__offset:
363 self.__last_result = self.__results[self.__offset - 1]
364
365 result.mutable_cursor().set_app(self.app)
366 result.mutable_cursor().set_cursor(self.cursor)
367 result.set_keys_only(self.keys_only)
368 result.set_more_results(self.__offset < self.count)
369 if compile:
370 self._EncodeCompiledCursor(
371 self.__query, result.mutable_compiled_cursor())
37299
373100
374class KindPseudoKind(object):101class KindPseudoKind(object):
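
Reviewer note: the removed _GetCursorOffset above locates a cursor inside a sorted result list by bisection, with the inclusive flag deciding whether the cursor entity itself stays in the remaining results. A runnable, self-contained sketch of the same logic over plain integers (Python 2's builtin cmp as the comparator):

    def cursor_offset(results, cursor_entity, inclusive, compare):
      # Returns the number of results that precede the cursor position.
      lo, hi = 0, len(results)
      while lo < hi:
        mid = (lo + hi) // 2
        if inclusive:
          # First index whose element compares >= cursor_entity.
          if compare(results[mid], cursor_entity) < 0:
            lo = mid + 1
          else:
            hi = mid
        else:
          # First index whose element compares > cursor_entity.
          if compare(cursor_entity, results[mid]) < 0:
            hi = mid
          else:
            lo = mid + 1
      return lo

    print cursor_offset([1, 3, 5, 7], 5, True, cmp)   # 2: result 5 is kept
    print cursor_offset([1, 3, 5, 7], 5, False, cmp)  # 3: result 5 is skipped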
@@ -381,10 +108,48 @@
381 """108 """
382 name = '__kind__'109 name = '__kind__'
383110
111 def Query(self, entities, query, filters, orders):
112 """Perform a query on this pseudo-kind.
113
114 Args:
115 entities: all the app's entities.
116 query: the original datastore_pb.Query.
117 filters: the filters from query.
118 orders: the orders from query.
119
120 Returns:
121 (results, remaining_filters, remaining_orders)
122 results is a list of datastore.Entity
123 remaining_filters and remaining_orders are the filters and orders that
124 should be applied in memory
125 """
126 kind_range = datastore_stub_util.ParseKindQuery(query, filters, orders)
127 app_namespace_str = datastore_types.EncodeAppIdNamespace(
128 query.app(), query.name_space())
129 kinds = []
130
131 for app_namespace, kind in entities:
132 if app_namespace != app_namespace_str: continue
133 if not kind_range.Contains(kind): continue
134 kinds.append(datastore.Entity(self.name, name=kind))
135
136 return (kinds, [], [])
137
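
Reviewer note: a hedged usage sketch for the pseudo-kind above — in this SDK a __kind__ query yields one entity per kind, keyed by the kind name (assumes an initialized datastore stub):

    from google.appengine.api import datastore

    for kind_entity in datastore.Query('__kind__').Run():
      print kind_entity.key().name()   # one line per kind in the datastore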
138
139class PropertyPseudoKind(object):
140 """Pseudo-kind for schema queries.
141
142 Provides a Query method to perform the actual query.
143
144 Public properties:
145 name: the pseudo-kind name
146 """
147 name = '__property__'
148
384 def __init__(self, filestub):149 def __init__(self, filestub):
385 """Constructor.150 """Constructor.
386151
387 Initializes a __kind__ pseudo-kind definition.152 Initializes a __property__ pseudo-kind definition.
388153
389 Args:154 Args:
390 filestub: the DatastoreFileStub instance being served by this155 filestub: the DatastoreFileStub instance being served by this
@@ -396,10 +161,10 @@
396 """Perform a query on this pseudo-kind.161 """Perform a query on this pseudo-kind.
397162
398 Args:163 Args:
399 entities: all the app's entities164 entities: all the app's entities.
400 query: the original datastore_pb.Query165 query: the original datastore_pb.Query.
401 filters: the filters from query166 filters: the filters from query.
402 orders: the orders from query167 orders: the orders from query.
403168
404 Returns:169 Returns:
405 (results, remaining_filters, remaining_orders)170 (results, remaining_filters, remaining_orders)
@@ -407,63 +172,69 @@
407 remaining_filters and remaining_orders are the filters and orders that172 remaining_filters and remaining_orders are the filters and orders that
408 should be applied in memory173 should be applied in memory
409 """174 """
410 start_kind, start_inclusive, end_kind, end_inclusive = (175 property_range = datastore_stub_util.ParsePropertyQuery(query, filters,
411 datastore_stub_util.ParseKindQuery(query, filters, orders))176 orders)
412 keys_only = query.keys_only()177 keys_only = query.keys_only()
413 app_str = query.app()178 app_namespace_str = datastore_types.EncodeAppIdNamespace(
414 namespace_str = query.name_space()179 query.app(), query.name_space())
415 keys_only = query.keys_only()180
416 app_namespace_str = datastore_types.EncodeAppIdNamespace(app_str,181 properties = []
417 namespace_str)
418 kinds = []
419 if keys_only:182 if keys_only:
420 usekey = '__kind__keys'183 usekey = '__property__keys'
421 else:184 else:
422 usekey = '__kind__'185 usekey = '__property__'
423186
424 for app_namespace, kind in entities:187 for app_namespace, kind in entities:
425 if app_namespace != app_namespace_str: continue188 if app_namespace != app_namespace_str: continue
426 if start_kind is not None:189
427 if start_inclusive and kind < start_kind: continue190 (start_cmp, end_cmp) = property_range.MapExtremes(
428 if not start_inclusive and kind <= start_kind: continue191 lambda extreme, inclusive, is_end: cmp(kind, extreme[0]))
429 if end_kind is not None:192 if not((start_cmp is None or start_cmp >= 0) and
430 if end_inclusive and kind > end_kind: continue193 (end_cmp is None or end_cmp <= 0)):
431 if not end_inclusive and kind >= end_kind: continue194 continue
432195
433 app_kind = (app_namespace_str, kind)196 app_kind = (app_namespace_str, kind)
434197
435 kind_e = self.filestub._GetSchemaCache(app_kind, usekey)198 kind_properties = self.filestub._GetSchemaCache(app_kind, usekey)
436 if not kind_e:199 if not kind_properties:
437 kind_e = datastore.Entity(self.name, name=kind)200 kind_properties = []
438201 kind_key = datastore_types.Key.from_path(KindPseudoKind.name, kind)
439 if not keys_only:202 props = {}
440 props = {}203
441204 for entity in entities[app_kind].values():
442 for entity in entities[app_kind].values():205 for prop in entity.protobuf.property_list():
443 for prop in entity.protobuf.property_list():206 prop_name = prop.name()
444 prop_name = prop.name()207 if (prop_name in
445 if prop_name not in props:208 datastore_stub_util.GetInvisibleSpecialPropertyNames()):
446 props[prop_name] = set()209 continue
447 cls = entity.native[prop_name].__class__210 if prop_name not in props:
448 tag = self.filestub._PROPERTY_TYPE_TAGS.get(cls)211 props[prop_name] = set()
449 props[prop_name].add(tag)212 native_value = entity.native[prop_name]
450213 if not isinstance(native_value, list):
451 properties = []214 native_value = [native_value]
452 types = []215 for value in native_value:
453 for name in sorted(props):216 tag = self.filestub._PROPERTY_TYPE_TAGS.get(value.__class__)
454 for tag in sorted(props[name]):217 if tag is not None:
455 properties.append(name)218 props[prop_name].add(tag)
456 types.append(tag)219 else:
457 if properties:220 logging.warning('Unexpected value of class %s in datastore', value.__class__)
458 kind_e['property'] = properties221
459 if types:222 for prop in sorted(props):
460 kind_e['representation'] = types223 property_e = datastore.Entity(self.name, name=prop, parent=kind_key)
461224 kind_properties.append(property_e)
462 self.filestub._SetSchemaCache(app_kind, usekey, kind_e)225
463226 if not keys_only and props[prop]:
464 kinds.append(kind_e)227 property_e['property_representation'] = [
465228 datastore_stub_util._PROPERTY_TYPE_NAMES[tag]
466 return (kinds, [], [])229 for tag in sorted(props[prop])]
230
231 self.filestub._SetSchemaCache(app_kind, usekey, kind_properties)
232
233 def InQuery(property_e):
234 return property_range.Contains((kind, property_e.key().name()))
235 properties += filter(InQuery, kind_properties)
236
237 return (properties, [], [])
467238
468239
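
Reviewer note: companion sketch for the schema query above — each __property__ entity is parented under its kind's __kind__ key, with type tags exposed through 'property_representation' (again assuming an initialized stub):

    from google.appengine.api import datastore

    for prop in datastore.Query('__property__').Run():
      kind = prop.key().parent().name()
      print '%s.%s: %s' % (kind, prop.key().name(),
                           prop.get('property_representation'))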
469class NamespacePseudoKind(object):240class NamespacePseudoKind(object):
@@ -476,25 +247,14 @@
476 """247 """
477 name = '__namespace__'248 name = '__namespace__'
478249
479 def __init__(self, filestub):
480 """Constructor.
481
482 Initializes a __namespace__ pseudo-kind definition.
483
484 Args:
485 filestub: the DatastoreFileStub instance being served by this
486 pseudo-kind.
487 """
488 self.filestub = filestub
489
490 def Query(self, entities, query, filters, orders):250 def Query(self, entities, query, filters, orders):
491 """Perform a query on this pseudo-kind.251 """Perform a query on this pseudo-kind.
492252
493 Args:253 Args:
494 entities: all the app's entities254 entities: all the app's entities.
495 query: the original datastore_pb.Query255 query: the original datastore_pb.Query.
496 filters: the filters from query256 filters: the filters from query.
497 orders: the orders from query257 orders: the orders from query.
498258
499 Returns:259 Returns:
500 (results, remaining_filters, remaining_orders)260 (results, remaining_filters, remaining_orders)
@@ -502,24 +262,16 @@
502 remaining_filters and remaining_orders are the filters and orders that262 remaining_filters and remaining_orders are the filters and orders that
503 should be applied in memory263 should be applied in memory
504 """264 """
505 start_namespace, start_inclusive, end_namespace, end_inclusive = (265 namespace_range = datastore_stub_util.ParseNamespaceQuery(query, filters,
506 datastore_stub_util.ParseNamespaceQuery(query, filters, orders))266 orders)
507 app_str = query.app()267 app_str = query.app()
508268
509 namespaces = set()269 namespaces = set()
510270
511 for app_namespace, kind in entities:271 for app_namespace, kind in entities:
512 (app_id, namespace) = datastore_types.DecodeAppIdNamespace(app_namespace)272 (app_id, namespace) = datastore_types.DecodeAppIdNamespace(app_namespace)
513 if app_id != app_str: continue273 if app_id == app_str and namespace_range.Contains(namespace):
514274 namespaces.add(namespace)
515 if start_namespace is not None:
516 if start_inclusive and namespace < start_namespace: continue
517 if not start_inclusive and namespace <= start_namespace: continue
518 if end_namespace is not None:
519 if end_inclusive and namespace > end_namespace: continue
520 if not end_inclusive and namespace >= end_namespace: continue
521
522 namespaces.add(namespace)
523275
524 namespace_entities = []276 namespace_entities = []
525 for namespace in namespaces:277 for namespace in namespaces:
@@ -558,6 +310,8 @@
558 datastore_types.PostalAddress: entity_pb.PropertyValue.kstringValue,310 datastore_types.PostalAddress: entity_pb.PropertyValue.kstringValue,
559 datastore_types.Rating: entity_pb.PropertyValue.kint64Value,311 datastore_types.Rating: entity_pb.PropertyValue.kint64Value,
560 str: entity_pb.PropertyValue.kstringValue,312 str: entity_pb.PropertyValue.kstringValue,
313 datastore_types.ByteString: entity_pb.PropertyValue.kstringValue,
314 datastore_types.BlobKey: entity_pb.PropertyValue.kstringValue,
561 datastore_types.Text: entity_pb.PropertyValue.kstringValue,315 datastore_types.Text: entity_pb.PropertyValue.kstringValue,
562 type(None): 0,316 type(None): 0,
563 unicode: entity_pb.PropertyValue.kstringValue,317 unicode: entity_pb.PropertyValue.kstringValue,
@@ -635,8 +389,9 @@
635 self.__indexes_lock = threading.Lock()389 self.__indexes_lock = threading.Lock()
636390
637 self.__pseudo_kinds = {}391 self.__pseudo_kinds = {}
638 self._RegisterPseudoKind(KindPseudoKind(self))392 self._RegisterPseudoKind(KindPseudoKind())
639 self._RegisterPseudoKind(NamespacePseudoKind(self))393 self._RegisterPseudoKind(PropertyPseudoKind(self))
394 self._RegisterPseudoKind(NamespacePseudoKind())
640395
641 self.Read()396 self.Read()
642397
@@ -826,14 +581,14 @@
826581
827 return []582 return []
828583
829 def __WritePickled(self, obj, filename, openfile=file):584 def __WritePickled(self, obj, filename):
830 """Pickles the object and writes it to the given file.585 """Pickles the object and writes it to the given file.
831 """586 """
832 if not filename or filename == '/dev/null' or not obj:587 if not filename or filename == '/dev/null' or not obj:
833 return588 return
834589
835 descriptor, tmp_filename = tempfile.mkstemp(dir=os.path.dirname(filename))590 descriptor, tmp_filename = tempfile.mkstemp(dir=os.path.dirname(filename))
836 tmpfile = openfile(tmp_filename, 'wb')591 tmpfile = os.fdopen(descriptor, 'wb')
837 pickler = pickle.Pickler(tmpfile, protocol=1)592 pickler = pickle.Pickler(tmpfile, protocol=1)
838 pickler.fast = True593 pickler.fast = True
839 pickler.dump(obj)594 pickler.dump(obj)
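
Reviewer note: the hunk above fixes a descriptor leak — the old code reopened tmp_filename by name while the descriptor returned by mkstemp stayed open. A minimal sketch of the corrected write-to-temp pattern (the final atomic rename is an assumption, not shown in this hunk; POSIX semantics):

    import cPickle as pickle
    import os
    import tempfile

    def write_pickled(obj, filename):
      # Wrap the mkstemp descriptor directly instead of reopening by name.
      descriptor, tmp_filename = tempfile.mkstemp(
          dir=os.path.dirname(filename) or '.')
      tmpfile = os.fdopen(descriptor, 'wb')
      try:
        pickle.dump(obj, tmpfile, 1)   # protocol=1, as in the stub
      finally:
        tmpfile.close()
      os.rename(tmp_filename, filename)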
@@ -921,6 +676,8 @@
921 assert (clone.has_entity_group() and676 assert (clone.has_entity_group() and
922 clone.entity_group().element_size() > 0)677 clone.entity_group().element_size() > 0)
923678
679 datastore_stub_util.PrepareSpecialPropertiesForStore(clone)
680
924 self.__entities_lock.acquire()681 self.__entities_lock.acquire()
925682
926 try:683 try:
@@ -956,6 +713,8 @@
956713
957 if entity:714 if entity:
958 group.mutable_entity().CopyFrom(entity)715 group.mutable_entity().CopyFrom(entity)
716 datastore_stub_util.PrepareSpecialPropertiesForLoad(
717 group.mutable_entity())
959718
960719
961 def _Dynamic_Delete(self, delete_request, delete_response):720 def _Dynamic_Delete(self, delete_request, delete_response):
@@ -1166,6 +925,14 @@
1166 if cmped == 0:925 if cmped == 0:
1167 return cmp(a.key(), b.key())926 return cmp(a.key(), b.key())
1168927
928 def order_compare_entities_pb(a, b):
929 """ Return a negative, zero or positive number depending on whether
930 entity a is considered smaller than, equal to, or larger than b,
931 according to the query's orderings. a and b are protobuf-encoded
932 entities."""
933 return order_compare_entities(datastore.Entity.FromPb(a),
934 datastore.Entity.FromPb(b))
935
1169 def order_compare_properties(x, y):936 def order_compare_properties(x, y):
1170 """Return a negative, zero or positive number depending on whether937 """Return a negative, zero or positive number depending on whether
1171 property value x is considered smaller than, equal to, or larger than938 property value x is considered smaller than, equal to, or larger than
@@ -1201,7 +968,11 @@
1201 else:968 else:
1202 self.__query_history[clone] = 1969 self.__query_history[clone] = 1
1203970
1204 cursor = _Cursor(query, results, order_compare_entities)971 results = [r._ToPb() for r in results]
972 for result in results:
973 datastore_stub_util.PrepareSpecialPropertiesForLoad(result)
974 cursor = datastore_stub_util.ListCursor(query, results,
975 order_compare_entities_pb)
1205 self.__queries[cursor.cursor] = cursor976 self.__queries[cursor.cursor] = cursor
1206977
1207 if query.has_count():978 if query.has_count():
@@ -1243,7 +1014,8 @@
1243 query_result = datastore_pb.QueryResult()1014 query_result = datastore_pb.QueryResult()
1244 self._Dynamic_RunQuery(query, query_result)1015 self._Dynamic_RunQuery(query, query_result)
1245 cursor = query_result.cursor().cursor()1016 cursor = query_result.cursor().cursor()
1246 integer64proto.set_value(min(self.__queries[cursor].count, _MAXIMUM_RESULTS))1017 integer64proto.set_value(min(self.__queries[cursor].Count(),
1018 datastore_stub_util._MAXIMUM_RESULTS))
1247 del self.__queries[cursor]1019 del self.__queries[cursor]
12481020
1249 def _Dynamic_BeginTransaction(self, request, transaction):1021 def _Dynamic_BeginTransaction(self, request, transaction):
@@ -1348,6 +1120,9 @@
13481120
1349 for entity in self.__entities[app_kind].values():1121 for entity in self.__entities[app_kind].values():
1350 for prop in entity.protobuf.property_list():1122 for prop in entity.protobuf.property_list():
1123 if (prop.name() in
1124 datastore_stub_util.GetInvisibleSpecialPropertyNames()):
1125 continue
1351 if prop.name() not in props:1126 if prop.name() not in props:
1352 props[prop.name()] = entity_pb.PropertyValue()1127 props[prop.name()] = entity_pb.PropertyValue()
1353 props[prop.name()].MergeFrom(prop.value())1128 props[prop.name()].MergeFrom(prop.value())
13541129
=== modified file 'AppServer/google/appengine/api/datastore_types.py'
--- AppServer/google/appengine/api/datastore_types.py 2010-11-30 10:37:25 +0000
+++ AppServer/google/appengine/api/datastore_types.py 2010-12-24 09:11:16 +0000
@@ -679,7 +679,7 @@
679 TERM = 'user-tag'679 TERM = 'user-tag'
680680
681 def __init__(self, tag):681 def __init__(self, tag):
682 super(Category, self).__init__(self, tag)682 super(Category, self).__init__()
683 ValidateString(tag, 'tag')683 ValidateString(tag, 'tag')
684684
685 def ToXml(self):685 def ToXml(self):
@@ -701,7 +701,7 @@
701 Raises BadValueError if link is not a fully qualified, well-formed URL.701 Raises BadValueError if link is not a fully qualified, well-formed URL.
702 """702 """
703 def __init__(self, link):703 def __init__(self, link):
704 super(Link, self).__init__(self, link)704 super(Link, self).__init__()
705 ValidateString(link, 'link', max_len=_MAX_LINK_PROPERTY_LENGTH)705 ValidateString(link, 'link', max_len=_MAX_LINK_PROPERTY_LENGTH)
706706
707 scheme, domain, path, params, query, fragment = urlparse.urlparse(link)707 scheme, domain, path, params, query, fragment = urlparse.urlparse(link)
@@ -724,7 +724,7 @@
724 Raises BadValueError if email is not a valid email address.724 Raises BadValueError if email is not a valid email address.
725 """725 """
726 def __init__(self, email):726 def __init__(self, email):
727 super(Email, self).__init__(self, email)727 super(Email, self).__init__()
728 ValidateString(email, 'email')728 ValidateString(email, 'email')
729729
730 def ToXml(self):730 def ToXml(self):
@@ -915,7 +915,7 @@
915 Raises BadValueError if phone is not a string or subtype.915 Raises BadValueError if phone is not a string or subtype.
916 """916 """
917 def __init__(self, phone):917 def __init__(self, phone):
918 super(PhoneNumber, self).__init__(self, phone)918 super(PhoneNumber, self).__init__()
919 ValidateString(phone, 'phone')919 ValidateString(phone, 'phone')
920920
921 def ToXml(self):921 def ToXml(self):
@@ -933,7 +933,7 @@
933 Raises BadValueError if address is not a string or subtype.933 Raises BadValueError if address is not a string or subtype.
934 """934 """
935 def __init__(self, address):935 def __init__(self, address):
936 super(PostalAddress, self).__init__(self, address)936 super(PostalAddress, self).__init__()
937 ValidateString(address, 'address')937 ValidateString(address, 'address')
938938
939 def ToXml(self):939 def ToXml(self):
@@ -955,7 +955,7 @@
955 MAX = 100955 MAX = 100
956956
957 def __init__(self, rating):957 def __init__(self, rating):
958 super(Rating, self).__init__(self, rating)958 super(Rating, self).__init__()
959 if isinstance(rating, float) or isinstance(rating, complex):959 if isinstance(rating, float) or isinstance(rating, complex):
960 raise datastore_errors.BadValueError(960 raise datastore_errors.BadValueError(
961 'Expected int or long; received %s (a %s).' %961 'Expected int or long; received %s (a %s).' %
@@ -1505,7 +1505,7 @@
1505 same type.1505 same type.
15061506
1507 Returns:1507 Returns:
1508 A list of entity_pb.PropertyValue instances.1508 A list of entity_pb.Property instances.
1509 """1509 """
1510 encoded_name = name.encode('utf-8')1510 encoded_name = name.encode('utf-8')
15111511
15121512
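
Reviewer note: the super() hunks in this file all fix the same bug — super(C, self).__init__ is already bound to self, so passing self again hands the base class an extra positional argument. A minimal illustration with a unicode subclass (Tag is a hypothetical stand-in for Category, Link, Email, etc.):

    class Tag(unicode):
      def __init__(self, tag):
        # Correct form: no extra self. The old code's
        # super(Tag, self).__init__(self, tag) effectively called
        # unicode.__init__(self, self, tag).
        super(Tag, self).__init__()

    t = Tag('user-tag')   # the value itself is carried by unicode.__new__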
=== modified file 'AppServer/google/appengine/api/images/__init__.py'
--- AppServer/google/appengine/api/images/__init__.py 2010-11-30 10:37:25 +0000
+++ AppServer/google/appengine/api/images/__init__.py 2010-12-24 09:11:16 +0000
@@ -897,6 +897,8 @@
897 return image.histogram()897 return image.histogram()
898898
899899
900IMG_SERVING_SIZES_LIMIT = 1600
901
900IMG_SERVING_SIZES = [902IMG_SERVING_SIZES = [
901 32, 48, 64, 72, 80, 90, 94, 104, 110, 120, 128, 144,903 32, 48, 64, 72, 80, 90, 94, 104, 110, 120, 128, 144,
902 150, 160, 200, 220, 288, 320, 400, 512, 576, 640, 720,904 150, 160, 200, 220, 288, 320, 400, 512, 576, 640, 720,
@@ -927,20 +929,8 @@
927929
928 "http://lh3.ggpht.com/SomeCharactersGoesHere=s32-c"930 "http://lh3.ggpht.com/SomeCharactersGoesHere=s32-c"
929931
 930 Available sizes for resize are:932 Available sizes are any integer in the range [0, 1600]; the limit is
 931 (e.g. "=sX" where X is one of the following values)933 also available as IMG_SERVING_SIZES_LIMIT.
932
933 0, 32, 48, 64, 72, 80, 90, 94, 104, 110, 120, 128, 144,
934 150, 160, 200, 220, 288, 320, 400, 512, 576, 640, 720,
935 800, 912, 1024, 1152, 1280, 1440, 1600
936
937 Available sizes for crop are:
938 (e.g. "=sX-c" where X is one of the following values)
939
940 32, 48, 64, 72, 80, 104, 136, 144, 150, 160
941
942 These values are also available as IMG_SERVING_SIZES and
943 IMG_SERVING_CROP_SIZES integer lists.
944934
945 Args:935 Args:
946 size: int, size of resulting images936 size: int, size of resulting images
@@ -960,10 +950,7 @@
960 if crop and not size:950 if crop and not size:
961 raise BadRequestError("Size should be set for crop operation")951 raise BadRequestError("Size should be set for crop operation")
962952
963 if size and crop and not size in IMG_SERVING_CROP_SIZES:953 if size and (size > IMG_SERVING_SIZES_LIMIT or size < 0):
964 raise UnsupportedSizeError("Unsupported crop size")
965
966 if size and not crop and not size in IMG_SERVING_SIZES:
967 raise UnsupportedSizeError("Unsupported size")954 raise UnsupportedSizeError("Unsupported size")
968955
969 request = images_service_pb.ImagesGetUrlBaseRequest()956 request = images_service_pb.ImagesGetUrlBaseRequest()
@@ -999,4 +986,3 @@
999 url += "-c"986 url += "-c"
1000987
1001 return url988 return url
1002
1003989
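
Reviewer note: the validation change above collapses the two whitelist checks into one range check against IMG_SERVING_SIZES_LIMIT. A standalone sketch of the new rule, with ValueError standing in for the SDK's BadRequestError/UnsupportedSizeError:

    IMG_SERVING_SIZES_LIMIT = 1600

    def validate_size(size, crop):
      if crop and not size:
        raise ValueError('Size should be set for crop operation')
      if size and not (0 <= size <= IMG_SERVING_SIZES_LIMIT):
        raise ValueError('Unsupported size')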
=== modified file 'AppServer/google/appengine/api/images/images_stub.py'
--- AppServer/google/appengine/api/images/images_stub.py 2010-11-30 10:37:25 +0000
+++ AppServer/google/appengine/api/images/images_stub.py 2010-12-24 09:11:16 +0000
@@ -41,6 +41,9 @@
41from google.appengine.runtime import apiproxy_errors41from google.appengine.runtime import apiproxy_errors
4242
4343
44MAX_REQUEST_SIZE = 32 << 20
45
46
44def _ArgbToRgbaTuple(argb):47def _ArgbToRgbaTuple(argb):
45 """Convert from a single ARGB value to a tuple containing RGBA.48 """Convert from a single ARGB value to a tuple containing RGBA.
4649
@@ -89,7 +92,8 @@
 89 host_prefix: the URL prefix (protocol://host:port) to prepend to92 host_prefix: the URL prefix (protocol://host:port) to prepend to
90 image urls on a call to GetUrlBase.93 image urls on a call to GetUrlBase.
91 """94 """
92 super(ImagesServiceStub, self).__init__(service_name)95 super(ImagesServiceStub, self).__init__(service_name,
96 max_request_size=MAX_REQUEST_SIZE)
93 self._host_prefix = host_prefix97 self._host_prefix = host_prefix
94 Image.init()98 Image.init()
9599
96100
=== modified file 'AppServer/google/appengine/api/labs/taskqueue/__init__.py' (properties changed: -x to +x)
--- AppServer/google/appengine/api/labs/taskqueue/__init__.py 2010-05-07 09:58:53 +0000
+++ AppServer/google/appengine/api/labs/taskqueue/__init__.py 2010-12-24 09:11:16 +0000
@@ -15,6 +15,58 @@
15# limitations under the License.15# limitations under the License.
16#16#
1717
18"""Task Queue API module."""18"""Shim module so that the old labs import path still works."""
1919
20from taskqueue import *20
21
22__all__ = [
23
24 'BadTaskStateError', 'BadTransactionState', 'BadTransactionStateError',
25 'DatastoreError', 'DuplicateTaskNameError', 'Error', 'InternalError',
26 'InvalidQueueError', 'InvalidQueueNameError', 'InvalidTaskError',
27 'InvalidTaskNameError', 'InvalidUrlError', 'PermissionDeniedError',
28 'TaskAlreadyExistsError', 'TaskTooLargeError', 'TombstonedTaskError',
29 'TooManyTasksError', 'TransientError', 'UnknownQueueError',
30
31 'MAX_QUEUE_NAME_LENGTH', 'MAX_TASK_NAME_LENGTH', 'MAX_TASK_SIZE_BYTES',
32 'MAX_URL_LENGTH',
33
34 'Queue', 'Task', 'add']
35
36
37import os
38import sys
39import warnings
40
41from google.appengine.api.taskqueue import *
42
43
44if os.environ.get('DATACENTER', None) is None:
45 warnings.warn('google.appengine.api.labs.taskqueue is deprecated, please use '
46 'google.appengine.api.taskqueue', DeprecationWarning,
47 stacklevel=2)
48
49
50def _map_module(module_name):
51 """Map a module from the new path to the labs path.
52
53 Args:
54 module_name: Name of the module to be mapped.
55
56 Raises:
57 ImportError: If the specified module we are mapping from does not exist.
58
59 Returns:
60 The module object of the module that was mapped.
61 """
62 labs_module_name = '%s.%s' % (__name__, module_name)
63 module_prefix = '.'.join(__name__.split('.')[:2])
64 new_module_name = '%s.api.taskqueue.%s' % (module_prefix, module_name)
65
66 __import__(new_module_name)
67 sys.modules[labs_module_name] = sys.modules[new_module_name]
68 return sys.modules[labs_module_name]
69
70taskqueue = _map_module('taskqueue')
71taskqueue_service_pb = _map_module('taskqueue_service_pb')
72taskqueue_stub = _map_module('taskqueue_stub')
2173
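
Reviewer note: the shim above works by publishing the new module object under the legacy name in sys.modules, so both import forms keep resolving to the same module. A runnable, generic sketch (legacy_json is a hypothetical legacy name aliased to the standard json module):

    import sys

    def alias_module(old_name, new_name):
      # Import the real module, then register it under the legacy name.
      __import__(new_name)
      sys.modules[old_name] = sys.modules[new_name]
      return sys.modules[old_name]

    alias_module('legacy_json', 'json')
    import legacy_json            # now resolves to the json module
    print legacy_json.dumps({'ok': True})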
=== removed file 'AppServer/google/appengine/api/labs/taskqueue/taskqueue.py'
--- AppServer/google/appengine/api/labs/taskqueue/taskqueue.py 2010-11-30 10:37:25 +0000
+++ AppServer/google/appengine/api/labs/taskqueue/taskqueue.py 1970-01-01 00:00:00 +0000
@@ -1,953 +0,0 @@
1#!/usr/bin/env python
2#
3# Copyright 2007 Google Inc.
4#
5# Licensed under the Apache License, Version 2.0 (the "License");
6# you may not use this file except in compliance with the License.
7# You may obtain a copy of the License at
8#
9# http://www.apache.org/licenses/LICENSE-2.0
10#
11# Unless required by applicable law or agreed to in writing, software
12# distributed under the License is distributed on an "AS IS" BASIS,
13# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14# See the License for the specific language governing permissions and
15# limitations under the License.
16#
17
18"""Task Queue API.
19
20Enables an application to queue background work for itself. Work is done through
21webhooks that process tasks pushed from a queue. Tasks will execute in
22best-effort order of ETA. Webhooks that fail will cause tasks to be retried at a
23later time. Multiple queues may exist with independent throttling controls.
24
25Webhook URLs may be specified directly for Tasks, or the default URL scheme
26may be used, which will translate Task names into URLs relative to a Queue's
27base path. A default queue is also provided for simple usage.
28"""
29
30
31
32import calendar
33import datetime
34import os
35import re
36import time
37import urllib
38import urlparse
39
40import taskqueue_service_pb
41
42from google.appengine.api import apiproxy_stub_map
43from google.appengine.api import namespace_manager
44from google.appengine.api import urlfetch
45from google.appengine.runtime import apiproxy_errors
46import os,sys
47APPSCALE_HOME = os.environ.get("APPSCALE_HOME")
48
49
50class Error(Exception):
51 """Base-class for exceptions in this module."""
52
53
54class UnknownQueueError(Error):
55 """The queue specified is unknown."""
56
57
58class TransientError(Error):
59 """There was a transient error while accessing the queue.
60
61  Please try again later.
62 """
63
64
65class InternalError(Error):
66 """There was an internal error while accessing this queue.
67
68 If this problem continues, please contact the App Engine team through
69 our support forum with a description of your problem.
70 """
71
72
73class InvalidTaskError(Error):
74 """The task's parameters, headers, or method is invalid."""
75
76
77class InvalidTaskNameError(InvalidTaskError):
78 """The task's name is invalid."""
79
80
81class TaskTooLargeError(InvalidTaskError):
82 """The task is too large with its headers and payload."""
83
84
85class TaskAlreadyExistsError(InvalidTaskError):
86 """Task already exists. It has not yet run."""
87
88
89class TombstonedTaskError(InvalidTaskError):
90 """Task has been tombstoned."""
91
92
93class InvalidUrlError(InvalidTaskError):
94 """The task's relative URL is invalid."""
95
96
97class BadTaskStateError(Error):
98 """The task is in the wrong state for the requested operation."""
99
100
101class InvalidQueueError(Error):
102 """The Queue's configuration is invalid."""
103
104
105class InvalidQueueNameError(InvalidQueueError):
106 """The Queue's name is invalid."""
107
108
109class _RelativeUrlError(Error):
110 """The relative URL supplied is invalid."""
111
112
113class PermissionDeniedError(Error):
114 """The requested operation is not allowed for this app."""
115
116
117class DuplicateTaskNameError(Error):
118 """The add arguments contain tasks with identical names."""
119
120
121class TooManyTasksError(Error):
122 """Too many tasks were present in a single function call."""
123
124
125class DatastoreError(Error):
126 """There was a datastore error while accessing the queue."""
127
128
129class BadTransactionStateError(Error):
130 """The state of the current transaction does not permit this operation."""
131
132BadTransactionState = BadTransactionStateError
133
134MAX_QUEUE_NAME_LENGTH = 100
135
136MAX_TASK_NAME_LENGTH = 500
137
138MAX_TASK_SIZE_BYTES = 10 * (2 ** 10)
139
140MAX_URL_LENGTH = 2083
141
142_DEFAULT_QUEUE = 'default'
143
144_DEFAULT_QUEUE_PATH = '/_ah/queue'
145
146_METHOD_MAP = {
147 'GET': taskqueue_service_pb.TaskQueueAddRequest.GET,
148 'POST': taskqueue_service_pb.TaskQueueAddRequest.POST,
149 'HEAD': taskqueue_service_pb.TaskQueueAddRequest.HEAD,
150 'PUT': taskqueue_service_pb.TaskQueueAddRequest.PUT,
151 'DELETE': taskqueue_service_pb.TaskQueueAddRequest.DELETE,
152}
153
154_NON_POST_METHODS = frozenset(['GET', 'HEAD', 'PUT', 'DELETE'])
155
156_BODY_METHODS = frozenset(['POST', 'PUT'])
157
158_TASK_NAME_PATTERN = r'^[a-zA-Z0-9-]{1,%s}$' % MAX_TASK_NAME_LENGTH
159
160_TASK_NAME_RE = re.compile(_TASK_NAME_PATTERN)
161
162_QUEUE_NAME_PATTERN = r'^[a-zA-Z0-9-]{1,%s}$' % MAX_QUEUE_NAME_LENGTH
163
164_QUEUE_NAME_RE = re.compile(_QUEUE_NAME_PATTERN)
165
166_ERROR_MAPPING = {
167 taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE: UnknownQueueError,
168 taskqueue_service_pb.TaskQueueServiceError.TRANSIENT_ERROR:
169 TransientError,
170 taskqueue_service_pb.TaskQueueServiceError.INTERNAL_ERROR: InternalError,
171 taskqueue_service_pb.TaskQueueServiceError.TASK_TOO_LARGE:
172 TaskTooLargeError,
173 taskqueue_service_pb.TaskQueueServiceError.INVALID_TASK_NAME:
174 InvalidTaskNameError,
175 taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_NAME:
176 InvalidQueueNameError,
177 taskqueue_service_pb.TaskQueueServiceError.INVALID_URL: InvalidUrlError,
178 taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_RATE:
179 InvalidQueueError,
180 taskqueue_service_pb.TaskQueueServiceError.PERMISSION_DENIED:
181 PermissionDeniedError,
182 taskqueue_service_pb.TaskQueueServiceError.TASK_ALREADY_EXISTS:
183 TaskAlreadyExistsError,
184 taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_TASK:
185 TombstonedTaskError,
186 taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA: InvalidTaskError,
187 taskqueue_service_pb.TaskQueueServiceError.INVALID_REQUEST: Error,
188 taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_TASK: Error,
189 taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_QUEUE: Error,
190 taskqueue_service_pb.TaskQueueServiceError.DUPLICATE_TASK_NAME:
191 DuplicateTaskNameError,
192
193 taskqueue_service_pb.TaskQueueServiceError.TOO_MANY_TASKS:
194 TooManyTasksError,
195
196}
197
198_PRESERVE_ENVIRONMENT_HEADERS = (
199 ('X-AppEngine-Default-Namespace', 'HTTP_X_APPENGINE_DEFAULT_NAMESPACE'),)
200
201
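# A minimal sketch of what these headers enable (the namespace string is
# illustrative; Task is defined later in this module): together with the
# X-AppEngine-Current-Namespace default set in Task.__init__, a Task captures
# the caller's namespace when it is constructed, not when it runs.
def _example_namespace_capture():
  namespace_manager.set_namespace('tenant-a')
  task = Task(url='/work')
  assert task.headers['X-AppEngine-Current-Namespace'] == 'tenant-a'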
202class _UTCTimeZone(datetime.tzinfo):
203 """UTC timezone."""
204
205 ZERO = datetime.timedelta(0)
206
207 def utcoffset(self, dt):
208 return self.ZERO
209
210 def dst(self, dt):
211 return self.ZERO
212
213 def tzname(self, dt):
214 return 'UTC'
215
216
217_UTC = _UTCTimeZone()
218
219
220def _parse_relative_url(relative_url):
221 """Parses a relative URL and splits it into its path and query string.
222
223 Args:
224 relative_url: The relative URL, starting with a '/'.
225
226 Returns:
227 Tuple (path, query) where:
228 path: The path in the relative URL.
229 query: The query string in the URL without the '?' character.
230
231 Raises:
232    _RelativeUrlError if the relative_url is invalid for any reason.
233 """
234 if not relative_url:
235 raise _RelativeUrlError('Relative URL is empty')
236 (scheme, netloc, path, query, fragment) = urlparse.urlsplit(relative_url)
237 if scheme or netloc:
238 raise _RelativeUrlError('Relative URL may not have a scheme or location')
239 if fragment:
240 raise _RelativeUrlError('Relative URL may not specify a fragment')
241 if not path or path[0] != '/':
242 raise _RelativeUrlError('Relative URL path must start with "/"')
243 return path, query
244
245
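# A minimal sketch of the validation above (URLs illustrative): only rooted,
# scheme-less paths are accepted.
def _example_parse_relative_url():
  assert _parse_relative_url('/worker?shard=3') == ('/worker', 'shard=3')
  try:
    _parse_relative_url('http://example.com/worker')
  except _RelativeUrlError:
    pass  # a scheme or location makes the URL non-relative, so it is rejected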
246def _flatten_params(params):
247 """Converts a dictionary of parameters to a list of parameters.
248
249 Any unicode strings in keys or values will be encoded as UTF-8.
250
251 Args:
252 params: Dictionary mapping parameter keys to values. Values will be
253 converted to a string and added to the list as tuple (key, value). If
254      a value is iterable and not a string, each contained value will be
255 added as a separate (key, value) tuple.
256
257 Returns:
258 List of (key, value) tuples.
259 """
260 def get_string(value):
261 if isinstance(value, unicode):
262 return unicode(value).encode('utf-8')
263 else:
264 return str(value)
265
266 param_list = []
267 for key, value in params.iteritems():
268 key = get_string(key)
269 if isinstance(value, basestring):
270 param_list.append((key, get_string(value)))
271 else:
272 try:
273 iterator = iter(value)
274 except TypeError:
275 param_list.append((key, str(value)))
276 else:
277 param_list.extend((key, get_string(v)) for v in iterator)
278
279 return param_list
280
281
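# A minimal sketch of the flattening rules (keys and values illustrative):
# iterable values become repeated (key, value) pairs, and every key and value
# is coerced to a byte string.
def _example_flatten_params():
  pairs = _flatten_params({'a': 1, 'tags': ['x', u'y']})
  assert sorted(pairs) == [('a', '1'), ('tags', 'x'), ('tags', 'y')]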
282class Task(object):
283 """Represents a single Task on a queue."""
284
285 __CONSTRUCTOR_KWARGS = frozenset([
286 'countdown', 'eta', 'headers', 'method', 'name', 'params', 'url'])
287
288 __eta_posix = None
289
290 def __init__(self, payload=None, **kwargs):
291 """Initializer.
292
293 All parameters are optional.
294
295 Args:
296 payload: The payload data for this Task that will be delivered to the
297 webhook as the HTTP request body. This is only allowed for POST and PUT
298 methods.
299 countdown: Time in seconds into the future that this Task should execute.
300 Defaults to zero.
301 eta: Absolute time when the Task should execute. May not be specified
302 if 'countdown' is also supplied. This may be timezone-aware or
303 timezone-naive.
304 headers: Dictionary of headers to pass to the webhook. Values in the
305 dictionary may be iterable to indicate repeated header fields.
306 method: Method to use when accessing the webhook. Defaults to 'POST'.
307 name: Name to give the Task; if not specified, a name will be
308 auto-generated when added to a queue and assigned to this object. Must
309 match the _TASK_NAME_PATTERN regular expression.
310 params: Dictionary of parameters to use for this Task. For POST requests
311 these params will be encoded as 'application/x-www-form-urlencoded' and
312 set to the payload. For all other methods, the parameters will be
313 converted to a query string. May not be specified if the URL already
314 contains a query string.
315 url: Relative URL where the webhook that should handle this task is
316 located for this application. May have a query string unless this is
317 a POST method.
318
319 Raises:
320 InvalidTaskError if any of the parameters are invalid;
321 InvalidTaskNameError if the task name is invalid; InvalidUrlError if
322 the task URL is invalid or too long; TaskTooLargeError if the task with
323 its payload is too large.
324 """
325 args_diff = set(kwargs.iterkeys()) - self.__CONSTRUCTOR_KWARGS
326 if args_diff:
327 raise TypeError('Invalid arguments: %s' % ', '.join(args_diff))
328
329 self.__name = kwargs.get('name')
330 if self.__name and not _TASK_NAME_RE.match(self.__name):
331 raise InvalidTaskNameError(
332 'Task name does not match expression "%s"; found %s' %
333 (_TASK_NAME_PATTERN, self.__name))
334
335 self.__default_url, self.__relative_url, query = Task.__determine_url(
336 kwargs.get('url', ''))
337 self.__headers = urlfetch._CaselessDict()
338 self.__headers.update(kwargs.get('headers', {}))
339 self.__method = kwargs.get('method', 'POST').upper()
340 self.__payload = None
341 params = kwargs.get('params', {})
342
343 for header_name, environ_name in _PRESERVE_ENVIRONMENT_HEADERS:
344 value = os.environ.get(environ_name)
345 if value is not None:
346 self.__headers.setdefault(header_name, value)
347
348 self.__headers.setdefault('X-AppEngine-Current-Namespace',
349 namespace_manager.get_namespace())
350 if query and params:
351 raise InvalidTaskError('Query string and parameters both present; '
352 'only one of these may be supplied')
353
354 if self.__method == 'POST':
355 if payload and params:
356 raise InvalidTaskError('Message body and parameters both present for '
357 'POST method; only one of these may be supplied')
358 elif query:
359 raise InvalidTaskError('POST method may not have a query string; '
360 'use the "params" keyword argument instead')
361 elif params:
362 self.__payload = Task.__encode_params(params)
363 self.__headers.setdefault(
364 'content-type', 'application/x-www-form-urlencoded')
365 elif payload is not None:
366 self.__payload = Task.__convert_payload(payload, self.__headers)
367 elif self.__method in _NON_POST_METHODS:
368 if payload and self.__method not in _BODY_METHODS:
369 raise InvalidTaskError('Payload may only be specified for methods %s' %
370 ', '.join(_BODY_METHODS))
371 if payload:
372 self.__payload = Task.__convert_payload(payload, self.__headers)
373 if params:
374 query = Task.__encode_params(params)
375 if query:
376 self.__relative_url = '%s?%s' % (self.__relative_url, query)
377 else:
378 raise InvalidTaskError('Invalid method: %s' % self.__method)
379
380 self.__headers_list = _flatten_params(self.__headers)
381 self.__eta_posix = Task.__determine_eta_posix(
382 kwargs.get('eta'), kwargs.get('countdown'))
383 self.__eta = None
384 self.__enqueued = False
385
386 if self.size > MAX_TASK_SIZE_BYTES:
387 raise TaskTooLargeError('Task size must be less than %d; found %d' %
388 (MAX_TASK_SIZE_BYTES, self.size))
389
390 @staticmethod
391 def __determine_url(relative_url):
392 """Determines the URL of a task given a relative URL and a name.
393
394 Args:
395 relative_url: The relative URL for the Task.
396
397 Returns:
398 Tuple (default_url, relative_url, query) where:
399 default_url: True if this Task is using the default URL scheme;
400 False otherwise.
401 relative_url: String containing the relative URL for this Task.
402 query: The query string for this task.
403
404 Raises:
405 InvalidUrlError if the relative_url is invalid.
406 """
407 if not relative_url:
408 default_url, query = True, ''
409 else:
410 default_url = False
411 try:
412 relative_url, query = _parse_relative_url(relative_url)
413 except _RelativeUrlError, e:
414 raise InvalidUrlError(e)
415
416 if len(relative_url) > MAX_URL_LENGTH:
417 raise InvalidUrlError(
418 'Task URL must be less than %d characters; found %d' %
419 (MAX_URL_LENGTH, len(relative_url)))
420
421 return (default_url, relative_url, query)
422
423 @staticmethod
424 def __determine_eta_posix(eta=None, countdown=None, current_time=time.time):
425 """Determines the ETA for a task.
426
427 If 'eta' and 'countdown' are both None, the current time will be used.
428 Otherwise, only one of them may be specified.
429
430 Args:
431 eta: A datetime.datetime specifying the absolute ETA or None;
432 this may be timezone-aware or timezone-naive.
433      countdown: Number of seconds into the future from the present time at
434        which the ETA should be set.
435
436 Returns:
437 A float giving a POSIX timestamp containing the ETA.
438
439 Raises:
440 InvalidTaskError if the parameters are invalid.
441 """
442 if eta is not None and countdown is not None:
443 raise InvalidTaskError('May not use a countdown and ETA together')
444 elif eta is not None:
445 if not isinstance(eta, datetime.datetime):
446 raise InvalidTaskError('ETA must be a datetime.datetime instance')
447 elif eta.tzinfo is None:
448 return time.mktime(eta.timetuple()) + eta.microsecond*1e-6
449 else:
450 return calendar.timegm(eta.utctimetuple()) + eta.microsecond*1e-6
451 elif countdown is not None:
452 try:
453 countdown = float(countdown)
454 except ValueError:
455 raise InvalidTaskError('Countdown must be a number')
456 except OverflowError:
457 raise InvalidTaskError('Countdown out of range')
458 else:
459 return current_time() + countdown
460 else:
461 return current_time()
462
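  # A minimal sketch of the rules above (times illustrative): countdown=30
  # yields current_time() + 30; a timezone-naive eta goes through
  # time.mktime() and is read as local time, while a timezone-aware eta is
  # converted to UTC with calendar.timegm().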
463 @staticmethod
464 def __encode_params(params):
465 """URL-encodes a list of parameters.
466
467 Args:
468 params: Dictionary of parameters, possibly with iterable values.
469
470 Returns:
471 URL-encoded version of the params, ready to be added to a query string or
472 POST body.
473 """
474 return urllib.urlencode(_flatten_params(params))
475
476 @staticmethod
477 def __convert_payload(payload, headers):
478 """Converts a Task payload into UTF-8 and sets headers if necessary.
479
480 Args:
481 payload: The payload data to convert.
482 headers: Dictionary of headers.
483
484 Returns:
485 The payload as a non-unicode string.
486
487 Raises:
488 InvalidTaskError if the payload is not a string or unicode instance.
489 """
490 if isinstance(payload, unicode):
491 headers.setdefault('content-type', 'text/plain; charset=utf-8')
492 payload = payload.encode('utf-8')
493 elif not isinstance(payload, str):
494 raise InvalidTaskError(
495 'Task payloads must be strings; invalid payload: %r' % payload)
496 return payload
497
498 @property
499 def on_queue_url(self):
500 """Returns True if this Task will run on the queue's URL."""
501 return self.__default_url
502
503 @property
504 def eta_posix(self):
505 """Returns a POSIX timestamp giving when this Task will execute."""
506 if self.__eta_posix is None and self.__eta is not None:
507 self.__eta_posix = Task.__determine_eta_posix(self.__eta)
508 return self.__eta_posix
509
510 @property
511 def eta(self):
512 """Returns a datetime when this Task will execute."""
513 if self.__eta is None and self.__eta_posix is not None:
514 self.__eta = datetime.datetime.fromtimestamp(self.__eta_posix, _UTC)
515 return self.__eta
516
517 @property
518 def headers(self):
519 """Returns a copy of the headers for this Task."""
520 return self.__headers.copy()
521
522 @property
523 def method(self):
524 """Returns the method to use for this Task."""
525 return self.__method
526
527 @property
528 def name(self):
529 """Returns the name of this Task.
530
531 Will be None if using auto-assigned Task names and this Task has not yet
532 been added to a Queue.
533 """
534 return self.__name
535
536 @property
537 def payload(self):
538 """Returns the payload for this task, which may be None."""
539 return self.__payload
540
541 @property
542 def size(self):
543 """Returns the size of this task in bytes."""
544    HEADER_SEPARATOR = len(': \r\n')
545    header_size = sum((len(key) + len(value) + HEADER_SEPARATOR)
546 for key, value in self.__headers_list)
547 return (len(self.__method) + len(self.__payload or '') +
548 len(self.__relative_url) + header_size)
549
550 @property
551 def url(self):
552 """Returns the relative URL for this Task."""
553 return self.__relative_url
554
555 @property
556 def was_enqueued(self):
557 """Returns True if this Task has been enqueued.
558
559 Note: This will not check if this task already exists in the queue.
560 """
561 return self.__enqueued
562
563 def add(self, queue_name=_DEFAULT_QUEUE, transactional=False):
564 """Adds this Task to a queue. See Queue.add."""
565 return Queue(queue_name).add(self, transactional=transactional)
566
567
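# A minimal sketch of constructing and enqueuing a Task (name, URL and delay
# are illustrative): a named POST task with a payload, delayed by 60 seconds.
def _example_task_usage():
  task = Task(payload='hello', url='/worker', name='job-0001', countdown=60)
  task.add()  # equivalent to Queue('default').add(task)
  assert task.was_enqueued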
568class Queue(object):
569 """Represents a Queue."""
570
571 def __init__(self, name=_DEFAULT_QUEUE):
572 """Initializer.
573
574 Args:
575 name: Name of this queue. If not supplied, defaults to the default queue.
576
577 Raises:
578 InvalidQueueNameError if the queue name is invalid.
579 """
580 if not _QUEUE_NAME_RE.match(name):
581 raise InvalidQueueNameError(
582 'Queue name does not match pattern "%s"; found %s' %
583 (_QUEUE_NAME_PATTERN, name))
584 self.__name = name
585 self.__url = '%s/%s' % (_DEFAULT_QUEUE_PATH, self.__name)
586
587 self._app = None
588
589 def add(self, task, transactional=False):
590 """Adds a Task or list of Tasks to this Queue.
591
592    If a list of more than one Task is given, a raised exception does not
593 guarantee that no tasks were added to the queue (unless transactional is set
594 to True). To determine which tasks were successfully added when an exception
595 is raised, check the Task.was_enqueued property.
596
597 Args:
598      task: A Task instance or a list of Task instances that will be added to the
599 queue.
600      transactional: If False, adds the Task(s) to the queue irrespective of the
601        enclosing transaction's success or failure. An exception is raised if
602        True and this is called outside of a transaction. (optional)
603
604 Returns:
605 The Task or list of tasks that was supplied to this method.
606
607 Raises:
608 BadTaskStateError: if the Task(s) has already been added to a queue.
609 BadTransactionStateError: if the transactional argument is true but this
610 call is being made outside of the context of a transaction.
611 Error-subclass on application errors.
612 """
613 try:
614 tasks = list(iter(task))
615 except TypeError:
616 tasks = [task]
617 multiple = False
618 else:
619 multiple = True
620
621 self.__AddTasks(tasks, transactional)
622
623 if multiple:
624 return tasks
625 else:
626 assert len(tasks) == 1
627 return tasks[0]
628
629 def __AddTasks(self, tasks, transactional):
630 """Internal implementation of .add() where tasks must be a list."""
631
632 request = taskqueue_service_pb.TaskQueueBulkAddRequest()
633 response = taskqueue_service_pb.TaskQueueBulkAddResponse()
634
635 task_names = set()
636 for task in tasks:
637 if task.name:
638 if task.name in task_names:
639 raise DuplicateTaskNameError(
640 'The task name %r is used more than once in the request' %
641 task.name)
642 task_names.add(task.name)
643
644 self.__FillAddRequest(task, request.add_add_request(), transactional)
645
646 try:
647 apiproxy_stub_map.MakeSyncCall('taskqueue', 'BulkAdd', request, response)
648 except apiproxy_errors.ApplicationError, e:
649 raise self.__TranslateError(e.application_error, e.error_detail)
650
651 assert response.taskresult_size() == len(tasks), (
652 'expected %d results from BulkAdd(), got %d' % (
653 len(tasks), response.taskresult_size()))
654
655 exception = None
656 for task, task_result in zip(tasks, response.taskresult_list()):
657 if task_result.result() == taskqueue_service_pb.TaskQueueServiceError.OK:
658 if task_result.has_chosen_task_name():
659 task._Task__name = task_result.chosen_task_name()
660 task._Task__enqueued = True
661 elif (task_result.result() ==
662 taskqueue_service_pb.TaskQueueServiceError.SKIPPED):
663 pass
664 elif exception is None:
665 exception = self.__TranslateError(task_result.result())
666
667 if exception is not None:
668 raise exception
669
670 return tasks
671
672 def __FillAddRequest(self, task, task_request, transactional):
673 """Populates a TaskQueueAddRequest with the data from a Task instance.
674
675 Args:
676 task: The Task instance to use as a source for the data to be added to
677 task_request.
678 task_request: The taskqueue_service_pb.TaskQueueAddRequest to populate.
679 transactional: If true then populates the task_request.transaction message
680 with information from the enclosing transaction (if any).
681
682 Raises:
683 BadTaskStateError: If the task was already added to a Queue.
684 BadTransactionStateError: If the transactional argument is True and there
685 is no enclosing transaction.
686 InvalidTaskNameError: If the transactional argument is True and the task
687 is named.
688 """
689 if task.was_enqueued:
690 raise BadTaskStateError('Task has already been enqueued')
691
692 adjusted_url = task.url
693 if task.on_queue_url:
694 adjusted_url = self.__url + task.url
695
696
697 task_request.set_queue_name(self.__name)
698 task_request.set_eta_usec(long(task.eta_posix * 1e6))
699 task_request.set_method(_METHOD_MAP.get(task.method))
700 task_request.set_url(adjusted_url)
701
702 if task.name:
703 task_request.set_task_name(task.name)
704 else:
705 task_request.set_task_name('')
706
707 if task.payload:
708 task_request.set_body(task.payload)
709 for key, value in _flatten_params(task.headers):
710 header = task_request.add_header()
711 header.set_key(key)
712 header.set_value(value)
713
714 if self._app:
715 task_request.set_app_id(self._app)
716
717 if transactional:
718 from google.appengine.api import datastore
719 if not datastore._MaybeSetupTransaction(task_request, []):
720 raise BadTransactionStateError(
721 'Transactional adds are not allowed outside of transactions')
722
723 if task_request.has_transaction() and task.name:
724 raise InvalidTaskNameError(
725 'Task bound to a transaction cannot be named.')
726
727 @property
728 def name(self):
729 """Returns the name of this queue."""
730 return self.__name
731
732 @staticmethod
733 def __TranslateError(error, detail=''):
734 """Translates a TaskQueueServiceError into an exception.
735
736 Args:
737 error: Value from TaskQueueServiceError enum.
738 detail: A human-readable description of the error.
739
740 Returns:
741 The corresponding Exception sub-class for that error code.
742 """
743 if (error >= taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR
744 and isinstance(error, int)):
745 from google.appengine.api import datastore
746 datastore_exception = datastore._DatastoreExceptionFromErrorCodeAndDetail(
747 error - taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR,
748 detail)
749
750 class JointException(datastore_exception.__class__, DatastoreError):
751 """There was a datastore error while accessing the queue."""
752 __msg = (u'taskqueue.DatastoreError caused by: %s %s' %
753 (datastore_exception.__class__, detail))
754 def __str__(self):
755 return JointException.__msg
756
757 return JointException()
758 else:
759 exception_class = _ERROR_MAPPING.get(error, None)
760 if exception_class:
761 return exception_class(detail)
762 else:
763 return Error('Application error %s: %s' % (error, detail))
764
765
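# A minimal sketch of how callers use the translation above (queue and task
# names illustrative): application code catches the exception types defined in
# this module rather than raw apiproxy errors.
def _example_error_handling():
  try:
    Queue('emails').add(Task(url='/send', name='send-42'))
  except (TaskAlreadyExistsError, TombstonedTaskError):
    pass  # the name was used before; treat the add as a no-op
  except TransientError:
    pass  # temporary queue failure; safe to retry later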
766def add(*args, **kwargs):
767  """Convenience method that creates a Task and adds it to a queue.
768
769 All parameters are optional.
770
771 Args:
772 name: Name to give the Task; if not specified, a name will be
773 auto-generated when added to a queue and assigned to this object. Must
774 match the _TASK_NAME_PATTERN regular expression.
775 queue_name: Name of this queue. If not supplied, defaults to
776 the default queue.
777 url: Relative URL where the webhook that should handle this task is
778 located for this application. May have a query string unless this is
779 a POST method.
780 method: Method to use when accessing the webhook. Defaults to 'POST'.
781 headers: Dictionary of headers to pass to the webhook. Values in the
782 dictionary may be iterable to indicate repeated header fields.
783 payload: The payload data for this Task that will be delivered to the
784 webhook as the HTTP request body. This is only allowed for POST and PUT
785 methods.
786 params: Dictionary of parameters to use for this Task. For POST requests
787 these params will be encoded as 'application/x-www-form-urlencoded' and
788 set to the payload. For all other methods, the parameters will be
789 converted to a query string. May not be specified if the URL already
790 contains a query string.
791    transactional: If False, adds the Task(s) to the queue irrespective of the
792      enclosing transaction's success or failure. An exception is raised if
793      True and this is called outside of a transaction. (optional)
794 countdown: Time in seconds into the future that this Task should execute.
795 Defaults to zero.
796 eta: Absolute time when the Task should execute. May not be specified
797 if 'countdown' is also supplied. This may be timezone-aware or
798 timezone-naive.
799
800 Returns:
801 The Task that was added to the queue.
802
803 Raises:
804 InvalidTaskError if any of the parameters are invalid;
805 InvalidTaskNameError if the task name is invalid; InvalidUrlError if
806 the task URL is invalid or too long; TaskTooLargeError if the task with
807 its payload is too large.
808 """
809 transactional = kwargs.pop('transactional', False)
810 queue_name = kwargs.pop('queue_name', _DEFAULT_QUEUE)
811 return Task(*args, **kwargs).add(
812 queue_name=queue_name, transactional=transactional)
813
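# A minimal sketch of the convenience wrapper (handler path illustrative): the
# params dict becomes the POST body of the webhook request.
def _example_add_shortcut():
  add(url='/tasks/resize', params={'key': 'abc123'}, countdown=10)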
814def getLang(file):
815 supportedExtensions = {
816 "rb" : "ruby",
817 "py" : "python",
818 "pl" : "perl",
819 }
820
821  # map the file's extension to the interpreter used to run it
822 extension = file.split(".")[-1]
823
824 try:
825 lang = supportedExtensions[extension]
826 return lang
827  except KeyError:
828 sys.stderr.write("extension " + extension + " not recognized\n")
829 return "none"
830
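# A minimal sketch of the mapping above (filenames illustrative):
def _example_getLang():
  assert getLang('wordcount.py') == 'python'
  assert getLang('job.sh') == 'none'  # unrecognized; a warning goes to stderr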
831def writeTempFile(suffix, data):
832 suffix = urllib.unquote(suffix)
833 regex = r"[^\w\d/\.-]"
834 pattern = re.compile(regex)
835 suffix = pattern.sub('', suffix)
836
837 fileLoc = "/tmp/" + suffix
838 f = open(fileLoc, "w+")
839 f.write(data)
840 f.close()
841 return fileLoc
842
843def getAllIPs():
844 all_ips = []
845 fileLoc = APPSCALE_HOME + "/.appscale/all_ips"
846 if os.path.exists(fileLoc):
847 f = open(fileLoc)
848    all_ips = f.read().split()  # one address per line
849  return all_ips
850def getNumOfNodes():
851 num_of_nodes = 0
852 fileLoc = APPSCALE_HOME + "/.appscale/num_of_nodes"
853 if os.path.exists(fileLoc):
854 f = open(fileLoc)
855 num_of_nodes = int(f.read())
856
857 return num_of_nodes
858
859def putMRInput(data, inputLoc):
860 inputLoc = urllib.unquote(inputLoc)
861 regex = r"[^\w\d/\.-]"
862 pattern = re.compile(regex)
863 inputLoc = pattern.sub('', inputLoc)
864
865 fileLoc = "/tmp/" + inputLoc
866 f = open(fileLoc, "w+")
867 f.write(data)
868 f.close()
869
870 removeInput = APPSCALE_HOME + "/AppDB/hadoop-0.20.0/bin/hadoop fs -rmr " + inputLoc
871 sys.stderr.write(removeInput + "\n")
872 os.system(removeInput)
873
874 put = APPSCALE_HOME + "/AppDB/hadoop-0.20.0/bin/hadoop fs -put " + fileLoc + " " + inputLoc
875 os.system(put)
876
877
878def runMRJob(mapper, reducer, inputLoc, outputLoc, config={}):
879 mapper = urllib.unquote(mapper)
880 reducer = urllib.unquote(reducer)
881 inputLoc = urllib.unquote(inputLoc)
882 outputLoc = urllib.unquote(outputLoc)
883
884 regex = r"[^\w\d/\.-]"
885 pattern = re.compile(regex)
886
887 mydir = os.getcwd() + "/"
888 mapper = "\"" + getLang(mapper) + " " + mydir + pattern.sub('', mapper) + "\""
889 reducer = "\"" + getLang(reducer) + " " + mydir + pattern.sub('', reducer) + "\""
890 inputLoc = pattern.sub('', inputLoc)
891 outputLoc = pattern.sub('', outputLoc)
892
893 removeOutput = APPSCALE_HOME + "/AppDB/hadoop-0.20.0/bin/hadoop fs -rmr " + outputLoc
894 sys.stderr.write(removeOutput + "\n")
895 os.system(removeOutput)
896
897 formattedConfig = ""
898 for key in config:
899 formattedConfig = formattedConfig + " -D " + key + "=" + config[key]
900
901 command = APPSCALE_HOME + "/AppDB/hadoop-0.20.0/bin/hadoop jar " + APPSCALE_HOME + "/AppDB/hadoop-0.20.0/contrib/streaming/hadoop-0.20.0-streaming.jar " + formattedConfig + " -input " + inputLoc + " -output " + outputLoc + " -mapper " + mapper + " -reducer " + reducer
902 sys.stderr.write("\n" + command + "\n")
903 start = time.time()
904 os.system(command)
905 end = time.time()
906 sys.stderr.write("\nTime elapsed = " + str(end - start) + "seconds\n")
907
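# A minimal sketch of launching a streaming job (paths illustrative): mapper
# and reducer scripts are resolved against os.getcwd(), and each config entry
# becomes a -D option on the hadoop command line.
def _example_runMRJob():
  runMRJob('mapper.py', 'reducer.py', 'job-input', 'job-output',
           config={'mapred.reduce.tasks': '4'})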
908def getMROutput(outputLoc):
909 outputLoc = urllib.unquote(outputLoc)
910 regex = r"[^\w\d/\.-]"
911 pattern = re.compile(regex)
912 outputLoc = pattern.sub('', outputLoc)
913
914 fileLoc = "/tmp/" + outputLoc
915
916 rmr = "rm -rf " + fileLoc
917 os.system(rmr)
918 get = APPSCALE_HOME + "/AppDB/hadoop-0.20.0/bin/hadoop fs -get " + outputLoc + " " + fileLoc
919 os.system(get)
920
921 contents = "no output"
922 if os.path.exists(fileLoc):
923 cmd = "cat " + fileLoc + "/part*"
924 contents = os.popen(cmd).read()
925
926
927 sys.stderr.write(contents)
928 return contents
929
930def getMRLogs(outputLoc):
931 outputLoc = urllib.unquote(outputLoc)
932 regex = r"[^\w\d/\.-]"
933 pattern = re.compile(regex)
934 outputLoc = pattern.sub('', outputLoc)
935
936 fileLoc = "/tmp/" + outputLoc
937
938 rmr = "rm -rf " + fileLoc
939 os.system(rmr)
940
941 get = APPSCALE_HOME + "/AppDB/hadoop-0.20.0/bin/hadoop fs -get " + outputLoc + " " + fileLoc
942 os.system(get)
943
944 contents = "no logs"
945 if os.path.exists(fileLoc):
946 cmd = "cat " + fileLoc + "/_logs/history/*"
947 contents = os.popen(cmd).read()
948
949
950 sys.stderr.write(contents)
951 return contents
952
953
=== removed file 'AppServer/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py'
--- AppServer/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py 2010-11-30 10:37:25 +0000
+++ AppServer/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py 1970-01-01 00:00:00 +0000
@@ -1,5229 +0,0 @@
1#!/usr/bin/env python
2#
3# Copyright 2007 Google Inc.
4#
5# Licensed under the Apache License, Version 2.0 (the "License");
6# you may not use this file except in compliance with the License.
7# You may obtain a copy of the License at
8#
9# http://www.apache.org/licenses/LICENSE-2.0
10#
11# Unless required by applicable law or agreed to in writing, software
12# distributed under the License is distributed on an "AS IS" BASIS,
13# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14# See the License for the specific language governing permissions and
15# limitations under the License.
16#
17
18from google.net.proto import ProtocolBuffer
19import array
20import dummy_thread as thread
21
22__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
23 unusednames=printElemNumber,debug_strs no-special"""
24
25from google.appengine.datastore.datastore_v3_pb import *
26import google.appengine.datastore.datastore_v3_pb
27from google.net.proto.message_set import MessageSet
28class TaskQueueServiceError(ProtocolBuffer.ProtocolMessage):
29
30 OK = 0
31 UNKNOWN_QUEUE = 1
32 TRANSIENT_ERROR = 2
33 INTERNAL_ERROR = 3
34 TASK_TOO_LARGE = 4
35 INVALID_TASK_NAME = 5
36 INVALID_QUEUE_NAME = 6
37 INVALID_URL = 7
38 INVALID_QUEUE_RATE = 8
39 PERMISSION_DENIED = 9
40 TASK_ALREADY_EXISTS = 10
41 TOMBSTONED_TASK = 11
42 INVALID_ETA = 12
43 INVALID_REQUEST = 13
44 UNKNOWN_TASK = 14
45 TOMBSTONED_QUEUE = 15
46 DUPLICATE_TASK_NAME = 16
47 SKIPPED = 17
48 TOO_MANY_TASKS = 18
49 INVALID_PAYLOAD = 19
50 INVALID_RETRY_PARAMETERS = 20
51 DATASTORE_ERROR = 10000
52
53 _ErrorCode_NAMES = {
54 0: "OK",
55 1: "UNKNOWN_QUEUE",
56 2: "TRANSIENT_ERROR",
57 3: "INTERNAL_ERROR",
58 4: "TASK_TOO_LARGE",
59 5: "INVALID_TASK_NAME",
60 6: "INVALID_QUEUE_NAME",
61 7: "INVALID_URL",
62 8: "INVALID_QUEUE_RATE",
63 9: "PERMISSION_DENIED",
64 10: "TASK_ALREADY_EXISTS",
65 11: "TOMBSTONED_TASK",
66 12: "INVALID_ETA",
67 13: "INVALID_REQUEST",
68 14: "UNKNOWN_TASK",
69 15: "TOMBSTONED_QUEUE",
70 16: "DUPLICATE_TASK_NAME",
71 17: "SKIPPED",
72 18: "TOO_MANY_TASKS",
73 19: "INVALID_PAYLOAD",
74 20: "INVALID_RETRY_PARAMETERS",
75 10000: "DATASTORE_ERROR",
76 }
77
78 def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
79 ErrorCode_Name = classmethod(ErrorCode_Name)
80
81
82 def __init__(self, contents=None):
83 pass
84 if contents is not None: self.MergeFromString(contents)
85
86
87 def MergeFrom(self, x):
88 assert x is not self
89
90 def Equals(self, x):
91 if x is self: return 1
92 return 1
93
94 def IsInitialized(self, debug_strs=None):
95 initialized = 1
96 return initialized
97
98 def ByteSize(self):
99 n = 0
100 return n + 0
101
102 def Clear(self):
103 pass
104
105 def OutputUnchecked(self, out):
106 pass
107
108 def TryMerge(self, d):
109 while d.avail() > 0:
110 tt = d.getVarInt32()
111 if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
112 d.skipData(tt)
113
114
115 def __str__(self, prefix="", printElemNumber=0):
116 res=""
117 return res
118
119
120 def _BuildTagLookupTable(sparse, maxtag, default=None):
121 return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
122
123
124 _TEXT = _BuildTagLookupTable({
125 0: "ErrorCode",
126 }, 0)
127
128 _TYPES = _BuildTagLookupTable({
129 0: ProtocolBuffer.Encoder.NUMERIC,
130 }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
131
132 _STYLE = """"""
133 _STYLE_CONTENT_TYPE = """"""
134class TaskQueueRetryParameters(ProtocolBuffer.ProtocolMessage):
135 has_retry_limit_ = 0
136 retry_limit_ = 0
137 has_age_limit_sec_ = 0
138 age_limit_sec_ = 0
139 has_min_backoff_sec_ = 0
140 min_backoff_sec_ = 0.1
141 has_max_backoff_sec_ = 0
142 max_backoff_sec_ = 3600.0
143 has_max_doublings_ = 0
144 max_doublings_ = 16
145
146 def __init__(self, contents=None):
147 if contents is not None: self.MergeFromString(contents)
148
149 def retry_limit(self): return self.retry_limit_
150
151 def set_retry_limit(self, x):
152 self.has_retry_limit_ = 1
153 self.retry_limit_ = x
154
155 def clear_retry_limit(self):
156 if self.has_retry_limit_:
157 self.has_retry_limit_ = 0
158 self.retry_limit_ = 0
159
160 def has_retry_limit(self): return self.has_retry_limit_
161
162 def age_limit_sec(self): return self.age_limit_sec_
163
164 def set_age_limit_sec(self, x):
165 self.has_age_limit_sec_ = 1
166 self.age_limit_sec_ = x
167
168 def clear_age_limit_sec(self):
169 if self.has_age_limit_sec_:
170 self.has_age_limit_sec_ = 0
171 self.age_limit_sec_ = 0
172
173 def has_age_limit_sec(self): return self.has_age_limit_sec_
174
175 def min_backoff_sec(self): return self.min_backoff_sec_
176
177 def set_min_backoff_sec(self, x):
178 self.has_min_backoff_sec_ = 1
179 self.min_backoff_sec_ = x
180
181 def clear_min_backoff_sec(self):
182 if self.has_min_backoff_sec_:
183 self.has_min_backoff_sec_ = 0
184 self.min_backoff_sec_ = 0.1
185
186 def has_min_backoff_sec(self): return self.has_min_backoff_sec_
187
188 def max_backoff_sec(self): return self.max_backoff_sec_
189
190 def set_max_backoff_sec(self, x):
191 self.has_max_backoff_sec_ = 1
192 self.max_backoff_sec_ = x
193
194 def clear_max_backoff_sec(self):
195 if self.has_max_backoff_sec_:
196 self.has_max_backoff_sec_ = 0
197 self.max_backoff_sec_ = 3600.0
198
199 def has_max_backoff_sec(self): return self.has_max_backoff_sec_
200
201 def max_doublings(self): return self.max_doublings_
202
203 def set_max_doublings(self, x):
204 self.has_max_doublings_ = 1
205 self.max_doublings_ = x
206
207 def clear_max_doublings(self):
208 if self.has_max_doublings_:
209 self.has_max_doublings_ = 0
210 self.max_doublings_ = 16
211
212 def has_max_doublings(self): return self.has_max_doublings_
213
214
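  # A minimal sketch of using the generated accessors (values illustrative):
  #   p = TaskQueueRetryParameters()
  #   p.set_retry_limit(5)
  #   p.set_min_backoff_sec(0.5)
  #   assert p.has_retry_limit() and p.retry_limit() == 5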
215 def MergeFrom(self, x):
216 assert x is not self
217 if (x.has_retry_limit()): self.set_retry_limit(x.retry_limit())
218 if (x.has_age_limit_sec()): self.set_age_limit_sec(x.age_limit_sec())
219 if (x.has_min_backoff_sec()): self.set_min_backoff_sec(x.min_backoff_sec())
220 if (x.has_max_backoff_sec()): self.set_max_backoff_sec(x.max_backoff_sec())
221 if (x.has_max_doublings()): self.set_max_doublings(x.max_doublings())
222
223 def Equals(self, x):
224 if x is self: return 1
225 if self.has_retry_limit_ != x.has_retry_limit_: return 0
226 if self.has_retry_limit_ and self.retry_limit_ != x.retry_limit_: return 0
227 if self.has_age_limit_sec_ != x.has_age_limit_sec_: return 0
228 if self.has_age_limit_sec_ and self.age_limit_sec_ != x.age_limit_sec_: return 0
229 if self.has_min_backoff_sec_ != x.has_min_backoff_sec_: return 0
230 if self.has_min_backoff_sec_ and self.min_backoff_sec_ != x.min_backoff_sec_: return 0
231 if self.has_max_backoff_sec_ != x.has_max_backoff_sec_: return 0
232 if self.has_max_backoff_sec_ and self.max_backoff_sec_ != x.max_backoff_sec_: return 0
233 if self.has_max_doublings_ != x.has_max_doublings_: return 0
234 if self.has_max_doublings_ and self.max_doublings_ != x.max_doublings_: return 0
235 return 1
236
237 def IsInitialized(self, debug_strs=None):
The diff has been truncated for viewing.

Subscribers

People subscribed via source and target branches