Merge lp:~nchohan/appscale/GAE1.4.0-namespaces into lp:appscale

Proposed by Navraj Chohan
Status: Merged
Merged at revision: 628
Proposed branch: lp:~nchohan/appscale/GAE1.4.0-namespaces
Merge into: lp:appscale
Diff against target: 33458 lines (+20094/-9568)
98 files modified
AppController/helperfunctions.rb (+0/-2)
AppDB/appscale_server.py (+41/-62)
AppDB/cassandra/py_cassandra.py (+4/-4)
AppDB/dbconstants.py (+1/-1)
AppDB/helper_functions.py (+1/-1)
AppDB/hypertable/py_hypertable.py (+2/-2)
AppDB/soap_server.py (+13/-28)
AppServer/RELEASE_NOTES (+65/-0)
AppServer/VERSION (+2/-2)
AppServer/google/appengine/api/apiproxy_stub.py (+1/-2)
AppServer/google/appengine/api/apiproxy_stub_map.py (+5/-4)
AppServer/google/appengine/api/appinfo.py (+1/-2)
AppServer/google/appengine/api/blobstore/blobstore_stub.py (+18/-25)
AppServer/google/appengine/api/channel/channel.py (+56/-22)
AppServer/google/appengine/api/channel/channel_service_pb.py (+0/-2)
AppServer/google/appengine/api/datastore.py (+541/-888)
AppServer/google/appengine/api/datastore_distributed.py (+2/-10)
AppServer/google/appengine/api/datastore_file_stub.py (+138/-363)
AppServer/google/appengine/api/datastore_types.py (+7/-7)
AppServer/google/appengine/api/images/__init__.py (+5/-19)
AppServer/google/appengine/api/images/images_stub.py (+5/-1)
AppServer/google/appengine/api/labs/taskqueue/__init__.py (+55/-3)
AppServer/google/appengine/api/labs/taskqueue/taskqueue.py (+0/-953)
AppServer/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py (+0/-5229)
AppServer/google/appengine/api/labs/taskqueue/taskqueue_stub.py (+0/-986)
AppServer/google/appengine/api/mail_stub.py (+7/-2)
AppServer/google/appengine/api/matcher/matcher_stub.py (+1/-5)
AppServer/google/appengine/api/memcache/memcache_stub.py (+5/-1)
AppServer/google/appengine/api/memcache_distributed.py (+0/-295)
AppServer/google/appengine/api/queueinfo.py (+57/-5)
AppServer/google/appengine/api/taskqueue/__init__.py (+34/-0)
AppServer/google/appengine/api/taskqueue/taskqueue.py (+1116/-0)
AppServer/google/appengine/api/taskqueue/taskqueue_distributed.py (+321/-0)
AppServer/google/appengine/api/taskqueue/taskqueue_service_pb.py (+5397/-0)
AppServer/google/appengine/api/taskqueue/taskqueue_stub.py (+1009/-0)
AppServer/google/appengine/api/user_service_stub.py (+1/-1)
AppServer/google/appengine/api/users.py (+2/-8)
AppServer/google/appengine/datastore/datastore_pb.py (+48/-12)
AppServer/google/appengine/datastore/datastore_query.py (+1184/-0)
AppServer/google/appengine/datastore/datastore_rpc.py (+1638/-0)
AppServer/google/appengine/datastore/datastore_sqlite_stub.py (+207/-196)
AppServer/google/appengine/datastore/datastore_stub_util.py (+575/-56)
AppServer/google/appengine/ext/admin/__init__.py (+2/-1)
AppServer/google/appengine/ext/admin/templates/base.html (+8/-3)
AppServer/google/appengine/ext/admin/templates/queues.html (+3/-3)
AppServer/google/appengine/ext/appstats/sample_appengine_config.py (+2/-1)
AppServer/google/appengine/ext/appstats/static/appstats_js.js (+77/-77)
AppServer/google/appengine/ext/appstats/templates/main.html (+2/-2)
AppServer/google/appengine/ext/appstats/ui.py (+2/-2)
AppServer/google/appengine/ext/builtins/appstats/include.yaml (+0/-1)
AppServer/google/appengine/ext/bulkload/bulkloader_config.py (+5/-2)
AppServer/google/appengine/ext/datastore_admin/static/css/compiled.css (+1/-1)
AppServer/google/appengine/ext/datastore_admin/static/js/compiled.js (+1/-1)
AppServer/google/appengine/ext/datastore_admin/testutil.py (+1/-1)
AppServer/google/appengine/ext/db/__init__.py (+70/-85)
AppServer/google/appengine/ext/db/metadata.py (+190/-0)
AppServer/google/appengine/ext/deferred/deferred.py (+1/-1)
AppServer/google/appengine/ext/key_range/__init__.py (+2/-0)
AppServer/google/appengine/ext/mapreduce/handlers.py (+1/-1)
AppServer/google/appengine/ext/mapreduce/input_readers.py (+6/-2)
AppServer/google/appengine/ext/remote_api/remote_api_services.py (+1/-7)
AppServer/google/appengine/ext/remote_api/remote_api_stub.py (+1/-0)
AppServer/google/appengine/ext/search/__init__.py (+16/-15)
AppServer/google/appengine/ext/webapp/__init__.py (+4/-0)
AppServer/google/appengine/ext/webapp/template.py (+81/-19)
AppServer/google/appengine/ext/zipserve/__init__.py (+1/-1)
AppServer/google/appengine/runtime/apiproxy.py (+19/-7)
AppServer/google/appengine/runtime/apiproxy_errors.py (+3/-0)
AppServer/google/appengine/tools/appcfg.py (+9/-9)
AppServer/google/appengine/tools/dev-channel-js.js (+48/-39)
AppServer/google/appengine/tools/dev_appserver.py (+53/-19)
AppServer/google/appengine/tools/dev_appserver_blobimage.py (+5/-10)
AppServer/google/appengine/tools/dev_appserver_login.py (+5/-9)
AppServer/google/appengine/tools/dev_appserver_main.py (+54/-21)
AppServer/google/net/proto/ProtocolBuffer.py (+0/-1)
AppServer/google/net/proto/RawMessage.py (+1/-1)
AppServer/google/net/proto2/__init__.py (+16/-0)
AppServer/google/net/proto2/proto/__init__.py (+16/-0)
AppServer/google/net/proto2/proto/descriptor_pb2.py (+1581/-0)
AppServer/google/net/proto2/python/__init__.py (+16/-0)
AppServer/google/net/proto2/python/internal/__init__.py (+16/-0)
AppServer/google/net/proto2/python/internal/api_implementation.py (+34/-0)
AppServer/google/net/proto2/python/internal/containers.py (+239/-0)
AppServer/google/net/proto2/python/internal/decoder.py (+632/-0)
AppServer/google/net/proto2/python/internal/encoder.py (+722/-0)
AppServer/google/net/proto2/python/internal/message_listener.py (+64/-0)
AppServer/google/net/proto2/python/internal/python_message.py (+969/-0)
AppServer/google/net/proto2/python/internal/type_checkers.py (+253/-0)
AppServer/google/net/proto2/python/internal/wire_format.py (+234/-0)
AppServer/google/net/proto2/python/public/__init__.py (+16/-0)
AppServer/google/net/proto2/python/public/descriptor.py (+549/-0)
AppServer/google/net/proto2/python/public/message.py (+225/-0)
AppServer/google/net/proto2/python/public/reflection.py (+124/-0)
AppServer/google/net/proto2/python/public/service.py (+210/-0)
AppServer/google/net/proto2/python/public/service_reflection.py (+263/-0)
AppServer/google/net/proto2/python/public/text_format.py (+657/-0)
AppServer/lib/fancy_urllib/fancy_urllib/__init__.py (+18/-7)
AppServer/lib/webob/LICENSE (+0/-20)
To merge this branch: bzr merge lp:~nchohan/appscale/GAE1.4.0-namespaces
Reviewer: Chris Bunch (review: Approve)
Review via email: mp+44652@code.launchpad.net

Description of the change

GAE 1.4.0
Namespace support for the DB (table naming sketched below)
Fixed a Cassandra issue with improper range queries (it only showed up once namespaces were in use)
Updated pbserver to set the app id on BeginTransaction responses
Removed AppScale versioning from the AppServer completely
Fixed a path issue so MapReduce streaming points to hadoop-0.20.2
Moved the taskqueue code to its new location, no longer under labs (there is an auto function for taskqueue; we should discuss this)
Ran pychecker on a few files and fixed small code issues
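
A minimal sketch of the two datastore-facing changes, using the helper names from the diff below (the row-key layout is an assumption on my part; the diff only shows the new key bounds):

# Sketch only, not the actual AppScale code paths.

def getTableName(app_id, kind, namespace):
    # The entity's namespace (key.name_space()) replaces the old
    # appscale_version component of the table name, so each namespace
    # gets its own entity table.
    return app_id + "___" + kind + "___" + namespace

# Cassandra range fix: the default namespace is the empty string, so
# "guestbook___Greeting___" is a prefix of every namespaced variant
# such as "guestbook___Greeting___ns1". The old bounds,
#     start_key = table_name
#     end_key   = table_name + '~'
# therefore swept up rows from the namespaced tables too (assuming row
# keys look like "<table_name>/<entity_key>"). Terminating both bounds
# with the "/" separator confines the scan to a single table:
table_name = getTableName("guestbook", "Greeting", "")
start_key = table_name + "/"   # first possible row key of this table
end_key = table_name + "/~"    # '~' sorts after other printable ASCII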

628. By Navraj Chohan

GAE 1.4.0, Namespace for DB, Fixed Cassandra issue with improper range queries (showed up for namespaces), Updated pbserver for putting appid for begin trans responses, Removed appscale versioning from AppServer completely, Fixed path issue for MR streaming to point to hadoop-0.20.2, Moved taskqueue code to new location, no longer in labs (there is an auto function for taskqueue, we should discuss this), Ran pychecker on a few files, and fixed small code issues

Revision history for this message
Chris Bunch (cgb-cs) wrote:

Approved - also added new sample apps to tools

review: Approve

Preview Diff

1=== modified file 'AppController/helperfunctions.rb'
2--- AppController/helperfunctions.rb 2010-12-11 18:01:33 +0000
3+++ AppController/helperfunctions.rb 2010-12-24 09:11:16 +0000
4@@ -225,10 +225,8 @@
5 # "--address=#{public_ip}",
6 # "--port=#{port}",
7 # "--datastore_path=#{db_location}",
8- "--appscale_version=#{app_version}",
9 "/var/apps/#{app_name}/app",
10 "-a #{public_ip}",
11- "--appscale_version #{app_version}",
12 ">> /var/apps/#{app_name}/log/server.log 2>&1 &"]
13 start_app = cmd.join(" ")
14 Djinn.log_debug(start_app)
15
16=== modified file 'AppDB/appscale_server.py'
17--- AppDB/appscale_server.py 2010-12-10 23:34:17 +0000
18+++ AppDB/appscale_server.py 2010-12-24 09:11:16 +0000
19@@ -127,8 +127,8 @@
20
21
22
23-def getTableName(app_id, kind, version):
24- return app_id + "___" + kind + "___" + version
25+def getTableName(app_id, kind, namespace):
26+ return app_id + "___" + kind + "___" + namespace
27
28 def getRowKey(app_id, ancestor_list):
29 if ancestor_list == None:
30@@ -217,8 +217,8 @@
31 return key
32
33
34-def getJournalTable(app_id, appscale_version):
35- return JOURNAL_TABLE + "___" + app_id + "___" + str(appscale_version)
36+def getJournalTable(app_id, namespace):
37+ return JOURNAL_TABLE + "___" + app_id + "___" + namespace
38
39 # isChild is None if False
40 # if isChild is None, root is ignored
41@@ -296,7 +296,7 @@
42
43 # remote api request
44 # sends back a response
45- def remote_request(self, app_id, appscale_version, http_request_data):
46+ def remote_request(self, app_id, http_request_data):
47 apirequest = remote_api_pb.Request(http_request_data)
48 apiresponse = remote_api_pb.Response()
49 response = None
50@@ -318,35 +318,27 @@
51 http_request_data = request_data.contents()
52 if method == "Put":
53 response, errcode, errdetail = self.put_request(app_id,
54- appscale_version,
55 http_request_data)
56 elif method == "Get":
57 response, errcode, errdetail = self.get_request(app_id,
58- appscale_version,
59 http_request_data)
60 elif method == "Delete":
61 response, errcode, errdetail = self.delete_request(app_id,
62- appscale_version,
63 http_request_data)
64 elif method == "RunQuery":
65 response, errcode, errdetail = self.run_query(app_id,
66- appscale_version,
67 http_request_data)
68 elif method == "BeginTransaction":
69 response, errcode, errdetail = self.begin_transaction_request(app_id,
70- appscale_version,
71 http_request_data)
72 elif method == "Commit":
73 response, errcode, errdetail = self.commit_transaction_request(app_id,
74- appscale_version,
75 http_request_data)
76 elif method == "Rollback":
77 response, errcode, errdetail = self.rollback_transaction_request(app_id,
78- appscale_version,
79 http_request_data)
80 elif method == "AllocateIds":
81 response, errcode, errdetail = self.allocate_ids_request(app_id,
82- appscale_version,
83 http_request_data)
84 elif method == "CreateIndex":
85 errcode = datastore_pb.Error.PERMISSION_DENIED
86@@ -405,9 +397,10 @@
87 print "errdetail:",errdetail
88 self.write( apiresponse.Encode() )
89
90- def run_query(self, app_id, appscale_version, http_request_data):
91+ def run_query(self, app_id, http_request_data):
92 global app_datastore
93 query = datastore_pb.Query(http_request_data)
94+ namespace = query.name_space()
95 #logger.debug("QUERY:%s" % query)
96 results = []
97 if query.has_transaction():
98@@ -441,7 +434,7 @@
99 else:
100 kind = query.kind()
101 # Fetch query from the datastore #
102- table_name = getTableName(app_id, kind, appscale_version)
103+ table_name = getTableName(app_id, kind, namespace)
104 r = app_datastore.get_table( table_name, ENTITY_TABLE_SCHEMA)
105 err = r[0]
106 if err not in ERROR_CODES:
107@@ -494,7 +487,7 @@
108 elif prev_version != long(ii):
109 # if the versions don't match, a valid version must be fetched
110 journal_key = getJournalKey(row_key, prev_version)
111- journal_table = getJournalTable(app_id, appscale_version)
112+ journal_table = getJournalTable(app_id, namespace)
113 journal_result = app_datastore.get_entity( journal_table,
114 journal_key,
115 JOURNAL_SCHEMA )
116@@ -616,14 +609,15 @@
117 return (clone_qr_pb.Encode(), 0, "")
118
119
120- def begin_transaction_request(self, app_id, appscale_version, http_request_data):
121+ def begin_transaction_request(self, app_id, http_request_data):
122 transaction_pb = datastore_pb.Transaction()
123 # handle = zk.getTransactionID(app_id)
124 handle = zoo_keeper.getTransactionID(app_id)
125 transaction_pb.set_handle(handle)
126+ transaction_pb.set_app(app_id)
127 return (transaction_pb.Encode(), 0, "")
128
129- def commit_transaction_request(self, app_id, appscale_version, http_request_data):
130+ def commit_transaction_request(self, app_id, http_request_data):
131 txn = datastore_pb.Transaction(http_request_data)
132 commitres_pb = datastore_pb.CommitResponse()
133
134@@ -635,13 +629,13 @@
135 datastore_pb.Error.INTERNAL_ERROR,
136 "Unable to release lock")
137
138- def rollback_transaction_request(self, app_id, appscale_version, http_request_data):
139+ def rollback_transaction_request(self, app_id, http_request_data):
140 txn = datastore_pb.Transaction(http_request_data)
141 zoo_keeper.notifyFailedTransaction(app_id, txn.handle())
142 return (api_base_pb.VoidProto().Encode(), 0, "")
143
144
145- def allocate_ids_request(self, app_id, appscale_version, http_request_data):
146+ def allocate_ids_request(self, app_id, http_request_data):
147 return (api_base_pb.VoidProto().Encode(),
148 datastore_pb.Error.PERMISSION_DENIED,
149 'Allocation of block ids not implemented.')
150@@ -714,7 +708,6 @@
151 rollback_req = datastore_pb.Transaction()
152 rollback_req.set_handle(internal_txn)
153 self.rollback_transaction_request(app_id,
154- "version",
155 rollback_req.Encode())
156
157 """ Transaction algorithm for single puts:
158@@ -732,7 +725,7 @@
159 -Commit the transaction
160 -Release the lock from ZK
161 """
162- def put_request(self, app_id, appscale_version, http_request_data):
163+ def put_request(self, app_id, http_request_data):
164 global app_datastore
165 global keySecret
166 global tableHashTable
167@@ -841,7 +834,6 @@
168 if not putreq_pb.has_transaction():
169 begintime = time.time()
170 txn, err, errcode = self.begin_transaction_request(app_id,
171- appscale_version,
172 http_request_data)
173
174 # parse from contents
175@@ -870,7 +862,8 @@
176 # Notify Soap Server of any new tables
177 #######################################
178 # insert key
179- table_name = getTableName(app_id, kind, appscale_version)
180+ namespace = e.key().name_space()
181+ table_name = getTableName(app_id, kind, namespace)
182 #print "Put Using table name:",table_name
183 # Notify Users/Apps table if a new class is being added
184 if table_name not in tableHashTable:
185@@ -879,7 +872,7 @@
186 # This function is reentrant
187 # If the class was deleted, and added a second time there is no
188 # notifying the users/app server of its creation
189- if tableServer.add_class(app_id, kind, keySecret) == "true":
190+ if tableServer.add_class(app_id, kind, namespace, keySecret) == "true":
191 tableHashTable[table_name] = 1
192
193 # Store One Entity #
194@@ -933,7 +926,7 @@
195 "Timeout: Unable to update ZooKeeper on change set for transaction")
196 journalPut = putThread()
197 journal_key = getJournalKey(row_key, txn.handle())
198- journal_table = getJournalTable(app_id, appscale_version)
199+ journal_table = getJournalTable(app_id, namespace)
200 journalPut.setup(app_datastore,
201 journal_table,
202 journal_key,
203@@ -968,7 +961,6 @@
204 if not putreq_pb.has_transaction():
205 committime = time.time()
206 com_res, errcode, errdetail = self.commit_transaction_request(app_id,
207- appscale_version,
208 txn.Encode())
209 if PROFILE: appscale_log.write("COMMIT %d %f\n"%(txn.handle(), time.time() - committime))
210
211@@ -982,7 +974,7 @@
212 return (putresp_pb.Encode(), 0, "")
213
214
215- def get_request(self, app_id, appscale_version, http_request_data):
216+ def get_request(self, app_id, http_request_data):
217 global app_datastore
218 getreq_pb = datastore_pb.GetRequest(http_request_data)
219 #logger.debug("GET_REQUEST: %s" % getreq_pb)
220@@ -1018,7 +1010,8 @@
221 if last_path.has_type():
222 kind = last_path.type()
223 #logger.debug("get: %s___%s___%s %s" % (app_id, kind, appscale_version, str(entity_id)))
224- table_name = getTableName(app_id, kind, appscale_version)
225+ namespace = key.name_space()
226+ table_name = getTableName(app_id, kind, namespace)
227 row_key = getRowKey(app_id,key.path().element_list())
228 #print "get row key:" + str(row_key)
229 #print "table_name:" + str(table_name)
230@@ -1048,7 +1041,7 @@
231 if prev_version == long(NONEXISTANT_TRANSACTION):
232 entity = None
233 else:
234- journal_table = getJournalTable(app_id, appscale_version)
235+ journal_table = getJournalTable(app_id, namespace)
236 journal_key = getJournalKey(row_key, prev_version)
237 r = app_datastore.get_entity(journal_table, journal_key, ENTITY_TABLE_SCHEMA[:1] )
238 err = r[0]
239@@ -1075,7 +1068,7 @@
240 rollback to know which entity group a possible failed
241 transaction belongs to.
242 """
243- def delete_request(self, app_id, appscale_version, http_request_data):
244+ def delete_request(self, app_id, http_request_data):
245 global app_datastore
246 root_key = None
247 txn = None
248@@ -1104,14 +1097,13 @@
249 last_path = key.path().element_list()[-1]
250 if last_path.has_type():
251 kind = last_path.type()
252-
253+ namespace = key.name_space()
254 row_key = getRowKey(app_id, key.path().element_list())
255
256 # All deletes are transactional and per entity if
257 # not already wrapped in a transaction
258 if not delreq_pb.has_transaction():
259 txn, err, errcode = self.begin_transaction_request(app_id,
260- appscale_version,
261 http_request_data)
262 # parse from contents
263 txn = datastore_pb.Transaction(txn)
264@@ -1140,13 +1132,13 @@
265 ##########################
266 # Get the previous version
267 ##########################
268- table_name = getTableName(app_id, kind, appscale_version)
269+ table_name = getTableName(app_id, kind, namespace)
270 field_name_list = ENTITY_TABLE_SCHEMA[1:]
271 r = app_datastore.get_entity( table_name, row_key, field_name_list )
272 err = r[0]
273 if err not in ERROR_CODES:
274 # the table does not exist, hence, the previous value was null
275- # TODO, make its not because the database is down
276+ # TODO, make sure its not because the database is down
277 r = ["DB_ERROR:", NONEXISTANT_TRANSACTION] #
278 if len(r) == 1:
279 r.append(NONEXISTANT_TRANSACTION)
280@@ -1174,7 +1166,7 @@
281 "Timeout: Unable to update ZooKeeper on change set for transaction")
282
283 encoded_delete = DELETED + "/" + row_key
284- journal_table = getJournalTable(app_id, appscale_version)
285+ journal_table = getJournalTable(app_id, namespace)
286 journal_key = getJournalKey(row_key, txn.handle())
287
288 field_name_list = JOURNAL_SCHEMA
289@@ -1191,7 +1183,7 @@
290 datastore_pb.Error.INTERNAL_ERROR,
291 err + ", Unable to write to journal")
292
293- table_name = getTableName(app_id, kind, appscale_version)
294+ table_name = getTableName(app_id, kind, namespace)
295 field_name_list = ENTITY_TABLE_SCHEMA
296 field_value_list = [encoded_delete, str(txn.handle())]
297 err, res = app_datastore.put_entity( table_name,
298@@ -1210,7 +1202,6 @@
299
300 if not delreq_pb.has_transaction():
301 com_res, errcode, errdetail = self.commit_transaction_request(app_id,
302- appscale_version,
303 txn.Encode())
304 if errcode != 0:
305 return (delresp_pb.Encode(), errcode, errdetail)
306@@ -1218,20 +1209,20 @@
307 return (delresp_pb.Encode(), 0, "")
308
309
310- def optimized_delete_request(self, app_id, appscale_version, http_request_data):
311+ def optimized_delete_request(self, app_id, http_request_data):
312 pass
313- def run_optimized_query(self, app_id, appscale_version, http_request_data):
314+ def run_optimized_query(self, app_id, http_request_data):
315 return
316- def optimized_put_request(self, app_id, appscale_version, http_request_data):
317+ def optimized_put_request(self, app_id, http_request_data):
318 pass
319
320- def void_proto(self, app_id, appscale_version, http_request_data):
321+ def void_proto(self, app_id, http_request_data):
322 resp_pb = api_base_pb.VoidProto()
323 print "Got void"
324 #logger.debug("VOID_RESPONSE: %s to void" % resp_pb)
325 return (resp_pb.Encode(), 0, "" )
326
327- def str_proto(self, app_id, appscale_version, http_request_data):
328+ def str_proto(self, app_id, http_request_data):
329 str_pb = api_base_pb.StringProto( http_request_data )
330 composite_pb = datastore_pb.CompositeIndices()
331 print "Got a string proto"
332@@ -1240,7 +1231,7 @@
333 #logger.debug("CompositeIndex response to string: %s" % composite_pb)
334 return (composite_pb.Encode(), 0, "" )
335
336- def int64_proto(self, app_id, appscale_version, http_request_data):
337+ def int64_proto(self, app_id, http_request_data):
338 int64_pb = api_base_pb.Integer64Proto( http_request_data )
339 resp_pb = api_base_pb.VoidProto()
340 print "Got a int 64"
341@@ -1249,7 +1240,7 @@
342 #logger.debug("VOID_RESPONSE to int64: %s" % resp_pb)
343 return (resp_pb.Encode(), 0, "")
344
345- def compositeindex_proto(self, app_id, appscale_version, http_request_data):
346+ def compositeindex_proto(self, app_id, http_request_data):
347 compindex_pb = entity_pb.CompositeIndex( http_request_data)
348 resp_pb = api_base_pb.VoidProto()
349 print "Got Composite Index"
350@@ -1300,14 +1291,14 @@
351 ##############
352 # OTHER TYPE #
353 ##############
354- def unknown_request(self, app_id, appscale_version, http_request_data, pb_type):
355+ def unknown_request(self, app_id, http_request_data, pb_type):
356 #logger.debug("Received Unknown Protocol Buffer %s" % pb_type )
357 print "ERROR: Received Unknown Protocol Buffer <" + pb_type +">.",
358 print "Nothing has been implemented to handle this Protocol Buffer type."
359 print "http request data:"
360 print http_request_data
361 print "http done"
362- self.void_proto(app_id, appscale_version, http_request_data)
363+ self.void_proto(app_id, http_request_data)
364
365
366 #########################
367@@ -1322,27 +1313,15 @@
368 app_data = app_data.split(':')
369 #logger.debug("POST len: %d" % len(app_data))
370
371- if len(app_data) == 5:
372- app_id, user_email, nick_name, auth_domain, appscale_version = app_data
373- os.environ['AUTH_DOMAIN'] = auth_domain
374- os.environ['USER_EMAIL'] = user_email
375- os.environ['USER_NICKNAME'] = nick_name
376- os.environ['APPLICATION_ID'] = app_id
377- elif len(app_data) == 4:
378+ if len(app_data) == 4:
379 app_id, user_email, nick_name, auth_domain = app_data
380 os.environ['AUTH_DOMAIN'] = auth_domain
381 os.environ['USER_EMAIL'] = user_email
382 os.environ['USER_NICKNAME'] = nick_name
383 os.environ['APPLICATION_ID'] = app_id
384- appscale_version = "1"
385- elif len(app_data) == 2:
386- app_id, appscale_version = app_data
387- app_id = app_data[0]
388- os.environ['APPLICATION_ID'] = app_id
389 elif len(app_data) == 1:
390 app_id = app_data[0]
391 os.environ['APPLICATION_ID'] = app_id
392- appscale_version = "1"
393 else:
394 #logger.debug("UNABLE TO EXTRACT APPLICATION DATA")
395 return
396@@ -1352,9 +1331,9 @@
397 #logger.debug("For app version: " + appscale_version)
398
399 if pb_type == "Request":
400- self.remote_request(app_id, appscale_version, http_request_data)
401+ self.remote_request(app_id, http_request_data)
402 else:
403- self.unknown_request(app_id, appscale_version, http_request_data, pb_type)
404+ self.unknown_request(app_id, http_request_data, pb_type)
405 self.finish()
406 def usage():
407 print "AppScale Server"
408
409=== modified file 'AppDB/cassandra/py_cassandra.py'
410--- AppDB/cassandra/py_cassandra.py 2010-10-04 21:01:14 +0000
411+++ AppDB/cassandra/py_cassandra.py 2010-12-24 09:11:16 +0000
412@@ -134,8 +134,8 @@
413 keyslices = []
414 column_parent = ColumnParent(column_family="Standard1")
415 predicate = SlicePredicate(column_names=column_names)
416- start_key = table_name
417- end_key = table_name + '~'
418+ start_key = table_name + "/"
419+ end_key = table_name + '/~'
420 try:
421 client = self.__setup_connection()
422 keyslices = client.get_range_slice(MAIN_TABLE,
423@@ -211,8 +211,8 @@
424 predicate = SlicePredicate(column_names=[])
425 curtime = self.timestamp()
426 path = ColumnPath(COLUMN_FAMILY)
427- start_key = table_name
428- end_key = table_name + '~'
429+ start_key = table_name + "/"
430+ end_key = table_name + '/~'
431 try:
432 client = self.__setup_connection()
433 keyslices = client.get_range_slice(MAIN_TABLE,
434
435=== modified file 'AppDB/dbconstants.py'
436--- AppDB/dbconstants.py 2010-05-10 19:52:37 +0000
437+++ AppDB/dbconstants.py 2010-12-24 09:11:16 +0000
438@@ -1,6 +1,6 @@
439 # Constants
440
441-import os,sys
442+import os
443 APPSCALE_HOME=os.environ.get("APPSCALE_HOME")
444
445 LOG_DIR = "%s/AppDB/logs" % APPSCALE_HOME
446
447=== modified file 'AppDB/helper_functions.py'
448--- AppDB/helper_functions.py 2010-06-23 07:11:04 +0000
449+++ AppDB/helper_functions.py 2010-12-24 09:11:16 +0000
450@@ -52,7 +52,7 @@
451 self.loggingOn = True
452
453 def debug(self, string):
454- if self.loggingOn == True:
455+ if self.loggingOn:
456 self.log_logger.info(string)
457
458 def randomString(length):
459
460=== modified file 'AppDB/hypertable/py_hypertable.py'
461--- AppDB/hypertable/py_hypertable.py 2010-11-16 04:25:22 +0000
462+++ AppDB/hypertable/py_hypertable.py 2010-12-24 09:11:16 +0000
463@@ -76,8 +76,8 @@
464 self.lock.acquire()
465 self.conn = ThriftClient(self.get_local_ip(), THRIFT_PORT)
466 self.ns = self.conn.open_namespace(NS)
467- if PROFILING:
468- self.logger.debug("HT InitConnection: %s"%str(endtime - starttime))
469+ #if PROFILING:
470+ # self.logger.debug("HT InitConnection: %s"%str(endtime - starttime))
471 return self.conn
472
473 def __closeConnection(self, conn):
474
475=== modified file 'AppDB/soap_server.py'
476--- AppDB/soap_server.py 2010-10-04 21:01:14 +0000
477+++ AppDB/soap_server.py 2010-12-24 09:11:16 +0000
478@@ -2,28 +2,20 @@
479 # 2nd major revision: No longer are tables being cached in memory
480 # See LICENSE file
481
482-import string, cgi
483-import sys
484-import os
485
486 # we don't use PYTHONPATH now.
487 #PYTHON_PATH = os.environ.get("PYTHONPATH")
488 #print "Python path: ",PYTHON_PATH
489-print sys.path
490+#print sys.path
491
492+import sys
493 import SOAPpy
494 import time
495-import socket
496 import datetime
497 import re
498-import cgitb; #cgitb.enable()
499-import getopt
500-import logging
501-import logging.handlers
502 from dbconstants import *
503 import appscale_datastore
504 import appscale_logger
505-import pickle
506 from M2Crypto import SSL
507
508 logger = appscale_logger.getLogger("soap_server")
509@@ -323,7 +315,7 @@
510 if secret != super_secret:
511 #logger.error("commit_new_user: bad secret")
512 return "Error: bad secret"
513- if DEBUG: "Commiting a new user %s"%user
514+ if DEBUG: print "Commiting a new user %s"%user
515 error = "Error: username should be an email"
516 # look for the @ and . in the email
517 if user.find("@") == -1 or user.find(".") == -1:
518@@ -368,7 +360,7 @@
519
520 error = "Error: appname/language can only be alpha numeric"
521
522- if language.isalnum() == False:
523+ if not language.isalnum():
524 #logger.error("language %s is not alpha numeric" % language)
525 if DEBUG: print error
526 return error
527@@ -411,7 +403,7 @@
528 else:
529 #logger.error("creating a new app: %s failed %s" % (appname, result[0]))
530 return "false"
531- return "true"
532+ return ret
533 else:
534 error = "Error: User not found"
535 #logger.error(error)
536@@ -426,7 +418,6 @@
537 #logger.debug("get_tar: bad secret")
538 return "Error: bad secret"
539 if DEBUG: print "get_tar: entry"
540- error = "Error: unable to find application tar ball. "
541 result = db.get_entity(APP_TABLE, app_name, ["tar_ball"])
542 if result[0] in ERROR_CODES and len(result) == 2:
543 #logger.info("get_tar app:%s length of tar %s" % (app_name, str(len(result[1]))) )
544@@ -443,7 +434,7 @@
545 if DEBUG: print "commit_tar: entry"
546
547 #logger.info("commit_tar app:%s, secret:%s" % (app_name, secret))
548- if DEBUG: "Committing a tar for %s"%app_name
549+ if DEBUG: print "Committing a tar for %s"%app_name
550 if secret != super_secret:
551 #logger.error("commit_tar: bad secret")
552 return "Error: bad secret"
553@@ -625,7 +616,7 @@
554 return "false"
555 return "true"
556
557-def add_class(appname, classname, secret):
558+def add_class(appname, classname, namespace, secret):
559 global db
560 global super_secret
561 global app_schema
562@@ -653,7 +644,7 @@
563 # already in classes list
564 return "true"
565
566- classes += [str(classname)]
567+ classes += [str(classname+"___"+namespace)]
568 classes = ':'.join(classes)
569
570 result = db.put_entity(APP_TABLE, appname, columns, [classes])
571@@ -673,8 +664,8 @@
572 if result[0] not in ERROR_CODES or len(result) == 1:
573 #logger.error("delete_app: Unable to get entity for app %s" %appname)
574 return "false: unable to get entity for app"
575+ """
576 owner = result[1]
577- """
578 result = db.get_entity(USER_TABLE, owner, ['applications'])
579 if result[0] not in ERROR_CODES and len(result) == 1:
580 logger.error("delete_app: Unable to get entity for app %s" %appname)
581@@ -697,7 +688,7 @@
582 return "false: unable to put for user modified app list"
583 """
584 # look up all the class tables of this app and delete their tables
585- result = db.get_entity(APP_TABLE, appname, ["classes", "version"])
586+ result = db.get_entity(APP_TABLE, appname, ["classes"])
587 if result[0] not in ERROR_CODES or len(result) == 1:
588 #logger.error("delete_app: Unable to get classes for app %s"%appname)
589 return "false: unable to get classes for app"
590@@ -706,19 +697,13 @@
591 classes = result[0].split(':')
592 else:
593 classes = []
594- if result[1]:
595- appscale_version = result[1]
596- else:
597- appscale_version = "1"
598- #logger.error("delete_app: Unable to get version number for app %s"%appname)
599-
600 result = db.put_entity(APP_TABLE, appname, ["host", "port"], ["", ""])
601 if result[0] not in ERROR_CODES:
602 #logger.error("delete_app: Unable to delete instances for app %s"%appname)
603 return "false: unable to delete instances"
604
605 for classname in classes:
606- table_name = appname + "___" + classname + "___" + appscale_version
607+ table_name = appname + "___" + classname
608 db.delete_table(table_name)
609 #logger.error("delete_app: removed %s"%table_name)
610
611@@ -791,8 +776,8 @@
612 return "Error: User does not exist"
613
614 result = result[1:]
615- appdrop_rem_token = result[0]
616- appdrop_rem_token_exp = result[1]
617+ #appdrop_rem_token = result[0]
618+ #appdrop_rem_token_exp = result[1]
619 t = datetime.datetime.now()
620 date_change = str(time.mktime(t.timetuple()))
621
622
623=== modified file 'AppServer/BUGS' (properties changed: -x to +x)
624=== modified file 'AppServer/LICENSE' (properties changed: -x to +x)
625=== modified file 'AppServer/README' (properties changed: -x to +x)
626=== modified file 'AppServer/RELEASE_NOTES' (properties changed: -x to +x)
627--- AppServer/RELEASE_NOTES 2010-11-30 10:37:25 +0000
628+++ AppServer/RELEASE_NOTES 2010-12-24 09:11:16 +0000
629@@ -3,6 +3,71 @@
630
631 App Engine Python SDK - Release Notes
632
633+Version 1.4.0
634+================================
635+- The Always On feature allows applications to pay and keep 3 instances of their
636+ application always running, which can significantly reduce application
637+ latency.
638+- Developers can now enable Warmup Requests. By specifying a handler in an
639+ app's app.yaml, App Engine will attempt to send a Warmup Request to initialize
640+ new instances before a user interacts with it. This can reduce the latency an
641+ end-user sees for initializing your application.
642+- The Channel API is now available for all users.
643+- Task Queue has been officially released, and is no longer an experimental
644+ feature. The API import paths that use 'labs' have been deprecated. Task queue
645+ storage will count towards an application's overall storage quota, and will
646+ thus be charged for.
647+- The deadline for Task Queue and Cron requests has been raised to 10 minutes.
648+ Datastore and API deadlines within those requests remain unchanged.
649+- For the Task Queue, developers can specify task retry_parameters in their
650+ queue.yaml.
651+- Apps that have enabled billing are allowed up to 100 queues with the Task
652+ Queue API.
653+- Metadata Queries on the datastore for datastore kinds, namespaces, and entity
654+ properties are available.
655+- URLFetch allowed response size has been increased, up to 32 MB. Request size
656+ is still limited to 1 MB.
657+- The request and response sizes for the Images API have been increased to
658+ 32 MB.
659+- The total size of Memcache batch operations is increased to 32 MB. The 1 MB
660+ limit on individual Memcache objects still applies.
661+- The attachment size for outgoing emails has been increased from 1 MB to 10 MB.
662+ The size limit for incoming emails is still 10 MB.
663+- Size and quantity limits on datastore batch get/put/delete operations have
664+ been removed. Individual entities are still limited to 1 MB, but your app may
665+ batch as many entities together for get/put/delete calls as the overall
666+ datastore deadline will allow for.
667+- When iterating over query results, the datastore will now asynchronously
668+ prefetch results, reducing latency in many cases by 10-15%.
669+- The Admin Console Blacklist page lists the top blacklist rejected visitors.
670+- The automatic image thumbnailing service supports arbitrary crop sizes up to
671+ 1600px.
672+- Overall average instance latency in the Admin Console is now a weighted
673+ average over QPS per instance.
674+- The developer who uploaded an app version can download that version's code
675+ using the appcfg.py download_app command. This feature can be disabled on
676+ a per application basis in the admin console, under the 'Permissions' tab.
677+ Once disabled, code download for the application CANNOT be re-enabled.
678+- Fixed an issue where custom Admin Console pages did not work for Google
679+ Apps for your Domain users.
680+- In the Python runtime, an instance is killed and restarted when a request
681+ handler hits DeadlineExceededError. This should fix an issue related to
682+ intermittent SystemErrors using Django.
683+ http://code.google.com/p/googleappengine/issues/detail?id=772
684+- Allow Django initialization to be moved to appengine_config.py to avoid
685+ Django version conflicts when mixing webapp.template with pure Django.
686+ http://code.google.com/p/googleappengine/issues/detail?id=1758
687+- Fixed an issue with OpenId over SSL.
688+ http://code.google.com/p/googleappengine/issues/detail?id=3393
689+- Fixed an issue on the dev_appserver where login/logout code didn't work using
690+ Python 2.6.
691+ http://code.google.com/p/googleappengine/issues/detail?id=3566
692+- Fixed an issue in the dev_appserver where get_serving_url did not work
693+ for transparent, cropped PNGs:
694+ http://code.google.com/p/googleappengine/issues/detail?id=3887
695+- Fixed an issue with the DatastoreFileStub.
696+ http://code.google.com/p/googleappengine/issues/detail?id=3895
697+
698 Version 1.3.8
699 ==================================
700 - Builtin app.yaml handlers are available for common application functions,
701
702=== modified file 'AppServer/VERSION' (properties changed: -x to +x)
703--- AppServer/VERSION 2010-11-30 10:37:25 +0000
704+++ AppServer/VERSION 2010-12-24 09:11:16 +0000
705@@ -1,3 +1,3 @@
706-release: "1.3.8"
707-timestamp: 1284157741
708+release: "1.4.0"
709+timestamp: 1287687253
710 api_versions: ['1']
711
712=== modified file 'AppServer/demos/guestbook/app.yaml' (properties changed: -x to +x)
713=== modified file 'AppServer/google/appengine/api/apiproxy_stub.py'
714--- AppServer/google/appengine/api/apiproxy_stub.py 2010-11-30 10:37:25 +0000
715+++ AppServer/google/appengine/api/apiproxy_stub.py 2010-12-24 09:11:16 +0000
716@@ -27,7 +27,6 @@
717 import time
718
719
720-
721 MAX_REQUEST_SIZE = 1 << 20
722 DS_STAT_LEVEL = 31
723 logging.addLevelName(DS_STAT_LEVEL, "DS_STAT")
724@@ -81,6 +80,7 @@
725 messages = []
726 assert request.IsInitialized(messages), messages
727 start = time.time()
728+
729 method = getattr(self, '_Dynamic_' + call)
730 method(request, response)
731 end = time.time()
732@@ -88,4 +88,3 @@
733 if service == "datastore_v3":
734 logging.log(DS_STAT_LEVEL,"qtype %s time %s" % (call, (end-start)))
735
736-
737
738=== modified file 'AppServer/google/appengine/api/apiproxy_stub_map.py'
739--- AppServer/google/appengine/api/apiproxy_stub_map.py 2010-12-10 23:34:17 +0000
740+++ AppServer/google/appengine/api/apiproxy_stub_map.py 2010-12-24 09:11:16 +0000
741@@ -237,7 +237,6 @@
742 service: string
743 stub: stub
744 """
745- # Changes made to allow changing stubs dynamically
746 #assert not self.__stub_map.has_key(service), repr(service)
747 self.__stub_map[service] = stub
748
749@@ -370,6 +369,8 @@
750 self.__rpc.callback = self.__internal_callback
751 self.callback = callback
752
753+ self.__class__.__local.may_interrupt_wait = False
754+
755 def __internal_callback(self):
756 """This is the callback set on the low-level RPC object.
757
758@@ -589,9 +590,9 @@
759 cls.__local.may_interrupt_wait = True
760 try:
761 running.__rpc.Wait()
762- except apiproxy_errors.InterruptedError:
763- running.__rpc._RPC__exception = None
764- running.__rpc._RPC__traceback = None
765+ except apiproxy_errors.InterruptedError, err:
766+ err.rpc._RPC__exception = None
767+ err.rpc._RPC__traceback = None
768 finally:
769 cls.__local.may_interrupt_wait = False
770 finished, runnning = cls.__check_one(rpcs)
771
772=== modified file 'AppServer/google/appengine/api/appinfo.py'
773--- AppServer/google/appengine/api/appinfo.py 2010-11-30 10:37:25 +0000
774+++ AppServer/google/appengine/api/appinfo.py 2010-12-24 09:11:16 +0000
775@@ -34,14 +34,13 @@
776 from google.appengine.api import yaml_listener
777 from google.appengine.api import yaml_object
778
779-
780 _URL_REGEX = r'(?!\^)/|\.|(\(.).*(?!\$).'
781 _FILES_REGEX = r'(?!\^).*(?!\$).'
782
783 _DELTA_REGEX = r'([0-9]+)([DdHhMm]|[sS]?)'
784 _EXPIRATION_REGEX = r'\s*(%s)(\s+%s)*\s*' % (_DELTA_REGEX, _DELTA_REGEX)
785
786-_SERVICE_RE_STRING = r'(mail|xmpp_message|rest|startup)'
787+_SERVICE_RE_STRING = r'(mail|xmpp_message|xmpp_subscribe|xmpp_presence|rest|warmup)'
788
789 _PAGE_NAME_REGEX = r'^.+$'
790
791
792=== modified file 'AppServer/google/appengine/api/blobstore/__init__.py' (properties changed: -x to +x)
793=== modified file 'AppServer/google/appengine/api/blobstore/blobstore_stub.py'
794--- AppServer/google/appengine/api/blobstore/blobstore_stub.py 2010-12-13 07:51:16 +0000
795+++ AppServer/google/appengine/api/blobstore/blobstore_stub.py 2010-12-24 09:11:16 +0000
796@@ -17,8 +17,9 @@
797
798 """
799 Modifications for AppScale by Navraj Chohan
800+
801 Datastore backed Blobstore API stub.
802-
803+
804 Class:
805 BlobstoreServiceStub: BlobstoreService stub backed by datastore.
806 """
807@@ -30,6 +31,7 @@
808
809 import os
810 import time
811+
812 from google.appengine.api import apiproxy_stub
813 from google.appengine.api import datastore
814 from google.appengine.api import datastore_errors
815@@ -39,7 +41,6 @@
816 from google.appengine.api.blobstore import blobstore_service_pb
817 from google.appengine.runtime import apiproxy_errors
818
819-import logging
820
821 __all__ = ['BlobStorage',
822 'BlobstoreServiceStub',
823@@ -47,7 +48,6 @@
824 'CreateUploadSession',
825 'Error',
826 ]
827-
828 BLOB_PORT = "6106"
829
830 class Error(Exception):
831@@ -84,6 +84,7 @@
832 'success_path': path,
833 'user': user,
834 'state': 'init'})
835+
836 datastore.Put(entity)
837 return str(entity.key())
838
839@@ -169,8 +170,6 @@
840 self.__time_function = time_function
841 self.__next_session_id = 1
842 self.__uploader_path = uploader_path
843- self.__block_cache = ""
844- self.__block_key_cache = ""
845
846 @property
847 def storage(self):
848@@ -225,10 +224,7 @@
849 """
850 session = self._CreateSession(request.success_path(),
851 users.get_current_user())
852- logging.info("bsstub: %s"%(self.__storage._app_id))
853-
854 response.set_url('http://%s:%s/%s%s/%s' % (self._GetEnviron('SERVER_NAME'),
855- #self._GetEnviron('NGINX_PORT'),
856 BLOB_PORT,
857 self.__uploader_path,
858 self.__storage._app_id,
859@@ -245,6 +241,11 @@
860 response: Not used but should be a VoidProto.
861 """
862 for blob_key in request.blob_key_list():
863+ key = datastore_types.Key.from_path(blobstore.BLOB_INFO_KIND,
864+ str(blob_key),
865+ namespace='')
866+
867+ datastore.Delete(key)
868 self.__storage.DeleteBlob(blob_key)
869
870 def _Dynamic_FetchData(self, request, response):
871@@ -282,23 +283,14 @@
872 blobstore_service_pb.BlobstoreServiceError.BLOB_FETCH_SIZE_TOO_LARGE)
873
874 blob_key = request.blob_key()
875- blob_info_key = datastore.Key.from_path(blobstore.BLOB_INFO_KIND,
876- blob_key,
877- namespace='')
878- try:
879- datastore.Get(blob_info_key)
880- except datastore_errors.EntityNotFoundError, err:
881- raise apiproxy_errors.ApplicationError(
882- blobstore_service_pb.BlobstoreServiceError.BLOB_NOT_FOUND)
883-
884- # Find out the block number from the size
885- # Append that key to the info key and fename=tch the data
886- # Must deal with over lapping boundaries
887+ #blob_info_key = datastore.Key.from_path(blobstore.BLOB_INFO_KIND,
888+ # blob_key,
889+ # namespace='')
890 block_count = int(start_index/blobstore.MAX_BLOB_FETCH_SIZE)
891 block_modulo = int(start_index%blobstore.MAX_BLOB_FETCH_SIZE)
892
893 block_count_end = int(end_index/blobstore.MAX_BLOB_FETCH_SIZE)
894- block_modulo_end = int(end_index%blobstore.MAX_BLOB_FETCH_SIZE)
895+ #block_modulo_end = int(end_index%blobstore.MAX_BLOB_FETCH_SIZE)
896
897 block_key = str(blob_key) + "__" + str(block_count)
898 block_key = datastore.Key.from_path("__BlobChunk__",
899@@ -308,12 +300,13 @@
900 if self.__block_key_cache != str(block_key):
901 try:
902 block = datastore.Get(block_key)
903- except datastore_errors.EntityNotFoundError, err:
904+ except datastore_errors.EntityNotFoundError:
905 raise apiproxy_errors.ApplicationError(
906 blobstore_service_pb.BlobstoreServiceError.BLOB_NOT_FOUND)
907
908 self.__block_cache = block["block"]
909 self.__block_key_cache = str(block_key)
910+
911 # Matching boundaries, start and end are within one fetch
912 if block_count_end == block_count:
913 # Is there enough data to satisfy fetch_size bytes?
914@@ -337,12 +330,12 @@
915 namespace='')
916 try:
917 block = datastore.Get(block_key)
918- except datastore_errors.EntityNotFoundError, err:
919+ except datastore_errors.EntityNotFoundError:
920 raise apiproxy_errors.ApplicationError(
921 blobstore_service_pb.BlobstoreServiceError.BLOB_NOT_FOUND)
922
923 self.__block_cache = block["block"]
924 self.__block_key_cache = str(block_key)
925- data.append(self.__block_cache[0,fetch_size - data_size])
926+ data.append(self.__block_cache[0,fetch_size - data_size])
927 response.set_data(data)
928-
929+
930
931=== modified file 'AppServer/google/appengine/api/channel/channel.py'
932--- AppServer/google/appengine/api/channel/channel.py 2010-11-30 10:40:47 +0000
933+++ AppServer/google/appengine/api/channel/channel.py 2010-12-24 09:11:16 +0000
934@@ -32,29 +32,24 @@
935 from google.appengine.api.channel import channel_service_pb
936 from google.appengine.runtime import apiproxy_errors
937
938-MAX_DURATION = 60 * 60 * 4
939-
940-MAX_SIMULTANEOUS_CONNECTIONS = 10
941+
942+MAXIMUM_CLIENT_ID_LENGTH = 64
943+
944+MAXIMUM_MESSAGE_LENGTH = 32767
945
946
947 class Error(Exception):
948 """Base error class for this module."""
949
950
951-class InvalidChannelKeyError(Error):
952- """Error that indicates a bad channel id."""
953+class InvalidChannelClientIdError(Error):
954+ """Error that indicates a bad client id."""
955
956-class InvalidChannelKeyError(Error):
957- """Error that indicates a bad channel key."""
958
959 class InvalidMessageError(Error):
960 """Error that indicates a message is malformed."""
961
962
963-class ChannelTimeoutError(Error):
964- """Error that indicates the given channel has timed out."""
965-
966-
967 def _ToChannelError(error):
968 """Translate an application error to a channel Error, if possible.
969
970@@ -67,11 +62,9 @@
971 """
972 error_map = {
973 channel_service_pb.ChannelServiceError.INVALID_CHANNEL_KEY:
974- InvalidChannelKeyError,
975+ InvalidChannelClientIdError,
976 channel_service_pb.ChannelServiceError.BAD_MESSAGE:
977 InvalidMessageError,
978- channel_service_pb.ChannelServiceError.CHANNEL_TIMEOUT:
979- ChannelTimeoutError
980 }
981
982 if error.application_error in error_map:
983@@ -88,24 +81,52 @@
984 return 'xmpp'
985
986
987-def create_channel(application_key):
988+def _ValidateClientId(client_id):
989+ """Valides a client id.
990+
991+ Args:
992+ client_id: The client id provided by the application.
993+
994+ Returns:
995+ If the client id is of type str, returns the original client id.
996+ If the client id is of type unicode, returns the id encoded to utf-8.
997+
998+ Raises:
999+ InvalidChannelClientIdError: if client id is not an instance of str or
1000+ unicode, or if the (utf-8 encoded) string is longer than 64 characters.
1001+ """
1002+ if isinstance(client_id, unicode):
1003+ client_id = client_id.encode('utf-8')
1004+ elif not isinstance(client_id, str):
1005+ raise InvalidChannelClientIdError
1006+
1007+ if len(client_id) > MAXIMUM_CLIENT_ID_LENGTH:
1008+ raise InvalidChannelClientIdError
1009+
1010+ return client_id
1011+
1012+
1013+def create_channel(client_id):
1014 """Create a channel.
1015
1016 Args:
1017- application_key: A key to identify this channel on the server side.
1018+ client_id: A string to identify this channel on the server side.
1019
1020 Returns:
1021- A string id that the client can use to connect to the channel.
1022+ A token that the client can use to connect to the channel.
1023
1024 Raises:
1025- InvalidChannelTimeoutError: if the specified timeout is invalid.
1026+ InvalidChannelClientIdError: if clientid is not an instance of str or
1027+ unicode, or if the (utf-8 encoded) string is longer than 64 characters.
1028 Other errors returned by _ToChannelError
1029 """
1030
1031+ client_id = _ValidateClientId(client_id)
1032+
1033 request = channel_service_pb.CreateChannelRequest()
1034 response = channel_service_pb.CreateChannelResponse()
1035
1036- request.set_application_key(application_key)
1037+ request.set_application_key(client_id)
1038
1039 try:
1040 apiproxy_stub_map.MakeSyncCall(_GetService(),
1041@@ -118,20 +139,33 @@
1042 return response.client_id()
1043
1044
1045-def send_message(application_key, message):
1046+def send_message(client_id, message):
1047 """Send a message to a channel.
1048
1049 Args:
1050- application_key: The key passed to create_channel.
1051+ client_id: The client id passed to create_channel.
1052 message: A string representing the message to send.
1053
1054 Raises:
1055+ InvalidChannelClientIdError: if client_id is not an instance of str or
1056+ unicode, or if the (utf-8 encoded) string is longer than 64 characters.
1057+ InvalidMessageError: if the message isn't a string or is too long.
1058 Errors returned by _ToChannelError
1059 """
1060+ client_id = _ValidateClientId(client_id)
1061+
1062+ if isinstance(message, unicode):
1063+ message = message.encode('utf-8')
1064+ elif not isinstance(message, str):
1065+ raise InvalidMessageError
1066+
1067+ if len(message) > MAXIMUM_MESSAGE_LENGTH:
1068+ raise InvalidMessageError
1069+
1070 request = channel_service_pb.SendMessageRequest()
1071 response = api_base_pb.VoidProto()
1072
1073- request.set_application_key(application_key)
1074+ request.set_application_key(client_id)
1075 request.set_message(message)
1076
1077 try:
1078
1079=== modified file 'AppServer/google/appengine/api/channel/channel_service_pb.py'
1080--- AppServer/google/appengine/api/channel/channel_service_pb.py 2010-11-30 10:40:47 +0000
1081+++ AppServer/google/appengine/api/channel/channel_service_pb.py 2010-12-24 09:11:16 +0000
1082@@ -30,14 +30,12 @@
1083 INTERNAL_ERROR = 1
1084 INVALID_CHANNEL_KEY = 2
1085 BAD_MESSAGE = 3
1086- CHANNEL_TIMEOUT = 4
1087
1088 _ErrorCode_NAMES = {
1089 0: "OK",
1090 1: "INTERNAL_ERROR",
1091 2: "INVALID_CHANNEL_KEY",
1092 3: "BAD_MESSAGE",
1093- 4: "CHANNEL_TIMEOUT",
1094 }
1095
1096 def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
1097
1098=== modified file 'AppServer/google/appengine/api/datastore.py'
1099--- AppServer/google/appengine/api/datastore.py 2010-11-30 10:37:25 +0000
1100+++ AppServer/google/appengine/api/datastore.py 2010-12-24 09:11:16 +0000
1101@@ -31,34 +31,26 @@
1102
1103
1104
1105+
1106 import heapq
1107 import itertools
1108 import logging
1109 import os
1110 import re
1111-import string
1112 import sys
1113+import threading
1114 import traceback
1115 from xml.sax import saxutils
1116
1117-from google.appengine.api import api_base_pb
1118-from google.appengine.api import apiproxy_rpc
1119 from google.appengine.api import apiproxy_stub_map
1120 from google.appengine.api import capabilities
1121 from google.appengine.api import datastore_errors
1122 from google.appengine.api import datastore_types
1123-from google.appengine.datastore import datastore_index
1124 from google.appengine.datastore import datastore_pb
1125-from google.appengine.runtime import apiproxy_errors
1126+from google.appengine.datastore import datastore_rpc
1127+from google.appengine.datastore import datastore_query
1128 from google.appengine.datastore import entity_pb
1129
1130-try:
1131- __import__('google.appengine.api.labs.taskqueue.taskqueue_service_pb')
1132- taskqueue_service_pb = sys.modules.get(
1133- 'google.appengine.api.labs.taskqueue.taskqueue_service_pb')
1134-except ImportError:
1135- from google.appengine.api.taskqueue import taskqueue_service_pb
1136-
1137 MAX_ALLOWABLE_QUERIES = 30
1138
1139 MAXIMUM_RESULTS = 1000
1140@@ -72,22 +64,15 @@
1141
1142 _MAX_INDEXED_PROPERTIES = 5000
1143
1144-_MAX_ID_BATCH_SIZE = 1000 * 1000 * 1000
1145+_MAX_ID_BATCH_SIZE = datastore_rpc._MAX_ID_BATCH_SIZE
1146
1147 Key = datastore_types.Key
1148 typename = datastore_types.typename
1149
1150-_txes = {}
1151-
1152-_ALLOWED_API_KWARGS = frozenset(['rpc'])
1153-
1154-_ALLOWED_FAILOVER_READ_METHODS = set(
1155- ('Get', 'RunQuery', 'RunCompiledQuery', 'Count', 'Next'))
1156-
1157-ARBITRARY_FAILOVER_MS = -1
1158-
1159-STRONG_CONSISTENCY = 0
1160-EVENTUAL_CONSISTENCY = 1
1161+_ALLOWED_API_KWARGS = frozenset(['rpc', 'config'])
1162+
1163+STRONG_CONSISTENCY = datastore_rpc.Configuration.STRONG_CONSISTENCY
1164+EVENTUAL_CONSISTENCY = datastore_rpc.Configuration.EVENTUAL_CONSISTENCY
1165
1166 _MAX_INT_32 = 2**31-1
1167
1168@@ -161,24 +146,101 @@
1169 return (keys, multiple)
1170
1171
1172-def GetRpcFromKwargs(kwargs):
1173+def _GetConfigFromKwargs(kwargs):
1174+ """Get a Configuration object from the keyword arguments.
1175+
1176+ This is purely an internal helper for the various public APIs below
1177+ such as Get().
1178+
1179+ Args:
1180+ kwargs: A dict containing the keyword arguments passed to a public API.
1181+
1182+ Returns:
1183+ A UserRPC instance, or a Configuration instance, or None.
1184+
1185+ Raises:
1186+ TypeError if unexpected keyword arguments are present.
1187+ """
1188 if not kwargs:
1189 return None
1190 args_diff = set(kwargs) - _ALLOWED_API_KWARGS
1191 if args_diff:
1192- raise TypeError('Invalid arguments: %s' % ', '.join(args_diff))
1193- return kwargs.get('rpc')
1194-
1195-
1196-def _MakeSyncCall(service, call, request, response, rpc=None):
1197+ raise datastore_errors.BadArgumentError(
1198+ 'Unexpected keyword arguments: %s' % ', '.join(args_diff))
1199+ rpc = kwargs.get('rpc')
1200+ config = kwargs.get('config')
1201+ if rpc is not None:
1202+ if config is not None:
1203+ raise datastore_errors.BadArgumentError(
1204+ 'Expected rpc= or config= argument but not both')
1205+ if isinstance(rpc, (apiproxy_stub_map.UserRPC,
1206+ datastore_rpc.Configuration)):
1207+ return rpc
1208+ raise datastore_errors.BadArgumentError(
1209+ 'rpc= argument should be None or a UserRPC instance')
1210+ if config is not None:
1211+ if not isinstance(config, (datastore_rpc.Configuration,
1212+ apiproxy_stub_map.UserRPC)):
1213+ raise datastore_errors.BadArgumentError(
1214+ 'config= argument should be None or a Configuration instance')
1215+ return config
1216+
1217+
1218+class DatastoreAdapter(datastore_rpc.AbstractAdapter):
1219+ """Adapter between datatypes defined here (Entity etc.) and protobufs.
1220+
1221+ See the base class in datastore_rpc.py for more docs.
1222+ """
1223+
1224+ def key_to_pb(self, key):
1225+ return key._Key__reference
1226+
1227+ def pb_to_key(self, pb):
1228+ return Key._FromPb(pb)
1229+
1230+ def entity_to_pb(self, entity):
1231+ return entity._ToPb()
1232+
1233+ def pb_to_entity(self, pb):
1234+ return Entity._FromPb(pb)
1235+
1236+
1237+_adapter = DatastoreAdapter()
1238+_thread_local = threading.local()
1239+
1240+_ENV_KEY = '__DATASTORE_CONNECTION_INITIALIZED__'
1241+
1242+
1243+def _GetConnection():
1244+ """Retrieve a datastore connection local to the thread."""
1245+ connection = None
1246+ if os.getenv(_ENV_KEY):
1247+ try:
1248+ connection = _thread_local.connection
1249+ except AttributeError:
1250+ pass
1251+ if connection is None:
1252+ connection = datastore_rpc.Connection(adapter=_adapter)
1253+ _SetConnection(connection)
1254+ return connection
1255+
1256+
1257+def _SetConnection(connection):
1258+ """Sets the datastore connection local to the thread."""
1259+ _thread_local.connection = connection
1260+ os.environ[_ENV_KEY] = '1'
1261+
1262+
1263+
1264+def _MakeSyncCall(service, call, request, response, config=None):
1265 """The APIProxy entry point for a synchronous API call.
1266
1267 Args:
1268- service: string representing which service to call
1269- call: string representing which function to call
1270- request: protocol buffer for the request
1271- response: protocol buffer for the response
1272- rpc: datastore.DatastoreRPC to use for this request.
1273+ service: For backwards compatibility, must be 'datastore_v3'.
1274+ call: String representing which function to call.
1275+ request: Protocol buffer for the request.
1276+ response: Protocol buffer for the response.
1277+ config: Optional Configuration to use for this request.
1278
1279 Returns:
1280 Response protocol buffer. Caller should always use returned value
1281@@ -187,67 +249,105 @@
1282 Raises:
1283 apiproxy_errors.Error or a subclass.
1284 """
1285- if not rpc:
1286- rpc = CreateRPC(service)
1287-
1288- rpc.make_call(call, request, response)
1289- rpc.wait()
1290- rpc.check_success()
1291+ conn = _GetConnection()
1292+ if isinstance(request, datastore_pb.Query):
1293+ conn._set_request_read_policy(request, config)
1294+ conn._set_request_transaction(request)
1295+ rpc = conn.make_rpc_call(config, call, request, response)
1296+ conn.check_rpc_success(rpc)
1297 return response
1298
1299
1300-def CreateRPC(service='datastore_v3', deadline=None, callback=None,
1301- read_policy=STRONG_CONSISTENCY):
1302+def CreateRPC(service='datastore_v3',
1303+ deadline=None, callback=None, read_policy=None):
1304 """Create an rpc for use in configuring datastore calls.
1305
1306- Args:
1307- deadline: float, deadline for calls in seconds.
1308- callback: callable, a callback triggered when this rpc completes,
1309- accepts one argument: the returned rpc.
1310- read_policy: flag, set to EVENTUAL_CONSISTENCY to enable eventually
1311- consistent reads
1312-
1313- Returns:
1314- A datastore.DatastoreRPC instance.
1315- """
1316- return DatastoreRPC(service, deadline, callback, read_policy)
1317-
1318-
1319-class DatastoreRPC(apiproxy_stub_map.UserRPC):
1320- """Specialized RPC for the datastore.
1321-
1322- Wraps the default RPC class and sets appropriate values for use by the
1323- datastore.
1324-
1325- This class or a sublcass of it is intended to be instatiated by
1326- developers interested in setting specific request parameters, such as
1327- deadline, on API calls. It will be used to make the actual call.
1328- """
1329-
1330- def __init__(self, service='datastore_v3', deadline=None, callback=None,
1331- read_policy=STRONG_CONSISTENCY):
1332- super(DatastoreRPC, self).__init__(service, deadline, callback)
1333- self.read_policy = read_policy
1334-
1335- def make_call(self, call, request, response):
1336- if self.read_policy == EVENTUAL_CONSISTENCY:
1337- if call not in _ALLOWED_FAILOVER_READ_METHODS:
1338- raise datastore_errors.BadRequestError(
1339- 'read_policy is only supported on read operations.')
1340- if call != 'Next':
1341- request.set_failover_ms(ARBITRARY_FAILOVER_MS)
1342- super(DatastoreRPC, self).make_call(call, request, response)
1343-
1344- def clone(self):
1345- """Make a shallow copy of this instance.
1346-
1347- This is usually used when an RPC has been specified with some configuration
1348- options and is being used as a template for multiple RPCs outside of a
1349- developer's easy control.
1350- """
1351- assert self.state == apiproxy_rpc.RPC.IDLE
1352- return self.__class__(
1353- self.service, self.deadline, self.callback, self.read_policy)
1354+  NOTE: This function exists for backwards compatibility. Please use
1355+  CreateConfig() instead. NOTE: the latter uses 'on_completion',
1356+  which is a function taking an argument, whereas CreateRPC uses
1357+  'callback' which is a function without arguments.
1358+
1359+ Args:
1360+ service: Optional string; for backwards compatibility, must be
1361+ 'datastore_v3'.
1362+ deadline: Optional int or float, deadline for calls in seconds.
1363+ callback: Optional callable, a callback triggered when this rpc
1364+ completes; takes no arguments.
1365+ read_policy: Optional read policy; set to EVENTUAL_CONSISTENCY to
1366+ enable eventually consistent reads (i.e. reads that may be
1367+ satisfied from an older version of the datastore in some cases).
1368+ The default read policy may have to wait until in-flight
1369+ transactions are committed.
1370+
1371+ Returns:
1372+ A UserRPC instance.
1373+ """
1374+ assert service == 'datastore_v3'
1375+ conn = _GetConnection()
1376+ config = None
1377+ if deadline is not None:
1378+ config = datastore_rpc.Configuration(deadline=deadline)
1379+ rpc = conn.create_rpc(config)
1380+ rpc.callback = callback
1381+ if read_policy is not None:
1382+ rpc.read_policy = read_policy
1383+ return rpc
1384+
1385+
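A minimal usage sketch of this backwards-compatible path (not from this branch), assuming an initialized Python 2 App Engine runtime with the datastore stub registered; the 'Greeting' kind is illustrative. Query.Run() below funnels its keyword through _Rpc2Config, so a UserRPC built here is accepted where a Configuration is expected:

    from google.appengine.api import datastore

    def done():
        # CreateRPC callbacks take no arguments, unlike on_completion.
        print 'datastore call finished'

    rpc = datastore.CreateRPC(deadline=5.0, callback=done)
    # _Rpc2Config carries the deadline and read_policy over to a
    # Configuration but deliberately drops the callback (see below).
    results = list(datastore.Query('Greeting').Run(config=rpc))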
1386+def CreateConfig(**kwds):
1387+ """Create a Configuration object for use in configuring datastore calls.
1388+
1389+ This configuration can be passed to most datastore calls using the
1390+ 'config=...' argument.
1391+
1392+ Args:
1393+ deadline: Optional deadline; default None (which means the
1394+ system default deadline will be used, typically 5 seconds).
1395+ on_completion: Optional callback function; default None. If
1396+ specified, it will be called with a UserRPC object as argument
1397+ when an RPC completes.
1398+ read_policy: Optional read policy; set to EVENTUAL_CONSISTENCY to
1399+ enable eventually consistent reads (i.e. reads that may be
1400+ satisfied from an older version of the datastore in some cases).
1401+ The default read policy may have to wait until in-flight
1402+ transactions are committed.
1403+ **kwds: Other keyword arguments as long as they are supported by
1404+ datastore_rpc.Configuration().
1405+
1406+ Returns:
1407+ A datastore_rpc.Configuration instance.
1408+ """
1409+ return datastore_rpc.Configuration(**kwds)
1410+
1411+
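A corresponding sketch of the recommended replacement, under the same assumptions; note that on_completion receives the completing UserRPC, where CreateRPC's callback received nothing:

    from google.appengine.api import datastore

    def on_done(rpc):
        # on_completion is handed the UserRPC that just completed.
        print 'call finished: %r' % rpc

    config = datastore.CreateConfig(
        deadline=5.0,
        on_completion=on_done,
        read_policy=datastore.EVENTUAL_CONSISTENCY)
    entities = datastore.Query('Greeting').Get(10, config=config)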
1412+def _Rpc2Config(rpc):
1413+ """Internal helper to construct a Configuration from a UserRPC object.
1414+
1415+ If the argument is a UserRPC object, it returns a Configuration
1416+ object constructed using the same deadline and read_policy;
1417+ otherwise it returns the argument unchanged.
1418+
1419+ NOTE: If the argument is a UserRPC object, its callback is *not*
1420+ transferred to the Configuration object; the Configuration's
1421+ on_completion attribute is set to None. This is done because (a)
1422+ the signature of on_completion differs from the callback signature;
1423+ (b) the caller probably doesn't expect the callback to be called
1424+ more than once; and (c) the callback, being argument-less, wouldn't
1425+ know which UserRPC object was actually completing. But yes,
1426+ technically, this is a backwards incompatibility.
1427+
1428+ Args:
1429+ rpc: None, a UserRPC object, or a datastore_rpc.Configuration object.
1430+
1431+ Returns:
1432+ None or a datastore_rpc.Configuration object.
1433+ """
1434+ if rpc is None or isinstance(rpc, datastore_rpc.Configuration):
1435+ return rpc
1436+ read_policy = getattr(rpc, 'read_policy', None)
1437+ return datastore_rpc.Configuration(deadline=rpc.deadline,
1438+ read_policy=read_policy,
1439+ config=_GetConnection().config)
1440
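A small sketch of the conversion semantics described above (hypothetical values; _Rpc2Config is module-internal, and this assumes Configuration exposes its options as attributes via the datastore_rpc.ConfigOption descriptors):

    rpc = datastore.CreateRPC(deadline=10.0,
                              callback=lambda: None,
                              read_policy=datastore.EVENTUAL_CONSISTENCY)
    config = datastore._Rpc2Config(rpc)
    assert config.deadline == 10.0       # deadline carries over
    assert config.on_completion is None  # the callback is dropped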
1441
1442 def Put(entities, **kwargs):
1443@@ -261,7 +361,7 @@
1444
1445 Args:
1446 entities: Entity or list of Entities
1447- rpc: datastore.RPC to use for this request.
1448+ config: Optional Configuration to use for this request.
1449
1450 Returns:
1451 Key or list of Keys
1452@@ -269,7 +369,10 @@
1453 Raises:
1454 TransactionFailedError, if the Put could not be committed.
1455 """
1456- rpc = GetRpcFromKwargs(kwargs)
1457+ config = _GetConfigFromKwargs(kwargs)
1458+ if getattr(config, 'read_policy', None) == EVENTUAL_CONSISTENCY:
1459+ raise datastore_errors.BadRequestError(
1460+ 'read_policy is only supported on read operations.')
1461 entities, multiple = NormalizeAndTypeCheck(entities, Entity)
1462
1463 if multiple and not entities:
1464@@ -280,36 +383,25 @@
1465 raise datastore_errors.BadRequestError(
1466 'App and kind must not be empty, in entity: %s' % entity)
1467
1468- req = datastore_pb.PutRequest()
1469- req.entity_list().extend([e._ToPb() for e in entities])
1470-
1471- keys = [e.key() for e in entities]
1472- tx = _MaybeSetupTransaction(req, keys)
1473-
1474- try:
1475- resp = _MakeSyncCall(
1476- 'datastore_v3', 'Put', req, datastore_pb.PutResponse(), rpc)
1477- except apiproxy_errors.ApplicationError, err:
1478- raise _ToDatastoreError(err)
1479-
1480- keys = resp.key_list()
1481- num_keys = len(keys)
1482- num_entities = len(entities)
1483- if num_keys != num_entities:
1484- raise datastore_errors.InternalError(
1485- 'Put accepted %d entities but returned %d keys.' %
1486- (num_entities, num_keys))
1487-
1488- for entity, key in zip(entities, keys):
1489- entity._Entity__key._Key__reference.CopyFrom(key)
1490-
1491- if tx:
1492- tx.entity_group = entities[0].entity_group()
1493-
1494- if multiple:
1495- return [Key._FromPb(k) for k in keys]
1496- else:
1497- return Key._FromPb(resp.key(0))
1498+ def extra_hook(keys):
1499+ num_keys = len(keys)
1500+ num_entities = len(entities)
1501+ if num_keys != num_entities:
1502+ raise datastore_errors.InternalError(
1503+ 'Put accepted %d entities but returned %d keys.' %
1504+ (num_entities, num_keys))
1505+
1506+ for entity, key in zip(entities, keys):
1507+ if entity._Entity__key._Key__reference != key._Key__reference:
1508+ assert not entity._Entity__key.has_id_or_name()
1509+ entity._Entity__key._Key__reference.CopyFrom(key._Key__reference)
1510+
1511+ if multiple:
1512+ return keys
1513+ else:
1514+ return keys[0]
1515+
1516+ return _GetConnection().async_put(config, entities, extra_hook).get_result()
1517
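A quick caller-side sketch of the rewritten Put(); the extra_hook above is what enforces the key/entity count match and copies newly allocated ids back into the passed-in entities ('Greeting' is illustrative):

    from google.appengine.api import datastore

    single = datastore.Entity('Greeting')
    key = datastore.Put(single)              # one entity in, one Key out

    batch = [datastore.Entity('Greeting') for _ in range(3)]
    keys = datastore.Put(batch)              # list in, list of Keys out
    assert all(k.has_id_or_name() for k in keys)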
1518
1519 def Get(keys, **kwargs):
1520@@ -329,39 +421,26 @@
1521 Args:
1522 # the primary key(s) of the entity(ies) to retrieve
1523 keys: Key or string or list of Keys or strings
1524- rpc: datastore.RPC to use for this request.
1525+ config: Optional Configuration to use for this request.
1526
1527 Returns:
1528 Entity or list of Entity objects
1529 """
1530- rpc = GetRpcFromKwargs(kwargs)
1531+ config = _GetConfigFromKwargs(kwargs)
1532 keys, multiple = NormalizeAndTypeCheckKeys(keys)
1533
1534 if multiple and not keys:
1535 return []
1536- req = datastore_pb.GetRequest()
1537- req.key_list().extend([key._Key__reference for key in keys])
1538- _MaybeSetupTransaction(req, keys)
1539-
1540- try:
1541- resp = _MakeSyncCall(
1542- 'datastore_v3', 'Get', req, datastore_pb.GetResponse(), rpc)
1543- except apiproxy_errors.ApplicationError, err:
1544- raise _ToDatastoreError(err)
1545-
1546- entities = []
1547- for group in resp.entity_list():
1548- if group.has_entity():
1549- entities.append(Entity._FromPb(group.entity()))
1550+
1551+ def extra_hook(entities):
1552+ if multiple:
1553+ return entities
1554 else:
1555- entities.append(None)
1556+ if entities[0] is None:
1557+ raise datastore_errors.EntityNotFoundError()
1558+ return entities[0]
1559
1560- if multiple:
1561- return entities
1562- else:
1563- if entities[0] is None:
1564- raise datastore_errors.EntityNotFoundError()
1565- return entities[0]
1566+ return _GetConnection().async_get(config, keys, extra_hook).get_result()
1567
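And the matching Get() semantics, single versus batch form, under the same assumptions:

    from google.appengine.api import datastore
    from google.appengine.api import datastore_errors

    key = datastore.Put(datastore.Entity('Greeting'))
    entity = datastore.Get(key)        # single form
    entities = datastore.Get([key])    # batch form; missing keys yield None

    try:
        datastore.Get(datastore.Key.from_path('Greeting', 999999999))
    except datastore_errors.EntityNotFoundError:
        pass                           # only the single form raises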
1568
1569 def Delete(keys, **kwargs):
1570@@ -374,27 +453,21 @@
1571 Args:
1572 # the primary key(s) of the entity(ies) to delete
1573 keys: Key or string or list of Keys or strings
1574- rpc: datastore.RPC to use for this request.
1575+ config: Optional Configuration to use for this request.
1576
1577 Raises:
1578 TransactionFailedError, if the Delete could not be committed.
1579 """
1580- rpc = GetRpcFromKwargs(kwargs)
1581+ config = _GetConfigFromKwargs(kwargs)
1582+ if getattr(config, 'read_policy', None) == EVENTUAL_CONSISTENCY:
1583+ raise datastore_errors.BadRequestError(
1584+ 'read_policy is only supported on read operations.')
1585 keys, multiple = NormalizeAndTypeCheckKeys(keys)
1586
1587 if multiple and not keys:
1588 return
1589
1590- req = datastore_pb.DeleteRequest()
1591- req.key_list().extend([key._Key__reference for key in keys])
1592-
1593- tx = _MaybeSetupTransaction(req, keys)
1594-
1595- try:
1596- _MakeSyncCall(
1597- 'datastore_v3', 'Delete', req, datastore_pb.DeleteResponse(), rpc)
1598- except apiproxy_errors.ApplicationError, err:
1599- raise _ToDatastoreError(err)
1600+ _GetConnection().async_delete(config, keys).get_result()
1601
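Delete() is now a single async_delete round trip; a sketch:

    from google.appengine.api import datastore

    keys = datastore.Put([datastore.Entity('Greeting') for _ in range(2)])
    datastore.Delete(keys)    # batch delete
    datastore.Delete([])      # an empty list returns early; no RPC is made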
1602
1603 class Entity(dict):
1604@@ -704,7 +777,7 @@
1605 return pb
1606
1607 @staticmethod
1608- def FromPb(pb):
1609+ def FromPb(pb, validate_reserved_properties=True):
1610 """Static factory method. Returns the Entity representation of the
1611 given protocol buffer (datastore_pb.Entity).
1612
1613@@ -719,10 +792,12 @@
1614 real_pb.ParseFromString(pb)
1615 pb = real_pb
1616
1617- return Entity._FromPb(pb, require_valid_key=False)
1618+ return Entity._FromPb(
1619+ pb, require_valid_key=False,
1620+ validate_reserved_properties=validate_reserved_properties)
1621
1622 @staticmethod
1623- def _FromPb(pb, require_valid_key=True):
1624+ def _FromPb(pb, require_valid_key=True, validate_reserved_properties=True):
1625 """Static factory method. Returns the Entity representation of the
1626 given protocol buffer (datastore_pb.Entity). Not intended to be used by
1627 application developers.
1628@@ -790,7 +865,8 @@
1629 for name, value in temporary_values.iteritems():
1630 decoded_name = unicode(name.decode('utf-8'))
1631
1632- datastore_types.ValidateReadProperty(decoded_name, value)
1633+ datastore_types.ValidateReadProperty(
1634+ decoded_name, value, read_only=(not validate_reserved_properties))
1635
1636 dict.__setitem__(e, decoded_name, value)
1637
1638@@ -874,33 +950,29 @@
1639 the query. The returned count is cached; successive Count() calls will not
1640 re-scan the datastore unless the query is changed.
1641 """
1642- ASCENDING = datastore_pb.Query_Order.ASCENDING
1643- DESCENDING = datastore_pb.Query_Order.DESCENDING
1644-
1645- ORDER_FIRST = datastore_pb.Query.ORDER_FIRST
1646- ANCESTOR_FIRST = datastore_pb.Query.ANCESTOR_FIRST
1647- FILTER_FIRST = datastore_pb.Query.FILTER_FIRST
1648-
1649- OPERATORS = {'<': datastore_pb.Query_Filter.LESS_THAN,
1650- '<=': datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL,
1651- '>': datastore_pb.Query_Filter.GREATER_THAN,
1652- '>=': datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL,
1653- '=': datastore_pb.Query_Filter.EQUAL,
1654- '==': datastore_pb.Query_Filter.EQUAL,
1655- }
1656- INEQUALITY_OPERATORS = frozenset(['<', '<=', '>', '>='])
1657+ ASCENDING = datastore_query.PropertyOrder.ASCENDING
1658+ DESCENDING = datastore_query.PropertyOrder.DESCENDING
1659+
1660+ ORDER_FIRST = datastore_query.QueryOptions.ORDER_FIRST
1661+ ANCESTOR_FIRST = datastore_query.QueryOptions.ANCESTOR_FIRST
1662+ FILTER_FIRST = datastore_query.QueryOptions.FILTER_FIRST
1663+
1664+ OPERATORS = {'==': datastore_query.PropertyFilter._OPERATORS['=']}
1665+ OPERATORS.update(datastore_query.PropertyFilter._OPERATORS)
1666+
1667+ INEQUALITY_OPERATORS = datastore_query.PropertyFilter._INEQUALITY_OPERATORS
1668+
1669 UPPERBOUND_INEQUALITY_OPERATORS = frozenset(['<', '<='])
1670 FILTER_REGEX = re.compile(
1671- '^\s*([^\s]+)(\s+(%s)\s*)?$' % '|'.join(OPERATORS.keys()),
1672+ '^\s*([^\s]+)(\s+(%s)\s*)?$' % '|'.join(OPERATORS),
1673 re.IGNORECASE | re.UNICODE)
1674
1675 __kind = None
1676 __app = None
1677 __namespace = None
1678 __orderings = None
1679- __cached_count = None
1680 __hint = None
1681- __ancestor = None
1682+ __ancestor_pb = None
1683 __compile = None
1684
1685 __cursor = None
1686@@ -1099,33 +1171,114 @@
1687 # this query
1688 Query
1689 """
1690- self.__ancestor = _GetCompleteKeyOrError(ancestor)
1691+ self.__ancestor_pb = _GetCompleteKeyOrError(ancestor)._ToPb()
1692 return self
1693
1694 def IsKeysOnly(self):
1695 """Returns True if this query is keys only, false otherwise."""
1696 return self.__keys_only
1697
1698- def GetCompiledCursor(self):
1699+ def GetQueryOptions(self):
1700+ """Returns a datastore_query.QueryOptions for the current instance."""
1701+ return datastore_query.QueryOptions(keys_only=self.__keys_only,
1702+ produce_cursors=self.__compile,
1703+ start_cursor=self.__cursor,
1704+ end_cursor=self.__end_cursor,
1705+ hint=self.__hint)
1706+
1707+ def GetQuery(self):
1708+ """Returns a datastore_query.Query for the current instance."""
1709+ return datastore_query.Query(app=self.__app,
1710+ namespace=self.__namespace,
1711+ kind=self.__kind,
1712+ ancestor=self.__ancestor_pb,
1713+ filter_predicate=self.GetFilterPredicate(),
1714+ order=self.GetOrder())
1715+
1716+ def GetOrder(self):
1717+ """Gets a datastore_query.Order for the current instance.
1718+
1719+ Returns:
1720+ datastore_query.Order or None if there are no sort orders set on the
1721+ current Query.
1722+ """
1723+
1724+ orders = [datastore_query.PropertyOrder(property, direction)
1725+ for property, direction in self.__orderings]
1726+ if orders:
1727+ return datastore_query.CompositeOrder(orders)
1728+ return None
1729+
1730+ def GetFilterPredicate(self):
1731+ """Returns a datastore_query.FilterPredicate for the current instance.
1732+
1733+ Returns:
1734+ datastore_query.FilterPredicate or None if no filters are set on the
1735+ current Query.
1736+ """
1737+ ordered_filters = [(i, f) for f, i in self.__filter_order.iteritems()]
1738+ ordered_filters.sort()
1739+
1740+ property_filters = []
1741+ for _, filter_str in ordered_filters:
1742+ if filter_str not in self:
1743+ continue
1744+
1745+ values = self[filter_str]
1746+ match = self._CheckFilter(filter_str, values)
1747+ name = match.group(1)
1748+
1749+ op = match.group(3)
1750+ if op is None or op == '==':
1751+ op = '='
1752+
1753+ property_filters.append(datastore_query.make_filter(name, op, values))
1754+
1755+ if property_filters:
1756+ return datastore_query.CompositeFilter(
1757+ datastore_query.CompositeFilter.AND,
1758+ property_filters)
1759+ return None
1760+
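A sketch of these new accessors, which decompose a high-level Query into the datastore_query primitives introduced by this branch ('Greeting' is illustrative):

    from google.appengine.api import datastore

    q = datastore.Query('Greeting')
    q['content ='] = 'hello'             # equality filter
    q.Order('content')

    predicate = q.GetFilterPredicate()   # CompositeFilter, or None
    order = q.GetOrder()                 # CompositeOrder, or None
    low_level = q.GetQuery()             # a full datastore_query.Query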
1761+ def GetCursor(self):
1762+ """Get the cursor from the last run of this query.
1763+
1764+ The source of this cursor varies depending on what the last call was:
1765+ - Run: A cursor that points immediately after the last result pulled off
1766+ the returned iterator.
1767+ - Get: A cursor that points immediately after the last result in the
1768+ returned list.
1769+ - Count: A cursor that points immediately after the last result counted.
1770+
1771+ Returns:
1772+      A datastore_query.Cursor object that can be used in subsequent query
1773+ requests.
1774+ """
1775 try:
1776- compiled_cursor = self.__last_iterator.GetCompiledCursor(self)
1777- if not compiled_cursor:
1778+ cursor = self.__cursor_source()
1779+ if not cursor:
1780 raise AttributeError()
1781 except AttributeError:
1782 raise AssertionError('No cursor available, either this query has not '
1783 'been executed or there is no compilation '
1784 'available for this kind of query')
1785- return compiled_cursor
1786-
1787- def GetCompiledQuery(self):
1788- try:
1789- if not self.__compiled_query:
1790- raise AttributeError()
1791- except AttributeError:
1792- raise AssertionError('No compiled query available, either this query has '
1793- 'not been executed or there is no compilation '
1794- 'available for this kind of query')
1795- return self.__compiled_query
1796+ return cursor
1797+
1798+ def GetBatcher(self, config=None):
1799+ """Runs this query and returns a datastore_query.Batcher.
1800+
1801+ This is not intended to be used by application developers. Use Get()
1802+ instead!
1803+
1804+ Args:
1805+ config: Optional Configuration to use for this request.
1806+
1807+ Returns:
1808+ # an iterator that provides access to the query results
1809+      # a batcher that provides access to the query results
1810+      datastore_query.Batcher
1810+    """
1811+    query_options = self.GetQueryOptions().merge(config)
1812+ return self.GetQuery().run(_GetConnection(), query_options)
1813
1814 def Run(self, **kwargs):
1815 """Runs this query.
1816@@ -1142,80 +1295,17 @@
1817 offset: integer, offset for the query.
1818 prefetch_count: integer, number of results to return in the first query.
1819 next_count: number of results to return in subsequent next queries.
1820- rpc: datastore.RPC to use for this request.
1821-
1822- Returns:
1823- # an iterator that provides access to the query results
1824- Iterator
1825- """
1826- return self._Run(**kwargs)
1827-
1828- def _Run(self, limit=None, offset=None,
1829- prefetch_count=None, next_count=None, **kwargs):
1830- """Runs this query, with an optional result limit and an optional offset.
1831-
1832- Identical to Run, with the extra optional limit, offset, prefetch_count,
1833- next_count parameters. These parameters must be integers >= 0.
1834-
1835- This is not intended to be used by application developers. Use Get()
1836- instead!
1837-
1838- Args:
1839- limit: integer, limit for the query.
1840- offset: integer, offset for the query.
1841- prefetch_count: integer, number of results to return in the first query.
1842- next_count: number of results to return in subsequent next queries.
1843- rpc: datastore.RPC to use for this request.
1844-
1845- Returns:
1846- # an iterator that provides access to the query results
1847- Iterator
1848- """
1849- rpc = GetRpcFromKwargs(kwargs)
1850- self.__last_iterator, self.__compiled_query = Query._RunInternal(
1851- self._ToPb(limit, offset, prefetch_count),
1852- next_count=next_count,
1853- rpc=rpc)
1854-
1855- return self.__last_iterator
1856-
1857- @staticmethod
1858- def _RunInternal(request, next_count=None, rpc=None):
1859- """Runs the given request and wraps the result in an iterator.
1860-
1861- Args:
1862- request: datastore_pb.query, the request to run.
1863- next_count: number of results to return in subsequent next queries.
1864- rpc: datastore.RPC to use for this request.
1865-
1866- Returns:
1867- (Iterator, datastore_pb.CompiledQuery), the iterator and compiled query
1868- that result from running the given request.
1869- """
1870-
1871- if rpc:
1872- rpc_clone = rpc.clone()
1873- else:
1874- rpc_clone = None
1875-
1876- try:
1877- result = _MakeSyncCall('datastore_v3', 'RunQuery', request,
1878- datastore_pb.QueryResult(), rpc)
1879- except apiproxy_errors.ApplicationError, err:
1880- try:
1881- raise _ToDatastoreError(err)
1882- except datastore_errors.NeedIndexError, exc:
1883- yaml = datastore_index.IndexYamlForQuery(
1884- *datastore_index.CompositeIndexForQuery(request)[1:-1])
1885- raise datastore_errors.NeedIndexError(
1886- str(exc) + '\nThis query needs this index:\n' + yaml)
1887-
1888- iterator = Iterator(result, query_request_pb=request, batch_size=next_count,
1889- rpc=rpc_clone)
1890- if result.has_compiled_query():
1891- return iterator, result.compiled_query()
1892- else:
1893- return iterator, None
1894+ config: Optional Configuration to use for this request.
1895+
1896+ Returns:
1897+ # an iterator that provides access to the query results
1898+ Iterator
1899+ """
1900+ config = _Rpc2Config(_GetConfigFromKwargs(kwargs))
1901+ itr = Iterator(self.GetBatcher(config=config))
1902+ self.__cursor_source = itr.cursor
1903+ self.__compiled_query_source = itr._compiled_query
1904+ return itr
1905
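Run() now wraps GetBatcher() in the thin Iterator defined at the bottom of this file; a sketch, including the renamed cursor accessor:

    q = datastore.Query('Greeting')
    for entity in q.Run():
        print entity
    # GetCursor() replaces GetCompiledCursor(); it raises AssertionError
    # if the query has not run or no compilation is available.
    cursor = q.GetCursor()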
1906 def Get(self, limit, offset=0, **kwargs):
1907 """Fetches and returns a maximum number of results from the query.
1908@@ -1249,54 +1339,64 @@
1909 int or long
1910 # the number of entities to skip
1911 int or long
1912- rpc: datastore.RPC to use for this request.
1913+ config: Optional Configuration to use for this request. If limit and
1914+ offset are specified in the config, they are ignored.
1915
1916 Returns:
1917 # a list of entities
1918 [Entity, ...]
1919 """
1920- if not isinstance(limit, (int, long)) or limit < 0:
1921- raise datastore_errors.BadArgumentError(
1922- 'Argument to Get named \'limit\' must be an int greater than or '
1923- 'equal to 0; received %s (a %s)' % (limit, typename(limit)))
1924-
1925- if not isinstance(offset, (int, long)) or offset < 0:
1926- raise datastore_errors.BadArgumentError(
1927- 'Argument to Get named \'offset\' must be an int greater than or '
1928- 'equal to 0; received %s (a %s)' % (offset, typename(offset)))
1929-
1930- return self._Run(
1931- limit=limit, offset=offset, prefetch_count=limit, **kwargs)._Get(limit)
1932+ config = _Rpc2Config(_GetConfigFromKwargs(kwargs))
1933+ batcher = self.GetBatcher(datastore_query.QueryOptions(
1934+ config=config, limit=limit, offset=offset, prefetch_size=limit))
1935+
1936+ if limit is None:
1937+ batch = batcher.next_batch(_MAX_INT_32)
1938+ else:
1939+ batch = batcher.next_batch(limit)
1940+ self.__cursor_source = lambda: batch.end_cursor
1941+ self.__compiled_query_source = lambda: batch._compiled_query
1942+ return batch.results
1943
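Get() sizes the first batch to the limit (prefetch_size=limit) and pulls a single batch; a paging sketch:

    q = datastore.Query('Greeting')
    first_page = q.Get(10)             # at most 10 entities, one batch
    next_page = q.Get(10, offset=10)   # offset is applied before the limit
    cursor = q.GetCursor()             # end cursor of the last batch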
1944 def Count(self, limit=1000, **kwargs):
1945- """Returns the number of entities that this query matches. The returned
1946- count is cached; successive Count() calls will not re-scan the datastore
1947- unless the query is changed.
1948+ """Returns the number of entities that this query matches.
1949
1950 Args:
1951 limit, a number or None. If there are more results than this, stop short
1952 and just return this number. Providing this argument makes the count
1953 operation more efficient.
1954- rpc: datastore.RPC to use for this request.
1955+ config: Optional Configuration to use for this request.
1956
1957 Returns:
1958 The number of results.
1959 """
1960- if not self.__cached_count:
1961- if limit is None:
1962- offset = _MAX_INT_32
1963- else:
1964- offset = limit
1965-
1966- iterator = self._Run(limit=0, offset=offset, **kwargs)
1967- self.__cached_count = iterator._SkippedResults()
1968-
1969- return self.__cached_count
1970+ if limit is None:
1971+ offset = _MAX_INT_32
1972+ else:
1973+ offset = limit
1974+
1975+ config = datastore_query.QueryOptions(
1976+ config=_Rpc2Config(_GetConfigFromKwargs(kwargs)),
1977+ limit=0,
1978+ offset=offset)
1979+
1980+ batch = self.GetBatcher(config=config).next()
1981+ self.__cursor_source = lambda: batch.cursor(0)
1982+ self.__compiled_query_source = lambda: batch._compiled_query
1983+ return batch.skipped_results
1984
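Note that Count() no longer caches its result (the __cached_count plumbing is deleted below); each call issues a limit=0 query and returns the skipped-result count:

    q = datastore.Query('Greeting')
    n = q.Count(limit=100)       # runs with limit=0, offset=100
    total = q.Count(limit=None)  # offset=_MAX_INT_32: count everything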
1985 def __iter__(self):
1986 raise NotImplementedError(
1987 'Query objects should not be used as iterators. Call Run() first.')
1988
1989+ def __getstate__(self):
1990+ state = self.__dict__.copy()
1991+ if '_Query__cursor_source' in state:
1992+ del state['_Query__cursor_source']
1993+ if '_Query__compiled_query_source' in state:
1994+ del state['_Query__compiled_query_source']
1995+ return state
1996+
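__getstate__ keeps a previously-run Query picklable: the cursor and compiled-query sources are closures over per-run batch state, so they are dropped. A sketch:

    import pickle

    q = datastore.Query('Greeting')
    q.Get(5)                            # installs the un-picklable sources
    q2 = pickle.loads(pickle.dumps(q))  # works; q2 simply has no cursor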
1997 def __setitem__(self, filter, value):
1998 """Implements the [] operator. Used to set filters.
1999
2000@@ -1325,8 +1425,6 @@
2001 self.__filter_order[filter] = self.__filter_counter
2002 self.__filter_counter += 1
2003
2004- self.__cached_count = None
2005-
2006 def setdefault(self, filter, value):
2007 """If the filter exists, returns its value. Otherwise sets it to value.
2008
2009@@ -1336,7 +1434,6 @@
2010 """
2011 datastore_types.ValidateProperty(' ', value)
2012 self._CheckFilter(filter, value)
2013- self.__cached_count = None
2014 return dict.setdefault(self, filter, value)
2015
2016 def __delitem__(self, filter):
2017@@ -1344,7 +1441,6 @@
2018 """
2019 dict.__delitem__(self, filter)
2020 del self.__filter_order[filter]
2021- self.__cached_count = None
2022
2023 match = Query.FILTER_REGEX.match(filter)
2024 property = match.group(1)
2025@@ -1447,92 +1543,42 @@
2026
2027 return match
2028
2029+ def _Run(self, limit=None, offset=None,
2030+ prefetch_count=None, next_count=None, **kwargs):
2031+ """Deprecated, use .Run instead."""
2032+ config = _Rpc2Config(_GetConfigFromKwargs(kwargs))
2033+ return self.Run(config=datastore_query.QueryOptions(
2034+ config=config,
2035+ limit=limit,
2036+ offset=offset,
2037+ prefetch_size=prefetch_count,
2038+ batch_size=next_count))
2039+
2040 def _ToPb(self, limit=None, offset=None, count=None):
2041- """Converts this Query to its protocol buffer representation. Not
2042- intended to be used by application developers. Enforced by hiding the
2043- datastore_pb classes.
2044-
2045- Args:
2046- # an upper bound on the number of results returned by the query.
2047- limit: int
2048- # number of results that match the query to skip. limit is applied
2049- # after the offset is fulfilled
2050- offset: int
2051- # the requested initial batch size
2052- count: int
2053-
2054- Returns:
2055- # the PB representation of this Query
2056- datastore_pb.Query
2057-
2058- Raises:
2059- BadRequestError if called inside a transaction and the query does not
2060- include an ancestor.
2061+ query_options = datastore_query.QueryOptions(
2062+ config=self.GetQueryOptions(),
2063+ limit=limit,
2064+ offset=offset,
2065+ batch_size=count)
2066+ return self.GetQuery()._to_pb(_GetConnection(), query_options)
2067+
2068+ def _GetCompiledQuery(self):
2069+ """Returns the internal-only pb representation of the last query run.
2070+
2071+ Do not use.
2072 """
2073-
2074- if not self.__ancestor and IsInTransaction():
2075- raise datastore_errors.BadRequestError(
2076- 'Only ancestor queries are allowed inside transactions.')
2077-
2078- pb = datastore_pb.Query()
2079- _MaybeSetupTransaction(pb, [self.__ancestor])
2080-
2081- if self.__kind is not None:
2082- pb.set_kind(self.__kind.encode('utf-8'))
2083- pb.set_keys_only(bool(self.__keys_only))
2084- if self.__app:
2085- pb.set_app(self.__app.encode('utf-8'))
2086- datastore_types.SetNamespace(pb, self.__namespace)
2087- if self.__compile:
2088- pb.set_compile(True)
2089- if limit is not None:
2090- pb.set_limit(limit)
2091- if offset is not None:
2092- pb.set_offset(offset)
2093- if count is not None:
2094- pb.set_count(count)
2095- if self.__ancestor:
2096- pb.mutable_ancestor().CopyFrom(self.__ancestor._Key__reference)
2097-
2098- if ((self.__hint == self.ORDER_FIRST and self.__orderings) or
2099- (self.__hint == self.ANCESTOR_FIRST and self.__ancestor) or
2100- (self.__hint == self.FILTER_FIRST and len(self) > 0)):
2101- pb.set_hint(self.__hint)
2102-
2103- ordered_filters = [(i, f) for f, i in self.__filter_order.iteritems()]
2104- ordered_filters.sort()
2105-
2106- for i, filter_str in ordered_filters:
2107- if filter_str not in self:
2108- continue
2109-
2110- values = self[filter_str]
2111- match = self._CheckFilter(filter_str, values)
2112- name = match.group(1)
2113-
2114- props = datastore_types.ToPropertyPb(name, values)
2115- if not isinstance(props, list):
2116- props = [props]
2117-
2118- op = match.group(3)
2119- if op is None:
2120- op = '='
2121-
2122- for prop in props:
2123- filter = pb.add_filter()
2124- filter.set_op(self.OPERATORS[op])
2125- filter.add_property().CopyFrom(prop)
2126-
2127- for property, direction in self.__orderings:
2128- order = pb.add_order()
2129- order.set_property(property.encode('utf-8'))
2130- order.set_direction(direction)
2131-
2132- if self.__cursor:
2133- pb.mutable_compiled_cursor().CopyFrom(self.__cursor)
2134- if self.__end_cursor:
2135- pb.mutable_end_compiled_cursor().CopyFrom(self.__end_cursor)
2136- return pb
2137+ try:
2138+ compiled_query = self.__compiled_query_source()
2139+ if not compiled_query:
2140+ raise AttributeError()
2141+ except AttributeError:
2142+ raise AssertionError('No compiled query available, either this query has '
2143+ 'not been executed or there is no compilation '
2144+ 'available for this kind of query')
2145+ return compiled_query
2146+
2147+ GetCompiledQuery = _GetCompiledQuery
2148+ GetCompiledCursor = GetCursor
2149
2150
2151 def AllocateIds(model_key, size=None, **kwargs):
2152@@ -1557,48 +1603,24 @@
2153 in which to allocate IDs
2154 size: integer, number of IDs to allocate.
2155 max: integer, upper bound of the range of IDs to allocate.
2156- rpc: datastore.RPC to use for this request.
2157+ config: Optional Configuration to use for this request.
2158
2159 Returns:
2160 (start, end) of the allocated range, inclusive.
2161 """
2162 max = kwargs.pop('max', None)
2163- rpc = GetRpcFromKwargs(kwargs)
2164+ config = _GetConfigFromKwargs(kwargs)
2165+ if getattr(config, 'read_policy', None) == EVENTUAL_CONSISTENCY:
2166+ raise datastore_errors.BadRequestError(
2167+ 'read_policy is only supported on read operations.')
2168 keys, _ = NormalizeAndTypeCheckKeys(model_key)
2169
2170 if len(keys) > 1:
2171 raise datastore_errors.BadArgumentError(
2172 'Cannot allocate IDs for more than one model key at a time')
2173
2174- req = datastore_pb.AllocateIdsRequest()
2175- if size is not None:
2176- if max is not None:
2177- raise datastore_errors.BadArgumentError(
2178- 'Cannot allocate ids using both size and max')
2179- if size > _MAX_ID_BATCH_SIZE:
2180- raise datastore_errors.BadArgumentError(
2181- 'Cannot allocate more than %s ids at a time; received %s'
2182- % (_MAX_ID_BATCH_SIZE, size))
2183- if size <= 0:
2184- raise datastore_errors.BadArgumentError(
2185- 'Cannot allocate less than 1 id; received %s' % size)
2186- req.set_size(size)
2187- if max:
2188- if max < 0:
2189- raise datastore_errors.BadArgumentError(
2190- 'Cannot allocate a range with a max less than 0 id; received %s' %
2191- size)
2192- req.set_max(max)
2193-
2194- req.mutable_model_key().CopyFrom(keys[0]._ToPb())
2195-
2196- try:
2197- resp = _MakeSyncCall('datastore_v3', 'AllocateIds', req,
2198- datastore_pb.AllocateIdsResponse(), rpc)
2199- except apiproxy_errors.ApplicationError, err:
2200- raise _ToDatastoreError(err)
2201-
2202- return resp.start(), resp.end()
2203+ rpc = _GetConnection().async_allocate_ids(config, keys[0], size, max)
2204+ return rpc.get_result()
2205
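Usage is unchanged; the size/max validation removed here presumably now lives behind async_allocate_ids. A sketch:

    from google.appengine.api import datastore

    model_key = datastore.Key.from_path('Greeting', 1)
    start, end = datastore.AllocateIds(model_key, size=10)
    assert end - start + 1 == 10    # the range is inclusive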
2206
2207 class MultiQuery(Query):
2208@@ -1642,17 +1664,17 @@
2209 limit: maximum number of values to return.
2210 offset: offset requested -- if nonzero, this will override the offset in
2211 the original query
2212- rpc: datastore.RPC to use for this request.
2213+ config: Optional Configuration to use for this request.
2214
2215 Returns:
2216 A list of entities with at most "limit" entries (less if the query
2217 completes before reading limit values).
2218 """
2219- rpc = GetRpcFromKwargs(kwargs)
2220+ config = _GetConfigFromKwargs(kwargs)
2221 count = 1
2222 result = []
2223
2224- iterator = self.Run(rpc=rpc)
2225+ iterator = self.Run(config=config)
2226
2227 try:
2228 for i in xrange(offset):
2229@@ -1782,17 +1804,14 @@
2230 Merge sort the results. First create a list of iterators, then walk
2231 though them and yield results in order.
2232 """
2233- rpc = GetRpcFromKwargs(kwargs)
2234+ config = _GetConfigFromKwargs(kwargs)
2235+ config = _Rpc2Config(config)
2236 results = []
2237 count = 1
2238 log_level = logging.DEBUG - 1
2239 for bound_query in self.__bound_queries:
2240 logging.log(log_level, 'Running query #%i' % count)
2241- if rpc:
2242- rpc_clone = rpc.clone()
2243- else:
2244- rpc_clone = None
2245- results.append(bound_query.Run(rpc=rpc_clone))
2246+ results.append(bound_query.Run(config=config))
2247 count += 1
2248
2249 def IterateResults(results):
2250@@ -1852,25 +1871,27 @@
2251 Args:
2252 limit: maximum number of entries to count (for any result > limit, return
2253 limit).
2254- rpc: datastore.RPC to use for this request.
2255+ config: Optional Configuration to use for this request.
2256
2257 Returns:
2258 count of the number of entries returned.
2259 """
2260- rpc = GetRpcFromKwargs(kwargs)
2261+ config = _GetConfigFromKwargs(kwargs)
2262 if limit is None:
2263 count = 0
2264- for i in self.Run(rpc=rpc):
2265+ for _ in self.Run(config=config):
2266 count += 1
2267 return count
2268 else:
2269- return len(self.Get(limit, rpc=rpc))
2270+ return len(self.Get(limit, config=config))
2271
2272- def GetCompiledCursor(self):
2273+ def GetCursor(self):
2274 raise AssertionError('No cursor available for a MultiQuery (queries '
2275 'using "IN" or "!=" operators)')
2276
2277- def GetCompiledQuery(self):
2278+
2279+ def _GetCompiledQuery(self):
2280+ """Internal only, do not use."""
2281 raise AssertionError('No compilation available for a MultiQuery (queries '
2282 'using "IN" or "!=" operators)')
2283
2284@@ -1934,253 +1955,8 @@
2285 def __iter__(self):
2286 return iter(self.__bound_queries)
2287
2288-
2289-
2290-class Iterator(object):
2291- """An iterator over the results of a datastore query.
2292-
2293- Iterators are used to access the results of a Query. An iterator is
2294- obtained by building a Query, then calling Run() on it.
2295-
2296- Iterator implements Python's iterator protocol, so results can be accessed
2297- with the for and in statements:
2298-
2299- > it = Query('Person').Run()
2300- > for person in it:
2301- > print 'Hi, %s!' % person['name']
2302- """
2303- def __init__(self, query_result_pb, batch_size=None, rpc=None,
2304- query_request_pb=None):
2305- """Constructor.
2306-
2307- kwargs gets stored and passed on to Next calls made by this iterator.
2308- """
2309- self.__cursor = query_result_pb.cursor()
2310- self.__keys_only = query_result_pb.keys_only()
2311- self.__batch_size = batch_size
2312- self.__rpc = rpc
2313- self.__skipped_results = 0
2314-
2315- self.__results_since_prev = 0
2316- self.__prev_compiled_cursor = None
2317- self.__next_compiled_cursor = None
2318-
2319- if query_request_pb:
2320- self.__remaining_offset = query_request_pb.offset()
2321- else:
2322- self.__remaining_offset = 0
2323-
2324- if query_request_pb and query_result_pb.has_compiled_cursor():
2325- if query_request_pb.has_compiled_cursor():
2326- self.__next_compiled_cursor = query_request_pb.compiled_cursor()
2327- else:
2328- self.__next_compiled_cursor = datastore_pb.CompiledCursor()
2329- self.__buffer = self._ProcessQueryResult(query_result_pb)
2330- self.__results_since_prev = query_request_pb.offset()
2331- else:
2332- self.__buffer = self._ProcessQueryResult(query_result_pb)
2333-
2334- def _Get(self, count):
2335- """Gets the next count result(s) of the query.
2336-
2337- Not intended to be used by application developers. Use the python
2338- iterator protocol instead.
2339-
2340- This method uses _Next to returns the next entities or keys from the list of
2341- matching results. If the query specified a sort order, results are returned
2342- in that order. Otherwise, the order is undefined.
2343-
2344- The argument, count, specifies the number of results to return. However, the
2345- length of the returned list may be smaller than count. This is the case only
2346- if count is greater than the number of remaining results.
2347-
2348- The results are always returned as a list. If there are no results left,
2349- an empty list is returned.
2350-
2351- Args:
2352- # the number of results to return; must be >= 1
2353- count: int or long
2354-
2355- Returns:
2356- # a list of entities or keys
2357- [Entity or Key, ...]
2358- """
2359- entity_list = self._Next(count)
2360- while len(entity_list) < count and self.__more_results:
2361- entity_list += self._Next(count - len(entity_list))
2362- return entity_list;
2363-
2364- def _Next(self, count=None):
2365- """Returns the next batch of results.
2366-
2367- Not intended to be used by application developers. Use the python
2368- iterator protocol instead.
2369-
2370- Values are returned in the order they are recieved from the datastore.
2371-
2372- If there are values in the internal buffer they are returned, otherwise a
2373- single RPC is run in an attempt to fulfill the request.
2374-
2375- The optional argument, count, specifies the number of results to return.
2376- However, the length of the returned list may be smaller than count. This is
2377- the case if:
2378- - the local buffer has results and count is greater than the number of
2379- results in the buffer.
2380- - count is greater than the number of remaining results
2381- - the size of the remaining results exceeds the RPC buffer limit
2382- Use _Get to ensure all possible entities are retrieved.
2383-
2384- When count is None, if there are items in the local buffer, they are
2385- all returned, otherwise the datastore backend is allowed to decide how many
2386- entities to send.
2387-
2388- The internal buffer is also used by the next() method so it is best not to
2389- mix _Next() and next().
2390-
2391- The results are always returned as a list. If there are results left, at
2392- least one result will be returned in this list. If there are no results
2393- left, an empty list is returned.
2394-
2395- Args:
2396- # the number of results to return; must be >= 1
2397- count: int or long or None
2398-
2399- Returns:
2400- # a list of entities or keys
2401- [Entity or Key, ...]
2402- """
2403- if count is not None and (not isinstance(count, (int, long)) or count < 0):
2404- raise datastore_errors.BadArgumentError(
2405- 'Argument to _Next must be an int greater than or equal to 0; received '
2406- '%s (a %s)' % (count, typename(count)))
2407-
2408- if self.__buffer:
2409- if count is None:
2410- entity_list = self.__buffer
2411- self.__buffer = []
2412- elif count <= len(self.__buffer):
2413- entity_list = self.__buffer[:count]
2414- del self.__buffer[:count]
2415- else:
2416- entity_list = self.__buffer
2417- self.__buffer = []
2418- self.__results_since_prev += len(entity_list)
2419- return entity_list
2420-
2421-
2422- if not self.__more_results:
2423- return []
2424-
2425- req = datastore_pb.NextRequest()
2426- if self.__remaining_offset:
2427- req.set_offset(self.__remaining_offset)
2428- if count is not None:
2429- req.set_count(count)
2430- if self.__next_compiled_cursor:
2431- req.set_compile(True)
2432- req.mutable_cursor().CopyFrom(self.__cursor)
2433- try:
2434- rpc = self.__rpc
2435- if rpc:
2436- self.__rpc = rpc.clone()
2437-
2438- result = _MakeSyncCall('datastore_v3', 'Next', req,
2439- datastore_pb.QueryResult(), rpc)
2440- except apiproxy_errors.ApplicationError, err:
2441- raise _ToDatastoreError(err)
2442-
2443- new_batch = self._ProcessQueryResult(result)
2444- if not self.__has_advanced:
2445- self.__more_results = False
2446- return new_batch
2447-
2448- def _ProcessQueryResult(self, result):
2449- """Returns all results from datastore_pb.QueryResult and updates
2450- self.__more_results
2451-
2452- Not intended to be used by application developers. Use the python
2453- iterator protocol instead.
2454-
2455- The results are always returned as a list. If there are no results left,
2456- an empty list is returned.
2457-
2458- Args:
2459- # the instance of datastore_pb.QueryResult to be stored
2460- result: datastore_pb.QueryResult
2461-
2462- Returns:
2463- # a list of entities or keys
2464- [Entity or Key, ...]
2465- """
2466- if self.__next_compiled_cursor and result.has_compiled_cursor():
2467- self.__prev_compiled_cursor = self.__next_compiled_cursor
2468- self.__next_compiled_cursor = result.compiled_cursor()
2469- self.__results_since_prev = 0
2470-
2471- self.__more_results = result.more_results()
2472- if result.skipped_results():
2473- self.__has_advanced = True
2474- self.__skipped_results += result.skipped_results()
2475- self.__remaining_offset -= result.skipped_results()
2476- else:
2477- self.__has_advanced = result.result_size() > 0
2478-
2479- if self.__keys_only:
2480- return [Key._FromPb(e.key()) for e in result.result_list()]
2481- else:
2482- return [Entity._FromPb(e) for e in result.result_list()]
2483-
2484- def _SkippedResults(self):
2485- self.__PrepBuffer()
2486- return self.__skipped_results
2487-
2488- def GetCompiledCursor(self, query):
2489- if not self.__buffer:
2490- return self.__next_compiled_cursor
2491- elif not self.__results_since_prev:
2492- return self.__prev_compiled_cursor
2493- elif self.__prev_compiled_cursor:
2494- return Query._RunInternal(query._ToPb(limit=0,
2495- offset=self.__results_since_prev),
2496- rpc=self.__rpc)[0].GetCompiledCursor(query)
2497- else:
2498- return None
2499-
2500- def next(self):
2501- self.__PrepBuffer()
2502- try:
2503- result = self.__buffer.pop(0)
2504- except IndexError:
2505- raise StopIteration
2506- self.__results_since_prev += 1
2507- return result
2508-
2509- def __PrepBuffer(self):
2510- """Loads the next set of values into the local buffer if needed."""
2511- while not self.__buffer and self.__more_results:
2512- self.__buffer = self._Next(self.__batch_size)
2513-
2514- def __iter__(self): return self
2515-
2516-class _Transaction(object):
2517- """Encapsulates a transaction currently in progress.
2518-
2519- If we've sent a BeginTransaction call, then handle will be a
2520- datastore_pb.Transaction that holds the transaction handle.
2521-
2522- If we know the entity group for this transaction, it's stored in the
2523- entity_group attribute, which is set by RunInTransaction().
2524-
2525- modified_keys is a set containing the Keys of all entities modified (ie put
2526- or deleted) in this transaction. If an entity is modified more than once, a
2527- BadRequestError is raised.
2528- """
2529- def __init__(self):
2530- """Initializes modified_keys to the empty set."""
2531- self.handle = None
2532- self.entity_group = None
2533- self.modified_keys = None
2534- self.modified_keys = set()
2535+ GetCompiledCursor = GetCursor
2536+ GetCompiledQuery = _GetCompiledQuery
2537
2538
2539 def RunInTransaction(function, *args, **kwargs):
2540@@ -2211,7 +1987,8 @@
2541 Runs the user-provided function inside a full-featured, ACID datastore
2542 transaction. Every Put, Get, and Delete call in the function is made within
2543 the transaction. All entities involved in these calls must belong to the
2544- same entity group. Queries are not supported.
2545+ same entity group. Queries are supported as long as they specify an
2546+ ancestor belonging to the same entity group.
2547
2548 The trailing arguments are passed to the function as positional arguments.
2549 If the function returns a value, that value will be returned by
2550@@ -2260,7 +2037,7 @@
2551 Nested transactions are not supported.
2552
2553 Args:
2554- # number of retries
2555+ # number of retries (not counting the initial try)
2556 retries: integer
2557 # a function to be run inside the transaction
2558 function: callable
2559@@ -2274,142 +2051,78 @@
2560 TransactionFailedError, if the transaction could not be committed.
2561 """
2562
2563- if _CurrentTransactionKey():
2564- raise datastore_errors.BadRequestError(
2565- 'Nested transactions are not supported.')
2566-
2567 if retries < 0:
2568 raise datastore_errors.BadRequestError(
2569 'Number of retries should be non-negative number.')
2570
2571- tx_key = None
2572+ if IsInTransaction():
2573+ raise datastore_errors.BadRequestError(
2574+ 'Nested transactions are not supported.')
2575+
2576+ old_connection = _GetConnection()
2577+ for i in range(0, retries + 1):
2578+ new_connection = old_connection.new_transaction()
2579+ _SetConnection(new_connection)
2580+ try:
2581+ ok, result = _DoOneTry(new_connection, function, args, kwargs)
2582+ if ok:
2583+ return result
2584+ finally:
2585+ _SetConnection(old_connection)
2586+
2587+ raise datastore_errors.TransactionFailedError(
2588+ 'The transaction could not be committed. Please try again.')
2589+
2590+
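The retry loop now swaps a TransactionalConnection into the thread-local slot around each attempt instead of walking the stack; the caller's side is unchanged. A sketch ('Counter' is illustrative; ancestor queries inside the function are newly allowed):

    from google.appengine.api import datastore

    def bump(counter_key):
        counter = datastore.Get(counter_key)   # runs on the txn connection
        counter['count'] = counter.get('count', 0) + 1
        datastore.Put(counter)

    key = datastore.Put(datastore.Entity('Counter'))
    datastore.RunInTransaction(bump, key)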
2591+def _DoOneTry(new_connection, function, args, kwargs):
2592+ """Helper to call a function in a transaction, once.
2593+
2594+ Args:
2595+ new_connection: The new, transactional, connection object.
2596+ function: The function to call.
2597+ args: Tuple of positional arguments.
2598+ kwargs: Dict of keyword arguments.
2599+ """
2600
2601 try:
2602- tx_key = _NewTransactionKey()
2603- tx = _Transaction()
2604- _txes[tx_key] = tx
2605-
2606- for i in range(0, retries + 1):
2607- tx.modified_keys.clear()
2608-
2609- try:
2610- result = function(*args, **kwargs)
2611- except:
2612- original_exception = sys.exc_info()
2613-
2614- if tx.handle:
2615- try:
2616- _MakeSyncCall('datastore_v3', 'Rollback',
2617- tx.handle, api_base_pb.VoidProto())
2618- except:
2619- logging.info('Exception sending Rollback:\n' +
2620- traceback.format_exc())
2621-
2622- type, value, trace = original_exception
2623- if type is datastore_errors.Rollback:
2624- return
2625- else:
2626- raise type, value, trace
2627-
2628- if tx.handle:
2629- try:
2630- _MakeSyncCall('datastore_v3', 'Commit',
2631- tx.handle, datastore_pb.CommitResponse())
2632- except apiproxy_errors.ApplicationError, err:
2633- if (err.application_error ==
2634- datastore_pb.Error.CONCURRENT_TRANSACTION):
2635- logging.warning('Transaction collision for entity group with '
2636- 'key %r. Retrying...', tx.entity_group)
2637- tx.handle = None
2638- tx.entity_group = None
2639- continue
2640- else:
2641- raise _ToDatastoreError(err)
2642-
2643- return result
2644-
2645- raise datastore_errors.TransactionFailedError(
2646- 'The transaction could not be committed. Please try again.')
2647-
2648- finally:
2649- if tx_key in _txes:
2650- del _txes[tx_key]
2651- del tx_key
2652+ result = function(*args, **kwargs)
2653+
2654+ except:
2655+ original_exception = sys.exc_info()
2656+
2657+ try:
2658+ new_connection.rollback()
2659+ except Exception:
2660+ logging.exception('Exception sending Rollback:')
2661+
2662+ type, value, trace = original_exception
2663+ if isinstance(value, datastore_errors.Rollback):
2664+ return True, None
2665+ else:
2666+ raise type, value, trace
2667+
2668+ else:
2669+ if new_connection.commit():
2670+ return True, result
2671+ else:
2672+      logging.warning('Transaction collision. Retrying...')
2673+ return False, None
2674
2675
2676 def _MaybeSetupTransaction(request, keys):
2677- """Begins a transaction, if necessary, and populates it in the request.
2678-
2679- If we're currently inside a transaction, this records the entity group,
2680- checks that the keys are all in that entity group, creates the transaction
2681- PB, and sends the BeginTransaction. It then populates the transaction handle
2682- in the request.
2683-
2684- Raises BadRequestError if the entity has a different entity group than the
2685- current transaction.
2686+ """Begin a transaction, if necessary, and populate it in the request.
2687+
2688+ This API exists for internal backwards compatibility, primarily with
2689+ api/taskqueue/taskqueue.py.
2690
2691 Args:
2692- request: GetRequest, PutRequest, DeleteRequest, or Query
2693- keys: sequence of Keys
2694+ request: A protobuf with a mutable_transaction() method.
2695+ keys: Unused.
2696
2697 Returns:
2698- _Transaction if we're inside a transaction, otherwise None
2699+ A transaction if we're inside a transaction, otherwise None
2700 """
2701- assert isinstance(request, (datastore_pb.GetRequest, datastore_pb.PutRequest,
2702- datastore_pb.DeleteRequest, datastore_pb.Query,
2703- taskqueue_service_pb.TaskQueueAddRequest,
2704- )), request.__class__
2705- tx_key = None
2706-
2707- try:
2708- tx_key = _CurrentTransactionKey()
2709- if tx_key:
2710- tx = _txes[tx_key]
2711-
2712- groups = [k.entity_group() for k in keys]
2713- if tx.entity_group:
2714- expected_group = tx.entity_group
2715- elif groups:
2716- expected_group = groups[0]
2717- else:
2718- expected_group = None
2719-
2720- for group in groups:
2721- if (group != expected_group or
2722-
2723-
2724-
2725-
2726-
2727-
2728-
2729- (not group.has_id_or_name() and group is not expected_group)):
2730- raise _DifferentEntityGroupError(expected_group, group)
2731-
2732- if not tx.entity_group and group.has_id_or_name():
2733- tx.entity_group = group
2734-
2735- if not tx.handle:
2736- req = datastore_pb.BeginTransactionRequest()
2737- if keys:
2738- req.set_app(keys[0].app())
2739- else:
2740- assert isinstance(request, taskqueue_service_pb.TaskQueueAddRequest)
2741- req.set_app(os.environ['APPLICATION_ID'])
2742- assert req.app()
2743-
2744- tx.handle = _MakeSyncCall('datastore_v3', 'BeginTransaction',
2745- req, datastore_pb.Transaction())
2746-
2747- if not tx.handle.app():
2748- tx.handle.set_app(req.app())
2749-
2750- request.mutable_transaction().CopyFrom(tx.handle)
2751-
2752- return tx
2753-
2754- finally:
2755- del tx_key
2756+ return _GetConnection()._set_request_transaction(request)
2757
2758
2759 def IsInTransaction():
2760@@ -2418,52 +2131,7 @@
2761 Returns:
2762 True if already running in transaction, else False.
2763 """
2764- return bool(_CurrentTransactionKey())
2765-
2766-
2767-def _DifferentEntityGroupError(a, b):
2768- """Raises a BadRequestError that says the given entity groups are different.
2769-
2770- Includes the two entity groups in the message, formatted more clearly and
2771- concisely than repr(Key).
2772-
2773- Args:
2774- a, b are both Keys that represent entity groups.
2775- """
2776- def id_or_name(key):
2777- if key.name():
2778- return 'name=%r' % key.name()
2779- else:
2780- return 'id=%r' % key.id()
2781-
2782- raise datastore_errors.BadRequestError(
2783- 'Cannot operate on different entity groups in a transaction: '
2784- '(kind=%r, %s) and (kind=%r, %s).' % (a.kind(), id_or_name(a),
2785- b.kind(), id_or_name(b)))
2786-
2787-
2788-def _FindTransactionFrameInStack():
2789- """Walks the stack to find a RunInTransaction() call.
2790-
2791- Returns:
2792- # this is the RunInTransactionCustomRetries() frame record, if found
2793- frame record or None
2794- """
2795- frame = sys._getframe()
2796- filename = frame.f_code.co_filename
2797-
2798- frame = frame.f_back.f_back
2799- while frame:
2800- if (frame.f_code.co_filename == filename and
2801- frame.f_code.co_name == 'RunInTransactionCustomRetries'):
2802- return frame
2803- frame = frame.f_back
2804-
2805- return None
2806-
2807-_CurrentTransactionKey = _FindTransactionFrameInStack
2808-
2809-_NewTransactionKey = sys._getframe
2810+ return isinstance(_GetConnection(), datastore_rpc.TransactionalConnection)
2811
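IsInTransaction() is now just a type check on the thread-local connection; a sketch:

    assert not datastore.IsInTransaction()

    def probe():
        # Inside RunInTransaction the thread-local connection is a
        # datastore_rpc.TransactionalConnection.
        assert datastore.IsInTransaction()

    datastore.RunInTransaction(probe)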
2812
2813 def _GetCompleteKeyOrError(arg):
2814@@ -2541,44 +2209,29 @@
2815 dictionary[key] = value
2816
2817
2818-def _ToDatastoreError(err):
2819- """Converts an apiproxy.ApplicationError to an error in datastore_errors.
2820-
2821- Args:
2822- err: apiproxy.ApplicationError
2823-
2824- Returns:
2825- a subclass of datastore_errors.Error
2826- """
2827- return _DatastoreExceptionFromErrorCodeAndDetail(err.application_error,
2828- err.error_detail)
2829-
2830-
2831-def _DatastoreExceptionFromErrorCodeAndDetail(error, detail):
2832- """Converts a datastore_pb.Error into a datastore_errors.Error.
2833-
2834- Args:
2835- error: A member of the datastore_pb.Error enumeration.
2836- detail: A string providing extra details about the error.
2837-
2838- Returns:
2839- A subclass of datastore_errors.Error.
2840- """
2841- exception_class = {
2842- datastore_pb.Error.BAD_REQUEST: datastore_errors.BadRequestError,
2843- datastore_pb.Error.CONCURRENT_TRANSACTION:
2844- datastore_errors.TransactionFailedError,
2845- datastore_pb.Error.INTERNAL_ERROR: datastore_errors.InternalError,
2846- datastore_pb.Error.NEED_INDEX: datastore_errors.NeedIndexError,
2847- datastore_pb.Error.TIMEOUT: datastore_errors.Timeout,
2848- datastore_pb.Error.BIGTABLE_ERROR: datastore_errors.Timeout,
2849- datastore_pb.Error.COMMITTED_BUT_STILL_APPLYING:
2850- datastore_errors.CommittedButStillApplying,
2851- datastore_pb.Error.CAPABILITY_DISABLED:
2852- apiproxy_errors.CapabilityDisabledError,
2853- }.get(error, datastore_errors.Error)
2854-
2855- if detail is None:
2856- return exception_class()
2857- else:
2858- return exception_class(detail)
2859+class Iterator(datastore_query.ResultsIterator):
2860+ """Thin wrapper of datastore_query.ResultsIterator.
2861+
2862+  Deprecated, do not use, only for backwards compatibility.
2863+ """
2864+ def _Next(self, count=None):
2865+ if count is None:
2866+ count = 20
2867+ result = []
2868+ for r in self:
2869+ if len(result) >= count:
2870+        break
2871+ result.append(r)
2872+ return result
2873+
2874+ def GetCompiledCursor(self, query):
2875+ return self.cursor()
2876+
2877+ _Get = _Next
2878+
2879+
2880+DatastoreRPC = apiproxy_stub_map.UserRPC
2881+GetRpcFromKwargs = _GetConfigFromKwargs
2882+_CurrentTransactionKey = IsInTransaction
2883+_ToDatastoreError = datastore_rpc._ToDatastoreError
2884+_DatastoreExceptionFromErrorCodeAndDetail = datastore_rpc._DatastoreExceptionFromErrorCodeAndDetail
2885
2886=== modified file 'AppServer/google/appengine/api/datastore_distributed.py'
2887--- AppServer/google/appengine/api/datastore_distributed.py 2010-12-17 22:47:53 +0000
2888+++ AppServer/google/appengine/api/datastore_distributed.py 2010-12-24 09:11:16 +0000
2889@@ -58,9 +58,9 @@
2890
2891 SSL_DEFAULT_PORT = 8443
2892 try:
2893- __import__('google.appengine.api.labs.taskqueue.taskqueue_service_pb')
2894+ __import__('google.appengine.api.taskqueue.taskqueue_service_pb')
2895 taskqueue_service_pb = sys.modules.get(
2896- 'google.appengine.api.labs.taskqueue.taskqueue_service_pb')
2897+ 'google.appengine.api.taskqueue.taskqueue_service_pb')
2898 except ImportError:
2899 from google.appengine.api.taskqueue import taskqueue_service_pb
2900
2901@@ -528,18 +528,10 @@
2902 def _RemoteSend(self, request, response, method):
2903 tag = self.__app_id
2904 user = users.GetCurrentUser()
2905- APPSCALE_VERSION = '1'
2906- try:
2907- APPSCALE_VERSION = os.environ['APPSCALE_VERSION']
2908- except Exception, e:
2909- logging.info("WARNING: Appscale version secret not set")
2910-
2911 if user != None:
2912 tag += ":" + user.email()
2913 tag += ":" + user.nickname()
2914 tag += ":" + user.auth_domain()
2915- if APPSCALE_VERSION:
2916- tag += ":" + APPSCALE_VERSION
2917 api_request = remote_api_pb.Request()
2918 api_request.set_method(method)
2919 api_request.set_service_name("datastore_v3")
2920
2921=== modified file 'AppServer/google/appengine/api/datastore_file_stub.py'
2922--- AppServer/google/appengine/api/datastore_file_stub.py 2010-11-30 10:37:25 +0000
2923+++ AppServer/google/appengine/api/datastore_file_stub.py 2010-12-24 09:11:16 +0000
2924@@ -42,7 +42,6 @@
2925 import sys
2926 import tempfile
2927 import threading
2928-import warnings
2929
2930 import cPickle as pickle
2931
2932@@ -53,6 +52,7 @@
2933 from google.appengine.api import datastore_errors
2934 from google.appengine.api import datastore_types
2935 from google.appengine.api import users
2936+from google.appengine.api.taskqueue import taskqueue_service_pb
2937 from google.appengine.datastore import datastore_pb
2938 from google.appengine.datastore import datastore_index
2939 from google.appengine.datastore import datastore_stub_util
2940@@ -60,24 +60,12 @@
2941 from google.net.proto import ProtocolBuffer
2942 from google.appengine.datastore import entity_pb
2943
2944-try:
2945- __import__('google.appengine.api.labs.taskqueue.taskqueue_service_pb')
2946- taskqueue_service_pb = sys.modules.get(
2947- 'google.appengine.api.labs.taskqueue.taskqueue_service_pb')
2948-except ImportError:
2949- from google.appengine.api.taskqueue import taskqueue_service_pb
2950
2951 entity_pb.Reference.__hash__ = lambda self: hash(self.Encode())
2952 datastore_pb.Query.__hash__ = lambda self: hash(self.Encode())
2953 datastore_pb.Transaction.__hash__ = lambda self: hash(self.Encode())
2954
2955
2956-_MAXIMUM_RESULTS = 1000
2957-
2958-
2959-_MAX_QUERY_OFFSET = 1000
2960-
2961-
2962 _MAX_QUERY_COMPONENTS = 100
2963
2964
2965@@ -87,9 +75,6 @@
2966 _MAX_ACTIONS_PER_TXN = 5
2967
2968
2969-_CURSOR_CONCAT_STR = '!CURSOR!'
2970-
2971-
2972 class _StoredEntity(object):
2973 """Simple wrapper around an entity stored by the stub.
2974
2975@@ -109,266 +94,8 @@
2976
2977 self.encoded_protobuf = entity.Encode()
2978
2979- self.native = datastore.Entity._FromPb(entity)
2980-
2981-
2982-class _Cursor(object):
2983- """A query cursor.
2984-
2985- Public properties:
2986- cursor: the integer cursor
2987- count: the original total number of results
2988- keys_only: whether the query is keys_only
2989- app: the app for which this cursor was created
2990-
2991- Class attributes:
2992- _next_cursor: the next cursor to allocate
2993- _next_cursor_lock: protects _next_cursor
2994- """
2995- _next_cursor = 1
2996- _next_cursor_lock = threading.Lock()
2997-
2998- def __init__(self, query, results, order_compare_entities):
2999- """Constructor.
3000-
3001- Args:
3002- query: the query request proto
3003- # the query results, in order, such that results[self.offset+1] is
3004- # the next result
3005- results: list of datastore.Entity
3006- order_compare_entities: a __cmp__ function for datastore.Entity that
3007- follows sort order as specified by the query
3008- """
3009-
3010- if query.has_compiled_cursor() and query.compiled_cursor().position_list():
3011- (self.__last_result, inclusive) = self._DecodeCompiledCursor(
3012- query, query.compiled_cursor())
3013- start_cursor_position = _Cursor._GetCursorOffset(results,
3014- self.__last_result,
3015- inclusive,
3016- order_compare_entities)
3017- else:
3018- self.__last_result = None
3019- start_cursor_position = 0
3020-
3021- if query.has_end_compiled_cursor():
3022- (end_cursor_entity, inclusive) = self._DecodeCompiledCursor(
3023- query, query.end_compiled_cursor())
3024- end_cursor_position = _Cursor._GetCursorOffset(results,
3025- end_cursor_entity,
3026- inclusive,
3027- order_compare_entities)
3028- else:
3029- end_cursor_position = len(results)
3030-
3031- results = results[start_cursor_position:end_cursor_position]
3032-
3033- if query.has_limit():
3034- limit = query.limit()
3035- if query.offset():
3036- limit += query.offset()
3037- if limit > 0 and limit < len(results):
3038- results = results[:limit]
3039-
3040- self.__results = results
3041- self.__query = query
3042- self.__offset = 0
3043-
3044- self.app = query.app()
3045- self.keys_only = query.keys_only()
3046- self.count = len(self.__results)
3047- self.cursor = self._AcquireCursorID()
3048-
3049- def _AcquireCursorID(self):
3050- """Acquires the next cursor id in a thread safe manner.
3051- """
3052- self._next_cursor_lock.acquire()
3053- try:
3054- cursor_id = _Cursor._next_cursor
3055- _Cursor._next_cursor += 1
3056- finally:
3057- self._next_cursor_lock.release()
3058- return cursor_id
3059-
3060- @staticmethod
3061- def _GetCursorOffset(results, cursor_entity, inclusive, compare):
3062-    """Converts a cursor entity into an offset into the result set even if the
3063- cursor_entity no longer exists.
3064-
3065- Args:
3066- cursor_entity: the decoded datastore.Entity from the compiled query
3067-      inclusive: boolean that specifies whether to offset past the cursor_entity
3068- compare: a function that takes two datastore.Entity and compares them
3069- Returns:
3070- the integer offset
3071- """
3072- lo = 0
3073- hi = len(results)
3074- if inclusive:
3075- while lo < hi:
3076- mid = (lo + hi) // 2
3077- if compare(results[mid], cursor_entity) < 0:
3078- lo = mid + 1
3079- else:
3080- hi = mid
3081- else:
3082- while lo < hi:
3083- mid = (lo + hi) // 2
3084- if compare(cursor_entity, results[mid]) < 0:
3085- hi = mid
3086- else:
3087- lo = mid + 1
3088- return lo
3089-
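Note: the removed _GetCursorOffset is a hand-rolled binary search. For reference only, the same offsets fall out of the standard bisect module; this sketch assumes Python 2.7's functools.cmp_to_key, which the 2.5-era SDK runtime does not provide:

    import bisect
    import functools

    def cursor_offset(results, cursor_entity, inclusive, compare):
        # inclusive -> first entity sorting >= cursor_entity (bisect_left);
        # exclusive -> first entity sorting > cursor_entity (bisect_right).
        key = functools.cmp_to_key(compare)
        keyed = [key(r) for r in results]
        if inclusive:
            return bisect.bisect_left(keyed, key(cursor_entity))
        return bisect.bisect_right(keyed, key(cursor_entity))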
3090- def _ValidateQuery(self, query, query_info):
3091- """Ensure that the given query matches the query_info.
3092-
3093- Args:
3094-      query: datastore_pb.Query instance we are checking
3095- query_info: datastore_pb.Query instance we want to match
3096-
3097- Raises BadRequestError on failure.
3098- """
3099- error_msg = 'Cursor does not match query: %s'
3100- exc = datastore_errors.BadRequestError
3101- if query_info.filter_list() != query.filter_list():
3102- raise exc(error_msg % 'filters do not match')
3103- if query_info.order_list() != query.order_list():
3104- raise exc(error_msg % 'orders do not match')
3105-
3106- for attr in ('ancestor', 'kind', 'name_space', 'search_query'):
3107- query_info_has_attr = getattr(query_info, 'has_%s' % attr)
3108- query_info_attr = getattr(query_info, attr)
3109- query_has_attr = getattr(query, 'has_%s' % attr)
3110- query_attr = getattr(query, attr)
3111- if query_info_has_attr():
3112- if not query_has_attr() or query_info_attr() != query_attr():
3113- raise exc(error_msg % ('%s does not match' % attr))
3114- elif query_has_attr():
3115- raise exc(error_msg % ('%s does not match' % attr))
3116-
3117- def _MinimalQueryInfo(self, query):
3118- """Extract the minimal set of information for query matching.
3119-
3120- Args:
3121- query: datastore_pb.Query instance from which to extract info.
3122-
3123- Returns:
3124- datastore_pb.Query instance suitable for matching against when
3125- validating cursors.
3126- """
3127- query_info = datastore_pb.Query()
3128- query_info.set_app(query.app())
3129-
3130- for filter in query.filter_list():
3131- query_info.filter_list().append(filter)
3132- for order in query.order_list():
3133- query_info.order_list().append(order)
3134-
3135- if query.has_ancestor():
3136- query_info.mutable_ancestor().CopyFrom(query.ancestor())
3137-
3138- for attr in ('kind', 'name_space', 'search_query'):
3139- query_has_attr = getattr(query, 'has_%s' % attr)
3140- query_attr = getattr(query, attr)
3141- query_info_set_attr = getattr(query_info, 'set_%s' % attr)
3142- if query_has_attr():
3143- query_info_set_attr(query_attr())
3144-
3145- return query_info
3146-
3147- def _MinimalEntityInfo(self, entity_proto, query):
3148- """Extract the minimal set of information that preserves entity order.
3149-
3150- Args:
3151- entity_proto: datastore_pb.EntityProto instance from which to extract
3152- information
3153- query: datastore_pb.Query instance for which ordering must be preserved.
3154-
3155- Returns:
3156- datastore_pb.EntityProto instance suitable for matching against a list of
3157- results when finding cursor positions.
3158- """
3159-    entity_info = datastore_pb.EntityProto()
3160- order_names = [o.property() for o in query.order_list()]
3161- entity_info.mutable_key().MergeFrom(entity_proto.key())
3162- entity_info.mutable_entity_group().MergeFrom(entity_proto.entity_group())
3163- for prop in entity_proto.property_list():
3164- if prop.name() in order_names:
3165- entity_info.add_property().MergeFrom(prop)
3166-    return entity_info
3167-
3168- def _DecodeCompiledCursor(self, query, compiled_cursor):
3169- """Converts a compiled_cursor into a cursor_entity.
3170-
3171- Returns:
3172- (cursor_entity, inclusive): a datastore.Entity and if it should be
3173- included in the result set.
3174- """
3175- assert len(compiled_cursor.position_list()) == 1
3176-
3177- position = compiled_cursor.position(0)
3178- entity_pb = datastore_pb.EntityProto()
3179- (query_info_encoded, entity_encoded) = position.start_key().split(
3180- _CURSOR_CONCAT_STR, 1)
3181- query_info_pb = datastore_pb.Query()
3182- query_info_pb.ParseFromString(query_info_encoded)
3183- self._ValidateQuery(query, query_info_pb)
3184-
3185- entity_pb.ParseFromString(entity_encoded)
3186- return (datastore.Entity._FromPb(entity_pb, True),
3187- position.start_inclusive())
3188-
3189- def _EncodeCompiledCursor(self, query, compiled_cursor):
3190- """Converts the current state of the cursor into a compiled_cursor
3191-
3192- Args:
3193- query: the datastore_pb.Query this cursor is related to
3194-      compiled_cursor: an empty datastore_pb.CompiledCursor
3195- """
3196- if self.__last_result is not None:
3197- position = compiled_cursor.add_position()
3198- query_info = self._MinimalQueryInfo(query)
3199- entity_info = self._MinimalEntityInfo(self.__last_result.ToPb(), query)
3200- start_key = _CURSOR_CONCAT_STR.join((
3201- query_info.Encode(),
3202- entity_info.Encode()))
3203- position.set_start_key(str(start_key))
3204- position.set_start_inclusive(False)
3205-
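Note: the position string handled by the two methods above is simply the two serialized protos joined by the '!CURSOR!' sentinel. A minimal round-trip sketch (function names hypothetical):

    _CURSOR_CONCAT_STR = '!CURSOR!'

    def encode_start_key(query_info, entity_info):
        # Both arguments are protocol buffers exposing Encode().
        return _CURSOR_CONCAT_STR.join((query_info.Encode(),
                                        entity_info.Encode()))

    def decode_start_key(start_key):
        # maxsplit=1: the entity payload may itself contain the sentinel.
        return start_key.split(_CURSOR_CONCAT_STR, 1)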
3206- def PopulateQueryResult(self, result, count, offset, compile=False):
3207- """Populates a QueryResult with this cursor and the given number of results.
3208-
3209- Args:
3210- result: datastore_pb.QueryResult
3211- count: integer of how many results to return
3212- offset: integer of how many results to skip
3213- compile: boolean, whether we are compiling this query
3214- """
3215- offset = min(offset, self.count - self.__offset)
3216- limited_offset = min(offset, _MAX_QUERY_OFFSET)
3217- if limited_offset:
3218- self.__offset += limited_offset
3219- result.set_skipped_results(limited_offset)
3220-
3221- if offset == limited_offset and count:
3222- if count > _MAXIMUM_RESULTS:
3223- count = _MAXIMUM_RESULTS
3224- results = self.__results[self.__offset:self.__offset + count]
3225- count = len(results)
3226- self.__offset += count
3227- result.result_list().extend(r._ToPb() for r in results)
3228-
3229- if self.__offset:
3230- self.__last_result = self.__results[self.__offset - 1]
3231-
3232- result.mutable_cursor().set_app(self.app)
3233- result.mutable_cursor().set_cursor(self.cursor)
3234- result.set_keys_only(self.keys_only)
3235- result.set_more_results(self.__offset < self.count)
3236- if compile:
3237- self._EncodeCompiledCursor(
3238- self.__query, result.mutable_compiled_cursor())
3239+ self.native = datastore.Entity._FromPb(entity,
3240+ validate_reserved_properties=False)
3241
3242
3243 class KindPseudoKind(object):
3244@@ -381,10 +108,48 @@
3245 """
3246 name = '__kind__'
3247
3248+ def Query(self, entities, query, filters, orders):
3249+ """Perform a query on this pseudo-kind.
3250+
3251+ Args:
3252+ entities: all the app's entities.
3253+ query: the original datastore_pb.Query.
3254+ filters: the filters from query.
3255+ orders: the orders from query.
3256+
3257+ Returns:
3258+ (results, remaining_filters, remaining_orders)
3259+ results is a list of datastore.Entity
3260+ remaining_filters and remaining_orders are the filters and orders that
3261+ should be applied in memory
3262+ """
3263+ kind_range = datastore_stub_util.ParseKindQuery(query, filters, orders)
3264+ app_namespace_str = datastore_types.EncodeAppIdNamespace(
3265+ query.app(), query.name_space())
3266+ kinds = []
3267+
3268+ for app_namespace, kind in entities:
3269+ if app_namespace != app_namespace_str: continue
3270+ if not kind_range.Contains(kind): continue
3271+ kinds.append(datastore.Entity(self.name, name=kind))
3272+
3273+ return (kinds, [], [])
3274+
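Note: from application code, the pseudo-kind added above surfaces one key-only entity per kind. A hedged usage sketch against the dev-server stub (kind names illustrative):

    from google.appengine.api import datastore

    for entity in datastore.Query('__kind__').Run():
        print entity.key().name()    # e.g. 'Greeting', 'Guestbook'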
3275+
3276+class PropertyPseudoKind(object):
3277+ """Pseudo-kind for schema queries.
3278+
3279+ Provides a Query method to perform the actual query.
3280+
3281+ Public properties:
3282+ name: the pseudo-kind name
3283+ """
3284+ name = '__property__'
3285+
3286 def __init__(self, filestub):
3287 """Constructor.
3288
3289- Initializes a __kind__ pseudo-kind definition.
3290+ Initializes a __property__ pseudo-kind definition.
3291
3292 Args:
3293 filestub: the DatastoreFileStub instance being served by this
3294@@ -396,10 +161,10 @@
3295 """Perform a query on this pseudo-kind.
3296
3297 Args:
3298- entities: all the app's entities
3299- query: the original datastore_pb.Query
3300- filters: the filters from query
3301- orders: the orders from query
3302+ entities: all the app's entities.
3303+ query: the original datastore_pb.Query.
3304+ filters: the filters from query.
3305+ orders: the orders from query.
3306
3307 Returns:
3308 (results, remaining_filters, remaining_orders)
3309@@ -407,63 +172,69 @@
3310 remaining_filters and remaining_orders are the filters and orders that
3311 should be applied in memory
3312 """
3313- start_kind, start_inclusive, end_kind, end_inclusive = (
3314- datastore_stub_util.ParseKindQuery(query, filters, orders))
3315- keys_only = query.keys_only()
3316- app_str = query.app()
3317- namespace_str = query.name_space()
3318- keys_only = query.keys_only()
3319- app_namespace_str = datastore_types.EncodeAppIdNamespace(app_str,
3320- namespace_str)
3321- kinds = []
3322+ property_range = datastore_stub_util.ParsePropertyQuery(query, filters,
3323+ orders)
3324+ keys_only = query.keys_only()
3325+ app_namespace_str = datastore_types.EncodeAppIdNamespace(
3326+ query.app(), query.name_space())
3327+
3328+ properties = []
3329 if keys_only:
3330- usekey = '__kind__keys'
3331+ usekey = '__property__keys'
3332 else:
3333- usekey = '__kind__'
3334+ usekey = '__property__'
3335
3336 for app_namespace, kind in entities:
3337 if app_namespace != app_namespace_str: continue
3338- if start_kind is not None:
3339- if start_inclusive and kind < start_kind: continue
3340- if not start_inclusive and kind <= start_kind: continue
3341- if end_kind is not None:
3342- if end_inclusive and kind > end_kind: continue
3343- if not end_inclusive and kind >= end_kind: continue
3344+
3345+ (start_cmp, end_cmp) = property_range.MapExtremes(
3346+ lambda extreme, inclusive, is_end: cmp(kind, extreme[0]))
3347+      if not ((start_cmp is None or start_cmp >= 0) and
3348+              (end_cmp is None or end_cmp <= 0)):
3349+ continue
3350
3351 app_kind = (app_namespace_str, kind)
3352
3353- kind_e = self.filestub._GetSchemaCache(app_kind, usekey)
3354- if not kind_e:
3355- kind_e = datastore.Entity(self.name, name=kind)
3356-
3357- if not keys_only:
3358- props = {}
3359-
3360- for entity in entities[app_kind].values():
3361- for prop in entity.protobuf.property_list():
3362- prop_name = prop.name()
3363- if prop_name not in props:
3364- props[prop_name] = set()
3365- cls = entity.native[prop_name].__class__
3366- tag = self.filestub._PROPERTY_TYPE_TAGS.get(cls)
3367- props[prop_name].add(tag)
3368-
3369- properties = []
3370- types = []
3371- for name in sorted(props):
3372- for tag in sorted(props[name]):
3373- properties.append(name)
3374- types.append(tag)
3375- if properties:
3376- kind_e['property'] = properties
3377- if types:
3378- kind_e['representation'] = types
3379-
3380- self.filestub._SetSchemaCache(app_kind, usekey, kind_e)
3381-
3382- kinds.append(kind_e)
3383-
3384- return (kinds, [], [])
3385+ kind_properties = self.filestub._GetSchemaCache(app_kind, usekey)
3386+ if not kind_properties:
3387+ kind_properties = []
3388+ kind_key = datastore_types.Key.from_path(KindPseudoKind.name, kind)
3389+ props = {}
3390+
3391+ for entity in entities[app_kind].values():
3392+ for prop in entity.protobuf.property_list():
3393+ prop_name = prop.name()
3394+ if (prop_name in
3395+ datastore_stub_util.GetInvisibleSpecialPropertyNames()):
3396+ continue
3397+ if prop_name not in props:
3398+ props[prop_name] = set()
3399+ native_value = entity.native[prop_name]
3400+ if not isinstance(native_value, list):
3401+ native_value = [native_value]
3402+ for value in native_value:
3403+ tag = self.filestub._PROPERTY_TYPE_TAGS.get(value.__class__)
3404+ if tag is not None:
3405+ props[prop_name].add(tag)
3406+ else:
3407+ logging.warning('Unexpected value of class %s in datastore', value.__class__)
3408+
3409+ for prop in sorted(props):
3410+ property_e = datastore.Entity(self.name, name=prop, parent=kind_key)
3411+ kind_properties.append(property_e)
3412+
3413+ if not keys_only and props[prop]:
3414+ property_e['property_representation'] = [
3415+ datastore_stub_util._PROPERTY_TYPE_NAMES[tag]
3416+ for tag in sorted(props[prop])]
3417+
3418+ self.filestub._SetSchemaCache(app_kind, usekey, kind_properties)
3419+
3420+ def InQuery(property_e):
3421+ return property_range.Contains((kind, property_e.key().name()))
3422+ properties += filter(InQuery, kind_properties)
3423+
3424+ return (properties, [], [])
3425
3426
3427 class NamespacePseudoKind(object):
3428@@ -476,25 +247,14 @@
3429 """
3430 name = '__namespace__'
3431
3432- def __init__(self, filestub):
3433- """Constructor.
3434-
3435- Initializes a __namespace__ pseudo-kind definition.
3436-
3437- Args:
3438- filestub: the DatastoreFileStub instance being served by this
3439- pseudo-kind.
3440- """
3441- self.filestub = filestub
3442-
3443 def Query(self, entities, query, filters, orders):
3444 """Perform a query on this pseudo-kind.
3445
3446 Args:
3447- entities: all the app's entities
3448- query: the original datastore_pb.Query
3449- filters: the filters from query
3450- orders: the orders from query
3451+ entities: all the app's entities.
3452+ query: the original datastore_pb.Query.
3453+ filters: the filters from query.
3454+ orders: the orders from query.
3455
3456 Returns:
3457 (results, remaining_filters, remaining_orders)
3458@@ -502,24 +262,16 @@
3459 remaining_filters and remaining_orders are the filters and orders that
3460 should be applied in memory
3461 """
3462- start_namespace, start_inclusive, end_namespace, end_inclusive = (
3463- datastore_stub_util.ParseNamespaceQuery(query, filters, orders))
3464+ namespace_range = datastore_stub_util.ParseNamespaceQuery(query, filters,
3465+ orders)
3466 app_str = query.app()
3467
3468 namespaces = set()
3469
3470 for app_namespace, kind in entities:
3471 (app_id, namespace) = datastore_types.DecodeAppIdNamespace(app_namespace)
3472- if app_id != app_str: continue
3473-
3474- if start_namespace is not None:
3475- if start_inclusive and namespace < start_namespace: continue
3476- if not start_inclusive and namespace <= start_namespace: continue
3477- if end_namespace is not None:
3478- if end_inclusive and namespace > end_namespace: continue
3479- if not end_inclusive and namespace >= end_namespace: continue
3480-
3481- namespaces.add(namespace)
3482+ if app_id == app_str and namespace_range.Contains(namespace):
3483+ namespaces.add(namespace)
3484
3485 namespace_entities = []
3486 for namespace in namespaces:
3487@@ -558,6 +310,8 @@
3488 datastore_types.PostalAddress: entity_pb.PropertyValue.kstringValue,
3489 datastore_types.Rating: entity_pb.PropertyValue.kint64Value,
3490 str: entity_pb.PropertyValue.kstringValue,
3491+ datastore_types.ByteString: entity_pb.PropertyValue.kstringValue,
3492+ datastore_types.BlobKey: entity_pb.PropertyValue.kstringValue,
3493 datastore_types.Text: entity_pb.PropertyValue.kstringValue,
3494 type(None): 0,
3495 unicode: entity_pb.PropertyValue.kstringValue,
3496@@ -635,8 +389,9 @@
3497 self.__indexes_lock = threading.Lock()
3498
3499 self.__pseudo_kinds = {}
3500- self._RegisterPseudoKind(KindPseudoKind(self))
3501- self._RegisterPseudoKind(NamespacePseudoKind(self))
3502+ self._RegisterPseudoKind(KindPseudoKind())
3503+ self._RegisterPseudoKind(PropertyPseudoKind(self))
3504+ self._RegisterPseudoKind(NamespacePseudoKind())
3505
3506 self.Read()
3507
3508@@ -826,14 +581,14 @@
3509
3510 return []
3511
3512- def __WritePickled(self, obj, filename, openfile=file):
3513+ def __WritePickled(self, obj, filename):
3514 """Pickles the object and writes it to the given file.
3515 """
3516 if not filename or filename == '/dev/null' or not obj:
3517 return
3518
3519 descriptor, tmp_filename = tempfile.mkstemp(dir=os.path.dirname(filename))
3520- tmpfile = openfile(tmp_filename, 'wb')
3521+ tmpfile = os.fdopen(descriptor, 'wb')
3522 pickler = pickle.Pickler(tmpfile, protocol=1)
3523 pickler.fast = True
3524 pickler.dump(obj)
3525@@ -921,6 +676,8 @@
3526 assert (clone.has_entity_group() and
3527 clone.entity_group().element_size() > 0)
3528
3529+ datastore_stub_util.PrepareSpecialPropertiesForStore(clone)
3530+
3531 self.__entities_lock.acquire()
3532
3533 try:
3534@@ -956,6 +713,8 @@
3535
3536 if entity:
3537 group.mutable_entity().CopyFrom(entity)
3538+ datastore_stub_util.PrepareSpecialPropertiesForLoad(
3539+ group.mutable_entity())
3540
3541
3542 def _Dynamic_Delete(self, delete_request, delete_response):
3543@@ -1166,6 +925,14 @@
3544 if cmped == 0:
3545 return cmp(a.key(), b.key())
3546
3547+ def order_compare_entities_pb(a, b):
3548+ """ Return a negative, zero or positive number depending on whether
3549+ entity a is considered smaller than, equal to, or larger than b,
3550+ according to the query's orderings. a and b are protobuf-encoded
3551+ entities."""
3552+ return order_compare_entities(datastore.Entity.FromPb(a),
3553+ datastore.Entity.FromPb(b))
3554+
3555 def order_compare_properties(x, y):
3556 """Return a negative, zero or positive number depending on whether
3557 property value x is considered smaller than, equal to, or larger than
3558@@ -1201,7 +968,11 @@
3559 else:
3560 self.__query_history[clone] = 1
3561
3562- cursor = _Cursor(query, results, order_compare_entities)
3563+ results = [r._ToPb() for r in results]
3564+ for result in results:
3565+ datastore_stub_util.PrepareSpecialPropertiesForLoad(result)
3566+ cursor = datastore_stub_util.ListCursor(query, results,
3567+ order_compare_entities_pb)
3568 self.__queries[cursor.cursor] = cursor
3569
3570 if query.has_count():
3571@@ -1243,7 +1014,8 @@
3572 query_result = datastore_pb.QueryResult()
3573 self._Dynamic_RunQuery(query, query_result)
3574 cursor = query_result.cursor().cursor()
3575- integer64proto.set_value(min(self.__queries[cursor].count, _MAXIMUM_RESULTS))
3576+ integer64proto.set_value(min(self.__queries[cursor].Count(),
3577+ datastore_stub_util._MAXIMUM_RESULTS))
3578 del self.__queries[cursor]
3579
3580 def _Dynamic_BeginTransaction(self, request, transaction):
3581@@ -1348,6 +1120,9 @@
3582
3583 for entity in self.__entities[app_kind].values():
3584 for prop in entity.protobuf.property_list():
3585+ if (prop.name() in
3586+ datastore_stub_util.GetInvisibleSpecialPropertyNames()):
3587+ continue
3588 if prop.name() not in props:
3589 props[prop.name()] = entity_pb.PropertyValue()
3590 props[prop.name()].MergeFrom(prop.value())
3591
3592=== modified file 'AppServer/google/appengine/api/datastore_types.py'
3593--- AppServer/google/appengine/api/datastore_types.py 2010-11-30 10:37:25 +0000
3594+++ AppServer/google/appengine/api/datastore_types.py 2010-12-24 09:11:16 +0000
3595@@ -679,7 +679,7 @@
3596 TERM = 'user-tag'
3597
3598 def __init__(self, tag):
3599- super(Category, self).__init__(self, tag)
3600+ super(Category, self).__init__()
3601 ValidateString(tag, 'tag')
3602
3603 def ToXml(self):
3604@@ -701,7 +701,7 @@
3605 Raises BadValueError if link is not a fully qualified, well-formed URL.
3606 """
3607 def __init__(self, link):
3608- super(Link, self).__init__(self, link)
3609+ super(Link, self).__init__()
3610 ValidateString(link, 'link', max_len=_MAX_LINK_PROPERTY_LENGTH)
3611
3612 scheme, domain, path, params, query, fragment = urlparse.urlparse(link)
3613@@ -724,7 +724,7 @@
3614 Raises BadValueError if email is not a valid email address.
3615 """
3616 def __init__(self, email):
3617- super(Email, self).__init__(self, email)
3618+ super(Email, self).__init__()
3619 ValidateString(email, 'email')
3620
3621 def ToXml(self):
3622@@ -915,7 +915,7 @@
3623 Raises BadValueError if phone is not a string or subtype.
3624 """
3625 def __init__(self, phone):
3626- super(PhoneNumber, self).__init__(self, phone)
3627+ super(PhoneNumber, self).__init__()
3628 ValidateString(phone, 'phone')
3629
3630 def ToXml(self):
3631@@ -933,7 +933,7 @@
3632 Raises BadValueError if address is not a string or subtype.
3633 """
3634 def __init__(self, address):
3635- super(PostalAddress, self).__init__(self, address)
3636+ super(PostalAddress, self).__init__()
3637 ValidateString(address, 'address')
3638
3639 def ToXml(self):
3640@@ -955,7 +955,7 @@
3641 MAX = 100
3642
3643 def __init__(self, rating):
3644- super(Rating, self).__init__(self, rating)
3645+ super(Rating, self).__init__()
3646 if isinstance(rating, float) or isinstance(rating, complex):
3647 raise datastore_errors.BadValueError(
3648 'Expected int or long; received %s (a %s).' %
3649@@ -1505,7 +1505,7 @@
3650 same type.
3651
3652 Returns:
3653- A list of entity_pb.PropertyValue instances.
3654+ A list of entity_pb.Property instances.
3655 """
3656 encoded_name = name.encode('utf-8')
3657
3658
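Note on the constructor hunks above: each of these classes subclasses an immutable built-in (unicode, or long for Rating), whose value is bound in __new__, so __init__ must not be handed the value again. A minimal sketch, assuming a unicode subclass:

    class Email(unicode):
        def __init__(self, email):
            # Old form was super(Email, self).__init__(self, email);
            # object.__init__ warns or fails on extra arguments in newer
            # Python 2.x releases.
            super(Email, self).__init__()

    e = Email('user@example.com')   # value already set by unicode.__new__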
3659=== modified file 'AppServer/google/appengine/api/images/__init__.py'
3660--- AppServer/google/appengine/api/images/__init__.py 2010-11-30 10:37:25 +0000
3661+++ AppServer/google/appengine/api/images/__init__.py 2010-12-24 09:11:16 +0000
3662@@ -897,6 +897,8 @@
3663 return image.histogram()
3664
3665
3666+IMG_SERVING_SIZES_LIMIT = 1600
3667+
3668 IMG_SERVING_SIZES = [
3669 32, 48, 64, 72, 80, 90, 94, 104, 110, 120, 128, 144,
3670 150, 160, 200, 220, 288, 320, 400, 512, 576, 640, 720,
3671@@ -927,20 +929,8 @@
3672
3673 "http://lh3.ggpht.com/SomeCharactersGoesHere=s32-c"
3674
3675- Available sizes for resize are:
3676- (e.g. "=sX" where X is one of the following values)
3677-
3678- 0, 32, 48, 64, 72, 80, 90, 94, 104, 110, 120, 128, 144,
3679- 150, 160, 200, 220, 288, 320, 400, 512, 576, 640, 720,
3680- 800, 912, 1024, 1152, 1280, 1440, 1600
3681-
3682- Available sizes for crop are:
3683- (e.g. "=sX-c" where X is one of the following values)
3684-
3685- 32, 48, 64, 72, 80, 104, 136, 144, 150, 160
3686-
3687- These values are also available as IMG_SERVING_SIZES and
3688- IMG_SERVING_CROP_SIZES integer lists.
3689+  Available sizes are any integer in the range [0, 1600]; this limit is
3690+  available as IMG_SERVING_SIZES_LIMIT.
3691
3692 Args:
3693 size: int, size of resulting images
3694@@ -960,10 +950,7 @@
3695 if crop and not size:
3696 raise BadRequestError("Size should be set for crop operation")
3697
3698- if size and crop and not size in IMG_SERVING_CROP_SIZES:
3699- raise UnsupportedSizeError("Unsupported crop size")
3700-
3701- if size and not crop and not size in IMG_SERVING_SIZES:
3702+ if size and (size > IMG_SERVING_SIZES_LIMIT or size < 0):
3703 raise UnsupportedSizeError("Unsupported size")
3704
3705 request = images_service_pb.ImagesGetUrlBaseRequest()
3706@@ -999,4 +986,3 @@
3707 url += "-c"
3708
3709 return url
3710-
3711
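Note: the whitelist checks above collapse into a single range test. A condensed sketch of the validation get_serving_url now applies (helper name hypothetical; BadRequestError and UnsupportedSizeError are this module's exception types):

    IMG_SERVING_SIZES_LIMIT = 1600

    def _check_size(size, crop):
        if crop and not size:
            raise BadRequestError('Size should be set for crop operation')
        if size and (size < 0 or size > IMG_SERVING_SIZES_LIMIT):
            raise UnsupportedSizeError('Unsupported size')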
3712=== modified file 'AppServer/google/appengine/api/images/images_stub.py'
3713--- AppServer/google/appengine/api/images/images_stub.py 2010-11-30 10:37:25 +0000
3714+++ AppServer/google/appengine/api/images/images_stub.py 2010-12-24 09:11:16 +0000
3715@@ -41,6 +41,9 @@
3716 from google.appengine.runtime import apiproxy_errors
3717
3718
3719+MAX_REQUEST_SIZE = 32 << 20
3720+
3721+
3722 def _ArgbToRgbaTuple(argb):
3723 """Convert from a single ARGB value to a tuple containing RGBA.
3724
3725@@ -89,7 +92,8 @@
3726       host_prefix: the URL prefix (protocol://host:port) to prepend to
3727 image urls on a call to GetUrlBase.
3728 """
3729- super(ImagesServiceStub, self).__init__(service_name)
3730+ super(ImagesServiceStub, self).__init__(service_name,
3731+ max_request_size=MAX_REQUEST_SIZE)
3732 self._host_prefix = host_prefix
3733 Image.init()
3734
3735
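Note: 32 << 20 is the 32 MiB API request ceiling. Passing it as max_request_size presumably lets the shared apiproxy_stub base class reject oversized image payloads before the stub's own handlers run:

    MAX_REQUEST_SIZE = 32 << 20    # 32 * 2**20 = 33554432 bytes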
3736=== modified file 'AppServer/google/appengine/api/labs/taskqueue/__init__.py' (properties changed: -x to +x)
3737--- AppServer/google/appengine/api/labs/taskqueue/__init__.py 2010-05-07 09:58:53 +0000
3738+++ AppServer/google/appengine/api/labs/taskqueue/__init__.py 2010-12-24 09:11:16 +0000
3739@@ -15,6 +15,58 @@
3740 # limitations under the License.
3741 #
3742
3743-"""Task Queue API module."""
3744-
3745-from taskqueue import *
3746+"""Shim module so that the old labs import path still works."""
3747+
3748+
3749+
3750+__all__ = [
3751+
3752+ 'BadTaskStateError', 'BadTransactionState', 'BadTransactionStateError',
3753+ 'DatastoreError', 'DuplicateTaskNameError', 'Error', 'InternalError',
3754+ 'InvalidQueueError', 'InvalidQueueNameError', 'InvalidTaskError',
3755+ 'InvalidTaskNameError', 'InvalidUrlError', 'PermissionDeniedError',
3756+ 'TaskAlreadyExistsError', 'TaskTooLargeError', 'TombstonedTaskError',
3757+ 'TooManyTasksError', 'TransientError', 'UnknownQueueError',
3758+
3759+ 'MAX_QUEUE_NAME_LENGTH', 'MAX_TASK_NAME_LENGTH', 'MAX_TASK_SIZE_BYTES',
3760+ 'MAX_URL_LENGTH',
3761+
3762+ 'Queue', 'Task', 'add']
3763+
3764+
3765+import os
3766+import sys
3767+import warnings
3768+
3769+from google.appengine.api.taskqueue import *
3770+
3771+
3772+if os.environ.get('DATACENTER', None) is None:
3773+ warnings.warn('google.appengine.api.labs.taskqueue is deprecated, please use '
3774+ 'google.appengine.api.taskqueue', DeprecationWarning,
3775+ stacklevel=2)
3776+
3777+
3778+def _map_module(module_name):
3779+ """Map a module from the new path to the labs path.
3780+
3781+ Args:
3782+ module_name: Name of the module to be mapped.
3783+
3784+ Raises:
3785+ ImportError: If the specified module we are mapping from does not exist.
3786+
3787+ Returns:
3788+ The module object of the module that was mapped.
3789+ """
3790+ labs_module_name = '%s.%s' % (__name__, module_name)
3791+ module_prefix = '.'.join(__name__.split('.')[:2])
3792+ new_module_name = '%s.api.taskqueue.%s' % (module_prefix, module_name)
3793+
3794+ __import__(new_module_name)
3795+ sys.modules[labs_module_name] = sys.modules[new_module_name]
3796+ return sys.modules[labs_module_name]
3797+
3798+taskqueue = _map_module('taskqueue')
3799+taskqueue_service_pb = _map_module('taskqueue_service_pb')
3800+taskqueue_stub = _map_module('taskqueue_stub')
3801
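Note: the net effect of the shim above is that both import paths name the same module objects, so existing labs callers keep working. A hedged check:

    import sys
    from google.appengine.api.labs import taskqueue as labs_taskqueue
    from google.appengine.api import taskqueue as new_taskqueue

    # _map_module aliased the submodules in sys.modules...
    assert (sys.modules['google.appengine.api.labs.taskqueue.taskqueue']
            is sys.modules['google.appengine.api.taskqueue.taskqueue'])
    # ...and the star-import re-exports the same classes.
    assert labs_taskqueue.Task is new_taskqueue.Task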
3802=== removed file 'AppServer/google/appengine/api/labs/taskqueue/taskqueue.py'
3803--- AppServer/google/appengine/api/labs/taskqueue/taskqueue.py 2010-11-30 10:37:25 +0000
3804+++ AppServer/google/appengine/api/labs/taskqueue/taskqueue.py 1970-01-01 00:00:00 +0000
3805@@ -1,953 +0,0 @@
3806-#!/usr/bin/env python
3807-#
3808-# Copyright 2007 Google Inc.
3809-#
3810-# Licensed under the Apache License, Version 2.0 (the "License");
3811-# you may not use this file except in compliance with the License.
3812-# You may obtain a copy of the License at
3813-#
3814-# http://www.apache.org/licenses/LICENSE-2.0
3815-#
3816-# Unless required by applicable law or agreed to in writing, software
3817-# distributed under the License is distributed on an "AS IS" BASIS,
3818-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
3819-# See the License for the specific language governing permissions and
3820-# limitations under the License.
3821-#
3822-
3823-"""Task Queue API.
3824-
3825-Enables an application to queue background work for itself. Work is done through
3826-webhooks that process tasks pushed from a queue. Tasks will execute in
3827-best-effort order of ETA. Webhooks that fail will cause tasks to be retried at a
3828-later time. Multiple queues may exist with independent throttling controls.
3829-
3830-Webhook URLs may be specified directly for Tasks, or the default URL scheme
3831-may be used, which will translate Task names into URLs relative to a Queue's
3832-base path. A default queue is also provided for simple usage.
3833-"""
3834-
3835-
3836-
3837-import calendar
3838-import datetime
3839-import os
3840-import re
3841-import time
3842-import urllib
3843-import urlparse
3844-
3845-import taskqueue_service_pb
3846-
3847-from google.appengine.api import apiproxy_stub_map
3848-from google.appengine.api import namespace_manager
3849-from google.appengine.api import urlfetch
3850-from google.appengine.runtime import apiproxy_errors
3851-import os,sys
3852-APPSCALE_HOME = os.environ.get("APPSCALE_HOME")
3853-
3854-
3855-class Error(Exception):
3856- """Base-class for exceptions in this module."""
3857-
3858-
3859-class UnknownQueueError(Error):
3860- """The queue specified is unknown."""
3861-
3862-
3863-class TransientError(Error):
3864- """There was a transient error while accessing the queue.
3865-
3866-  Please try again later.
3867- """
3868-
3869-
3870-class InternalError(Error):
3871- """There was an internal error while accessing this queue.
3872-
3873- If this problem continues, please contact the App Engine team through
3874- our support forum with a description of your problem.
3875- """
3876-
3877-
3878-class InvalidTaskError(Error):
3879- """The task's parameters, headers, or method is invalid."""
3880-
3881-
3882-class InvalidTaskNameError(InvalidTaskError):
3883- """The task's name is invalid."""
3884-
3885-
3886-class TaskTooLargeError(InvalidTaskError):
3887- """The task is too large with its headers and payload."""
3888-
3889-
3890-class TaskAlreadyExistsError(InvalidTaskError):
3891- """Task already exists. It has not yet run."""
3892-
3893-
3894-class TombstonedTaskError(InvalidTaskError):
3895- """Task has been tombstoned."""
3896-
3897-
3898-class InvalidUrlError(InvalidTaskError):
3899- """The task's relative URL is invalid."""
3900-
3901-
3902-class BadTaskStateError(Error):
3903- """The task is in the wrong state for the requested operation."""
3904-
3905-
3906-class InvalidQueueError(Error):
3907- """The Queue's configuration is invalid."""
3908-
3909-
3910-class InvalidQueueNameError(InvalidQueueError):
3911- """The Queue's name is invalid."""
3912-
3913-
3914-class _RelativeUrlError(Error):
3915- """The relative URL supplied is invalid."""
3916-
3917-
3918-class PermissionDeniedError(Error):
3919- """The requested operation is not allowed for this app."""
3920-
3921-
3922-class DuplicateTaskNameError(Error):
3923- """The add arguments contain tasks with identical names."""
3924-
3925-
3926-class TooManyTasksError(Error):
3927- """Too many tasks were present in a single function call."""
3928-
3929-
3930-class DatastoreError(Error):
3931- """There was a datastore error while accessing the queue."""
3932-
3933-
3934-class BadTransactionStateError(Error):
3935- """The state of the current transaction does not permit this operation."""
3936-
3937-BadTransactionState = BadTransactionStateError
3938-
3939-MAX_QUEUE_NAME_LENGTH = 100
3940-
3941-MAX_TASK_NAME_LENGTH = 500
3942-
3943-MAX_TASK_SIZE_BYTES = 10 * (2 ** 10)
3944-
3945-MAX_URL_LENGTH = 2083
3946-
3947-_DEFAULT_QUEUE = 'default'
3948-
3949-_DEFAULT_QUEUE_PATH = '/_ah/queue'
3950-
3951-_METHOD_MAP = {
3952- 'GET': taskqueue_service_pb.TaskQueueAddRequest.GET,
3953- 'POST': taskqueue_service_pb.TaskQueueAddRequest.POST,
3954- 'HEAD': taskqueue_service_pb.TaskQueueAddRequest.HEAD,
3955- 'PUT': taskqueue_service_pb.TaskQueueAddRequest.PUT,
3956- 'DELETE': taskqueue_service_pb.TaskQueueAddRequest.DELETE,
3957-}
3958-
3959-_NON_POST_METHODS = frozenset(['GET', 'HEAD', 'PUT', 'DELETE'])
3960-
3961-_BODY_METHODS = frozenset(['POST', 'PUT'])
3962-
3963-_TASK_NAME_PATTERN = r'^[a-zA-Z0-9-]{1,%s}$' % MAX_TASK_NAME_LENGTH
3964-
3965-_TASK_NAME_RE = re.compile(_TASK_NAME_PATTERN)
3966-
3967-_QUEUE_NAME_PATTERN = r'^[a-zA-Z0-9-]{1,%s}$' % MAX_QUEUE_NAME_LENGTH
3968-
3969-_QUEUE_NAME_RE = re.compile(_QUEUE_NAME_PATTERN)
3970-
3971-_ERROR_MAPPING = {
3972- taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE: UnknownQueueError,
3973- taskqueue_service_pb.TaskQueueServiceError.TRANSIENT_ERROR:
3974- TransientError,
3975- taskqueue_service_pb.TaskQueueServiceError.INTERNAL_ERROR: InternalError,
3976- taskqueue_service_pb.TaskQueueServiceError.TASK_TOO_LARGE:
3977- TaskTooLargeError,
3978- taskqueue_service_pb.TaskQueueServiceError.INVALID_TASK_NAME:
3979- InvalidTaskNameError,
3980- taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_NAME:
3981- InvalidQueueNameError,
3982- taskqueue_service_pb.TaskQueueServiceError.INVALID_URL: InvalidUrlError,
3983- taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_RATE:
3984- InvalidQueueError,
3985- taskqueue_service_pb.TaskQueueServiceError.PERMISSION_DENIED:
3986- PermissionDeniedError,
3987- taskqueue_service_pb.TaskQueueServiceError.TASK_ALREADY_EXISTS:
3988- TaskAlreadyExistsError,
3989- taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_TASK:
3990- TombstonedTaskError,
3991- taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA: InvalidTaskError,
3992- taskqueue_service_pb.TaskQueueServiceError.INVALID_REQUEST: Error,
3993- taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_TASK: Error,
3994- taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_QUEUE: Error,
3995- taskqueue_service_pb.TaskQueueServiceError.DUPLICATE_TASK_NAME:
3996- DuplicateTaskNameError,
3997-
3998- taskqueue_service_pb.TaskQueueServiceError.TOO_MANY_TASKS:
3999- TooManyTasksError,
4000-
4001-}
4002-
4003-_PRESERVE_ENVIRONMENT_HEADERS = (
4004- ('X-AppEngine-Default-Namespace', 'HTTP_X_APPENGINE_DEFAULT_NAMESPACE'),)
4005-
4006-
4007-class _UTCTimeZone(datetime.tzinfo):
4008- """UTC timezone."""
4009-
4010- ZERO = datetime.timedelta(0)
4011-
4012- def utcoffset(self, dt):
4013- return self.ZERO
4014-
4015- def dst(self, dt):
4016- return self.ZERO
4017-
4018- def tzname(self, dt):
4019- return 'UTC'
4020-
4021-
4022-_UTC = _UTCTimeZone()
4023-
4024-
4025-def _parse_relative_url(relative_url):
4026- """Parses a relative URL and splits it into its path and query string.
4027-
4028- Args:
4029- relative_url: The relative URL, starting with a '/'.
4030-
4031- Returns:
4032- Tuple (path, query) where:
4033- path: The path in the relative URL.
4034- query: The query string in the URL without the '?' character.
4035-
4036- Raises:
4037- _RelativeUrlError if the relative_url is invalid for whatever reason
4038- """
4039- if not relative_url:
4040- raise _RelativeUrlError('Relative URL is empty')
4041- (scheme, netloc, path, query, fragment) = urlparse.urlsplit(relative_url)
4042- if scheme or netloc:
4043- raise _RelativeUrlError('Relative URL may not have a scheme or location')
4044- if fragment:
4045- raise _RelativeUrlError('Relative URL may not specify a fragment')
4046- if not path or path[0] != '/':
4047- raise _RelativeUrlError('Relative URL path must start with "/"')
4048- return path, query
4049-
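Note: illustrative behavior of _parse_relative_url (the function now lives under google.appengine.api.taskqueue):

    path, query = _parse_relative_url('/worker?key=123')
    # path == '/worker', query == 'key=123'

    # Each of these raises _RelativeUrlError:
    #   _parse_relative_url('')                    empty URL
    #   _parse_relative_url('http://host/x')       has scheme/location
    #   _parse_relative_url('/worker#frag')        has a fragment
    #   _parse_relative_url('worker')              must start with '/'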
4050-
4051-def _flatten_params(params):
4052- """Converts a dictionary of parameters to a list of parameters.
4053-
4054- Any unicode strings in keys or values will be encoded as UTF-8.
4055-
4056- Args:
4057- params: Dictionary mapping parameter keys to values. Values will be
4058- converted to a string and added to the list as tuple (key, value). If
4059- a values is iterable and not a string, each contained value will be
4060- added as a separate (key, value) tuple.
4061-
4062- Returns:
4063- List of (key, value) tuples.
4064- """
4065- def get_string(value):
4066- if isinstance(value, unicode):
4067- return unicode(value).encode('utf-8')
4068- else:
4069- return str(value)
4070-
4071- param_list = []
4072- for key, value in params.iteritems():
4073- key = get_string(key)
4074- if isinstance(value, basestring):
4075- param_list.append((key, get_string(value)))
4076- else:
4077- try:
4078- iterator = iter(value)
4079- except TypeError:
4080- param_list.append((key, str(value)))
4081- else:
4082- param_list.extend((key, get_string(v)) for v in iterator)
4083-
4084- return param_list
4085-
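Note: illustrative behavior of _flatten_params (pair order follows dict iteration):

    _flatten_params({'a': 1, u'tags': [u'x', 'y']})
    # -> [('a', '1'), ('tags', 'x'), ('tags', 'y')]
    # unicode keys and values are UTF-8 encoded; non-string iterables fan
    # out into repeated (key, value) pairs.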
4086-
4087-class Task(object):
4088- """Represents a single Task on a queue."""
4089-
4090- __CONSTRUCTOR_KWARGS = frozenset([
4091- 'countdown', 'eta', 'headers', 'method', 'name', 'params', 'url'])
4092-
4093- __eta_posix = None
4094-
4095- def __init__(self, payload=None, **kwargs):
4096- """Initializer.
4097-
4098- All parameters are optional.
4099-
4100- Args:
4101- payload: The payload data for this Task that will be delivered to the
4102- webhook as the HTTP request body. This is only allowed for POST and PUT
4103- methods.
4104- countdown: Time in seconds into the future that this Task should execute.
4105- Defaults to zero.
4106- eta: Absolute time when the Task should execute. May not be specified
4107- if 'countdown' is also supplied. This may be timezone-aware or
4108- timezone-naive.
4109- headers: Dictionary of headers to pass to the webhook. Values in the
4110- dictionary may be iterable to indicate repeated header fields.
4111- method: Method to use when accessing the webhook. Defaults to 'POST'.
4112- name: Name to give the Task; if not specified, a name will be
4113- auto-generated when added to a queue and assigned to this object. Must
4114- match the _TASK_NAME_PATTERN regular expression.
4115- params: Dictionary of parameters to use for this Task. For POST requests
4116- these params will be encoded as 'application/x-www-form-urlencoded' and
4117- set to the payload. For all other methods, the parameters will be
4118- converted to a query string. May not be specified if the URL already
4119- contains a query string.
4120- url: Relative URL where the webhook that should handle this task is
4121- located for this application. May have a query string unless this is
4122- a POST method.
4123-
4124- Raises:
4125- InvalidTaskError if any of the parameters are invalid;
4126- InvalidTaskNameError if the task name is invalid; InvalidUrlError if
4127- the task URL is invalid or too long; TaskTooLargeError if the task with
4128- its payload is too large.
4129- """
4130- args_diff = set(kwargs.iterkeys()) - self.__CONSTRUCTOR_KWARGS
4131- if args_diff:
4132- raise TypeError('Invalid arguments: %s' % ', '.join(args_diff))
4133-
4134- self.__name = kwargs.get('name')
4135- if self.__name and not _TASK_NAME_RE.match(self.__name):
4136- raise InvalidTaskNameError(
4137- 'Task name does not match expression "%s"; found %s' %
4138- (_TASK_NAME_PATTERN, self.__name))
4139-
4140- self.__default_url, self.__relative_url, query = Task.__determine_url(
4141- kwargs.get('url', ''))
4142- self.__headers = urlfetch._CaselessDict()
4143- self.__headers.update(kwargs.get('headers', {}))
4144- self.__method = kwargs.get('method', 'POST').upper()
4145- self.__payload = None
4146- params = kwargs.get('params', {})
4147-
4148- for header_name, environ_name in _PRESERVE_ENVIRONMENT_HEADERS:
4149- value = os.environ.get(environ_name)
4150- if value is not None:
4151- self.__headers.setdefault(header_name, value)
4152-
4153- self.__headers.setdefault('X-AppEngine-Current-Namespace',
4154- namespace_manager.get_namespace())
4155- if query and params:
4156- raise InvalidTaskError('Query string and parameters both present; '
4157- 'only one of these may be supplied')
4158-
4159- if self.__method == 'POST':
4160- if payload and params:
4161- raise InvalidTaskError('Message body and parameters both present for '
4162- 'POST method; only one of these may be supplied')
4163- elif query:
4164- raise InvalidTaskError('POST method may not have a query string; '
4165- 'use the "params" keyword argument instead')
4166- elif params:
4167- self.__payload = Task.__encode_params(params)
4168- self.__headers.setdefault(
4169- 'content-type', 'application/x-www-form-urlencoded')
4170- elif payload is not None:
4171- self.__payload = Task.__convert_payload(payload, self.__headers)
4172- elif self.__method in _NON_POST_METHODS:
4173- if payload and self.__method not in _BODY_METHODS:
4174- raise InvalidTaskError('Payload may only be specified for methods %s' %
4175- ', '.join(_BODY_METHODS))
4176- if payload:
4177- self.__payload = Task.__convert_payload(payload, self.__headers)
4178- if params:
4179- query = Task.__encode_params(params)
4180- if query:
4181- self.__relative_url = '%s?%s' % (self.__relative_url, query)
4182- else:
4183- raise InvalidTaskError('Invalid method: %s' % self.__method)
4184-
4185- self.__headers_list = _flatten_params(self.__headers)
4186- self.__eta_posix = Task.__determine_eta_posix(
4187- kwargs.get('eta'), kwargs.get('countdown'))
4188- self.__eta = None
4189- self.__enqueued = False
4190-
4191- if self.size > MAX_TASK_SIZE_BYTES:
4192- raise TaskTooLargeError('Task size must be less than %d; found %d' %
4193- (MAX_TASK_SIZE_BYTES, self.size))
4194-
4195- @staticmethod
4196- def __determine_url(relative_url):
4197- """Determines the URL of a task given a relative URL and a name.
4198-
4199- Args:
4200- relative_url: The relative URL for the Task.
4201-
4202- Returns:
4203- Tuple (default_url, relative_url, query) where:
4204- default_url: True if this Task is using the default URL scheme;
4205- False otherwise.
4206- relative_url: String containing the relative URL for this Task.
4207- query: The query string for this task.
4208-
4209- Raises:
4210- InvalidUrlError if the relative_url is invalid.
4211- """
4212- if not relative_url:
4213- default_url, query = True, ''
4214- else:
4215- default_url = False
4216- try:
4217- relative_url, query = _parse_relative_url(relative_url)
4218- except _RelativeUrlError, e:
4219- raise InvalidUrlError(e)
4220-
4221- if len(relative_url) > MAX_URL_LENGTH:
4222- raise InvalidUrlError(
4223- 'Task URL must be less than %d characters; found %d' %
4224- (MAX_URL_LENGTH, len(relative_url)))
4225-
4226- return (default_url, relative_url, query)
4227-
4228- @staticmethod
4229- def __determine_eta_posix(eta=None, countdown=None, current_time=time.time):
4230- """Determines the ETA for a task.
4231-
4232- If 'eta' and 'countdown' are both None, the current time will be used.
4233- Otherwise, only one of them may be specified.
4234-
4235- Args:
4236- eta: A datetime.datetime specifying the absolute ETA or None;
4237- this may be timezone-aware or timezone-naive.
4238- countdown: Count in seconds into the future from the present time that
4239- the ETA should be assigned to.
4240-
4241- Returns:
4242- A float giving a POSIX timestamp containing the ETA.
4243-
4244- Raises:
4245- InvalidTaskError if the parameters are invalid.
4246- """
4247- if eta is not None and countdown is not None:
4248- raise InvalidTaskError('May not use a countdown and ETA together')
4249- elif eta is not None:
4250- if not isinstance(eta, datetime.datetime):
4251- raise InvalidTaskError('ETA must be a datetime.datetime instance')
4252- elif eta.tzinfo is None:
4253- return time.mktime(eta.timetuple()) + eta.microsecond*1e-6
4254- else:
4255- return calendar.timegm(eta.utctimetuple()) + eta.microsecond*1e-6
4256- elif countdown is not None:
4257- try:
4258- countdown = float(countdown)
4259- except ValueError:
4260- raise InvalidTaskError('Countdown must be a number')
4261- except OverflowError:
4262- raise InvalidTaskError('Countdown out of range')
4263- else:
4264- return current_time() + countdown
4265- else:
4266- return current_time()
4267-
4268- @staticmethod
4269- def __encode_params(params):
4270- """URL-encodes a list of parameters.
4271-
4272- Args:
4273- params: Dictionary of parameters, possibly with iterable values.
4274-
4275- Returns:
4276- URL-encoded version of the params, ready to be added to a query string or
4277- POST body.
4278- """
4279- return urllib.urlencode(_flatten_params(params))
4280-
4281- @staticmethod
4282- def __convert_payload(payload, headers):
4283- """Converts a Task payload into UTF-8 and sets headers if necessary.
4284-
4285- Args:
4286- payload: The payload data to convert.
4287- headers: Dictionary of headers.
4288-
4289- Returns:
4290- The payload as a non-unicode string.
4291-
4292- Raises:
4293- InvalidTaskError if the payload is not a string or unicode instance.
4294- """
4295- if isinstance(payload, unicode):
4296- headers.setdefault('content-type', 'text/plain; charset=utf-8')
4297- payload = payload.encode('utf-8')
4298- elif not isinstance(payload, str):
4299- raise InvalidTaskError(
4300- 'Task payloads must be strings; invalid payload: %r' % payload)
4301- return payload
4302-
4303- @property
4304- def on_queue_url(self):
4305- """Returns True if this Task will run on the queue's URL."""
4306- return self.__default_url
4307-
4308- @property
4309- def eta_posix(self):
4310- """Returns a POSIX timestamp giving when this Task will execute."""
4311- if self.__eta_posix is None and self.__eta is not None:
4312- self.__eta_posix = Task.__determine_eta_posix(self.__eta)
4313- return self.__eta_posix
4314-
4315- @property
4316- def eta(self):
4317- """Returns a datetime when this Task will execute."""
4318- if self.__eta is None and self.__eta_posix is not None:
4319- self.__eta = datetime.datetime.fromtimestamp(self.__eta_posix, _UTC)
4320- return self.__eta
4321-
4322- @property
4323- def headers(self):
4324- """Returns a copy of the headers for this Task."""
4325- return self.__headers.copy()
4326-
4327- @property
4328- def method(self):
4329- """Returns the method to use for this Task."""
4330- return self.__method
4331-
4332- @property
4333- def name(self):
4334- """Returns the name of this Task.
4335-
4336- Will be None if using auto-assigned Task names and this Task has not yet
4337- been added to a Queue.
4338- """
4339- return self.__name
4340-
4341- @property
4342- def payload(self):
4343- """Returns the payload for this task, which may be None."""
4344- return self.__payload
4345-
4346- @property
4347- def size(self):
4348- """Returns the size of this task in bytes."""
4349-    HEADER_SEPARATOR = len(': \r\n')
4350-    header_size = sum((len(key) + len(value) + HEADER_SEPARATOR)
4351- for key, value in self.__headers_list)
4352- return (len(self.__method) + len(self.__payload or '') +
4353- len(self.__relative_url) + header_size)
4354-
4355- @property
4356- def url(self):
4357- """Returns the relative URL for this Task."""
4358- return self.__relative_url
4359-
4360- @property
4361- def was_enqueued(self):
4362- """Returns True if this Task has been enqueued.
4363-
4364- Note: This will not check if this task already exists in the queue.
4365- """
4366- return self.__enqueued
4367-
4368- def add(self, queue_name=_DEFAULT_QUEUE, transactional=False):
4369- """Adds this Task to a queue. See Queue.add."""
4370- return Queue(queue_name).add(self, transactional=transactional)
4371-
4372-
4373-class Queue(object):
4374- """Represents a Queue."""
4375-
4376- def __init__(self, name=_DEFAULT_QUEUE):
4377- """Initializer.
4378-
4379- Args:
4380- name: Name of this queue. If not supplied, defaults to the default queue.
4381-
4382- Raises:
4383- InvalidQueueNameError if the queue name is invalid.
4384- """
4385- if not _QUEUE_NAME_RE.match(name):
4386- raise InvalidQueueNameError(
4387- 'Queue name does not match pattern "%s"; found %s' %
4388- (_QUEUE_NAME_PATTERN, name))
4389- self.__name = name
4390- self.__url = '%s/%s' % (_DEFAULT_QUEUE_PATH, self.__name)
4391-
4392- self._app = None
4393-
4394- def add(self, task, transactional=False):
4395- """Adds a Task or list of Tasks to this Queue.
4396-
4397- If a list of more than one Tasks is given, a raised exception does not
4398- guarantee that no tasks were added to the queue (unless transactional is set
4399- to True). To determine which tasks were successfully added when an exception
4400- is raised, check the Task.was_enqueued property.
4401-
4402- Args:
4403-      task: A Task instance or a list of Task instances that will be added to the
4404- queue.
4405-      transactional: If False, adds the Task(s) to a queue irrespective of
4406-        the enclosing transaction's success or failure. An exception is
4407-        raised if True and called outside of a transaction. (optional)
4408-
4409- Returns:
4410- The Task or list of tasks that was supplied to this method.
4411-
4412- Raises:
4413- BadTaskStateError: if the Task(s) has already been added to a queue.
4414- BadTransactionStateError: if the transactional argument is true but this
4415- call is being made outside of the context of a transaction.
4416- Error-subclass on application errors.
4417- """
4418- try:
4419- tasks = list(iter(task))
4420- except TypeError:
4421- tasks = [task]
4422- multiple = False
4423- else:
4424- multiple = True
4425-
4426- self.__AddTasks(tasks, transactional)
4427-
4428- if multiple:
4429- return tasks
4430- else:
4431- assert len(tasks) == 1
4432- return tasks[0]
4433-
4434- def __AddTasks(self, tasks, transactional):
4435- """Internal implementation of .add() where tasks must be a list."""
4436-
4437- request = taskqueue_service_pb.TaskQueueBulkAddRequest()
4438- response = taskqueue_service_pb.TaskQueueBulkAddResponse()
4439-
4440- task_names = set()
4441- for task in tasks:
4442- if task.name:
4443- if task.name in task_names:
4444- raise DuplicateTaskNameError(
4445- 'The task name %r is used more than once in the request' %
4446- task.name)
4447- task_names.add(task.name)
4448-
4449- self.__FillAddRequest(task, request.add_add_request(), transactional)
4450-
4451- try:
4452- apiproxy_stub_map.MakeSyncCall('taskqueue', 'BulkAdd', request, response)
4453- except apiproxy_errors.ApplicationError, e:
4454- raise self.__TranslateError(e.application_error, e.error_detail)
4455-
4456- assert response.taskresult_size() == len(tasks), (
4457- 'expected %d results from BulkAdd(), got %d' % (
4458- len(tasks), response.taskresult_size()))
4459-
4460- exception = None
4461- for task, task_result in zip(tasks, response.taskresult_list()):
4462- if task_result.result() == taskqueue_service_pb.TaskQueueServiceError.OK:
4463- if task_result.has_chosen_task_name():
4464- task._Task__name = task_result.chosen_task_name()
4465- task._Task__enqueued = True
4466- elif (task_result.result() ==
4467- taskqueue_service_pb.TaskQueueServiceError.SKIPPED):
4468- pass
4469- elif exception is None:
4470- exception = self.__TranslateError(task_result.result())
4471-
4472- if exception is not None:
4473- raise exception
4474-
4475- return tasks
4476-
4477- def __FillAddRequest(self, task, task_request, transactional):
4478- """Populates a TaskQueueAddRequest with the data from a Task instance.
4479-
4480- Args:
4481- task: The Task instance to use as a source for the data to be added to
4482- task_request.
4483- task_request: The taskqueue_service_pb.TaskQueueAddRequest to populate.
4484- transactional: If true then populates the task_request.transaction message
4485- with information from the enclosing transaction (if any).
4486-
4487- Raises:
4488- BadTaskStateError: If the task was already added to a Queue.
4489- BadTransactionStateError: If the transactional argument is True and there
4490- is no enclosing transaction.
4491- InvalidTaskNameError: If the transactional argument is True and the task
4492- is named.
4493- """
4494- if task.was_enqueued:
4495- raise BadTaskStateError('Task has already been enqueued')
4496-
4497- adjusted_url = task.url
4498- if task.on_queue_url:
4499- adjusted_url = self.__url + task.url
4500-
4501-
4502- task_request.set_queue_name(self.__name)
4503- task_request.set_eta_usec(long(task.eta_posix * 1e6))
4504- task_request.set_method(_METHOD_MAP.get(task.method))
4505- task_request.set_url(adjusted_url)
4506-
4507- if task.name:
4508- task_request.set_task_name(task.name)
4509- else:
4510- task_request.set_task_name('')
4511-
4512- if task.payload:
4513- task_request.set_body(task.payload)
4514- for key, value in _flatten_params(task.headers):
4515- header = task_request.add_header()
4516- header.set_key(key)
4517- header.set_value(value)
4518-
4519- if self._app:
4520- task_request.set_app_id(self._app)
4521-
4522- if transactional:
4523- from google.appengine.api import datastore
4524- if not datastore._MaybeSetupTransaction(task_request, []):
4525- raise BadTransactionStateError(
4526- 'Transactional adds are not allowed outside of transactions')
4527-
4528- if task_request.has_transaction() and task.name:
4529- raise InvalidTaskNameError(
4530- 'Task bound to a transaction cannot be named.')
4531-
4532- @property
4533- def name(self):
4534- """Returns the name of this queue."""
4535- return self.__name
4536-
4537- @staticmethod
4538- def __TranslateError(error, detail=''):
4539- """Translates a TaskQueueServiceError into an exception.
4540-
4541- Args:
4542- error: Value from TaskQueueServiceError enum.
4543- detail: A human-readable description of the error.
4544-
4545- Returns:
4546- The corresponding Exception sub-class for that error code.
4547- """
4548- if (error >= taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR
4549- and isinstance(error, int)):
4550- from google.appengine.api import datastore
4551- datastore_exception = datastore._DatastoreExceptionFromErrorCodeAndDetail(
4552- error - taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR,
4553- detail)
4554-
4555- class JointException(datastore_exception.__class__, DatastoreError):
4556- """There was a datastore error while accessing the queue."""
4557- __msg = (u'taskqueue.DatastoreError caused by: %s %s' %
4558- (datastore_exception.__class__, detail))
4559- def __str__(self):
4560- return JointException.__msg
4561-
4562- return JointException()
4563- else:
4564- exception_class = _ERROR_MAPPING.get(error, None)
4565- if exception_class:
4566- return exception_class(detail)
4567- else:
4568- return Error('Application error %s: %s' % (error, detail))
4569-
4570-
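Note: JointException above is a small trick worth calling out: the dynamically created class inherits from both the translated datastore exception type and DatastoreError, so callers can catch it under either hierarchy. A standalone sketch with stand-in classes:

    class DatastoreError(Exception):
        pass

    class Timeout(Exception):        # stand-in for a datastore exception
        pass

    class JointException(Timeout, DatastoreError):
        """Caught by `except Timeout` and by `except DatastoreError`."""

    try:
        raise JointException()
    except DatastoreError:
        pass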
4571-def add(*args, **kwargs):
4572- """Convenience method will create a Task and add it to a queue.
4573-
4574- All parameters are optional.
4575-
4576- Args:
4577- name: Name to give the Task; if not specified, a name will be
4578- auto-generated when added to a queue and assigned to this object. Must
4579- match the _TASK_NAME_PATTERN regular expression.
4580- queue_name: Name of this queue. If not supplied, defaults to
4581- the default queue.
4582- url: Relative URL where the webhook that should handle this task is
4583- located for this application. May have a query string unless this is
4584- a POST method.
4585- method: Method to use when accessing the webhook. Defaults to 'POST'.
4586- headers: Dictionary of headers to pass to the webhook. Values in the
4587- dictionary may be iterable to indicate repeated header fields.
4588- payload: The payload data for this Task that will be delivered to the
4589- webhook as the HTTP request body. This is only allowed for POST and PUT
4590- methods.
4591- params: Dictionary of parameters to use for this Task. For POST requests
4592- these params will be encoded as 'application/x-www-form-urlencoded' and
4593- set to the payload. For all other methods, the parameters will be
4594- converted to a query string. May not be specified if the URL already
4595- contains a query string.
4596-    transactional: If False, adds the Task(s) to a queue irrespective of the
4597-      enclosing transaction's success or failure. An exception is raised if
4598-      True and called outside of a transaction. (optional)
4599- countdown: Time in seconds into the future that this Task should execute.
4600- Defaults to zero.
4601- eta: Absolute time when the Task should execute. May not be specified
4602- if 'countdown' is also supplied. This may be timezone-aware or
4603- timezone-naive.
4604-
4605- Returns:
4606- The Task that was added to the queue.
4607-
4608- Raises:
4609- InvalidTaskError if any of the parameters are invalid;
4610- InvalidTaskNameError if the task name is invalid; InvalidUrlError if
4611- the task URL is invalid or too long; TaskTooLargeError if the task with
4612- its payload is too large.
4613- """
4614- transactional = kwargs.pop('transactional', False)
4615- queue_name = kwargs.pop('queue_name', _DEFAULT_QUEUE)
4616- return Task(*args, **kwargs).add(
4617- queue_name=queue_name, transactional=transactional)
4618-
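In GAE 1.4.0 this module graduates out of labs into the non-labs taskqueue package this branch introduces, but the call shape is unchanged. A typical use, assuming the application maps a worker handler at /worker (handler path and params are hypothetical):

    from google.appengine.api import taskqueue

    # Enqueue a POST to /worker on the default queue, ~30 seconds from now.
    taskqueue.add(url='/worker', params={'key': 'some-key'}, countdown=30)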
4619-def getLang(file):
4620- supportedExtensions = {
4621- "rb" : "ruby",
4622- "py" : "python",
4623- "pl" : "perl",
4624- }
4625-
4626-  # Fall back to "none" if the extension is not recognized.
4627- extension = file.split(".")[-1]
4628-
4629- try:
4630- lang = supportedExtensions[extension]
4631- return lang
4632-  except KeyError:
4633- sys.stderr.write("extension " + extension + " not recognized\n")
4634- return "none"
4635-
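getLang keys the interpreter name off the script's file extension when the Hadoop streaming command is assembled below; anything unrecognized falls back to the string "none" with a warning on stderr. Expected behavior (file names hypothetical):

    print getLang("wordcount.py")  # -> "python"
    print getLang("reduce.rb")     # -> "ruby"
    print getLang("job.tcl")       # -> "none", warning written to stderr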
4636-def writeTempFile(suffix, data):
4637- suffix = urllib.unquote(suffix)
4638- regex = r"[^\w\d/\.-]"
4639- pattern = re.compile(regex)
4640- suffix = pattern.sub('', suffix)
4641-
4642- fileLoc = "/tmp/" + suffix
4643- f = open(fileLoc, "w+")
4644- f.write(data)
4645- f.close()
4646- return fileLoc
4647-
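writeTempFile (and the MapReduce helpers below) scrub their URL-decoded arguments with the regex [^\w\d/\.-], deleting every character that is not a word character, digit, slash, dot, or hyphen before the value reaches a file path or shell command. A quick illustration; note that '/' and '.' survive the scrub, so a suffix containing '..' can still point outside /tmp:

    import re

    pattern = re.compile(r"[^\w\d/\.-]")
    print pattern.sub('', 'job 1;rm -rf x')  # -> 'job1rm-rfx'
    print pattern.sub('', '../etc/passwd')   # -> '../etc/passwd' (unchanged)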
4648-def getAllIPs():
4649- all_ips = []
4650- fileLoc = APPSCALE_HOME + "/.appscale/all_ips"
4651- if os.path.exists(fileLoc):
4652- f = open(fileLoc)
4653-    all_ips = f.read().split()
4654-  return all_ips
4655-def getNumOfNodes():
4656- num_of_nodes = 0
4657- fileLoc = APPSCALE_HOME + "/.appscale/num_of_nodes"
4658- if os.path.exists(fileLoc):
4659- f = open(fileLoc)
4660- num_of_nodes = int(f.read())
4661-
4662- return num_of_nodes
4663-
4664-def putMRInput(data, inputLoc):
4665- inputLoc = urllib.unquote(inputLoc)
4666- regex = r"[^\w\d/\.-]"
4667- pattern = re.compile(regex)
4668- inputLoc = pattern.sub('', inputLoc)
4669-
4670- fileLoc = "/tmp/" + inputLoc
4671- f = open(fileLoc, "w+")
4672- f.write(data)
4673- f.close()
4674-
4675- removeInput = APPSCALE_HOME + "/AppDB/hadoop-0.20.0/bin/hadoop fs -rmr " + inputLoc
4676- sys.stderr.write(removeInput + "\n")
4677- os.system(removeInput)
4678-
4679- put = APPSCALE_HOME + "/AppDB/hadoop-0.20.0/bin/hadoop fs -put " + fileLoc + " " + inputLoc
4680- os.system(put)
4681-
4682-
4683-def runMRJob(mapper, reducer, inputLoc, outputLoc, config={}):
4684- mapper = urllib.unquote(mapper)
4685- reducer = urllib.unquote(reducer)
4686- inputLoc = urllib.unquote(inputLoc)
4687- outputLoc = urllib.unquote(outputLoc)
4688-
4689- regex = r"[^\w\d/\.-]"
4690- pattern = re.compile(regex)
4691-
4692- mydir = os.getcwd() + "/"
4693- mapper = "\"" + getLang(mapper) + " " + mydir + pattern.sub('', mapper) + "\""
4694- reducer = "\"" + getLang(reducer) + " " + mydir + pattern.sub('', reducer) + "\""
4695- inputLoc = pattern.sub('', inputLoc)
4696- outputLoc = pattern.sub('', outputLoc)
4697-
4698- removeOutput = APPSCALE_HOME + "/AppDB/hadoop-0.20.0/bin/hadoop fs -rmr " + outputLoc
4699- sys.stderr.write(removeOutput + "\n")
4700- os.system(removeOutput)
4701-
4702- formattedConfig = ""
4703- for key in config:
4704- formattedConfig = formattedConfig + " -D " + key + "=" + config[key]
4705-
4706- command = APPSCALE_HOME + "/AppDB/hadoop-0.20.0/bin/hadoop jar " + APPSCALE_HOME + "/AppDB/hadoop-0.20.0/contrib/streaming/hadoop-0.20.0-streaming.jar " + formattedConfig + " -input " + inputLoc + " -output " + outputLoc + " -mapper " + mapper + " -reducer " + reducer
4707- sys.stderr.write("\n" + command + "\n")
4708- start = time.time()
4709- os.system(command)
4710- end = time.time()
4711- sys.stderr.write("\nTime elapsed = " + str(end - start) + "seconds\n")
4712-
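runMRJob assembles the streaming command by string concatenation and hands it to os.system, so the regex scrub above is the only thing standing between the arguments and the shell. A list-based subprocess call sidesteps quoting entirely; a minimal sketch under the same assumptions (APPSCALE_HOME as defined elsewhere in this module, Hadoop 0.20.0 paths as used above):

    import subprocess

    HADOOP = APPSCALE_HOME + "/AppDB/hadoop-0.20.0/bin/hadoop"
    STREAMING_JAR = (APPSCALE_HOME + "/AppDB/hadoop-0.20.0/contrib/streaming/"
                     "hadoop-0.20.0-streaming.jar")

    def run_streaming_job(mapper, reducer, input_loc, output_loc, config=None):
        # Build argv as a list: no shell is involved, so no quoting or
        # injection issues; -D options go first, as in runMRJob above.
        cmd = [HADOOP, "jar", STREAMING_JAR]
        for key, value in (config or {}).items():
            cmd += ["-D", "%s=%s" % (key, value)]
        cmd += ["-input", input_loc, "-output", output_loc,
                "-mapper", mapper, "-reducer", reducer]
        return subprocess.call(cmd)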
4713-def getMROutput(outputLoc):
4714- outputLoc = urllib.unquote(outputLoc)
4715- regex = r"[^\w\d/\.-]"
4716- pattern = re.compile(regex)
4717- outputLoc = pattern.sub('', outputLoc)
4718-
4719- fileLoc = "/tmp/" + outputLoc
4720-
4721- rmr = "rm -rf " + fileLoc
4722- os.system(rmr)
4723- get = APPSCALE_HOME + "/AppDB/hadoop-0.20.0/bin/hadoop fs -get " + outputLoc + " " + fileLoc
4724- os.system(get)
4725-
4726- contents = "no output"
4727- if os.path.exists(fileLoc):
4728- cmd = "cat " + fileLoc + "/part*"
4729- contents = os.popen(cmd).read()
4730-
4731-
4732- sys.stderr.write(contents)
4733- return contents
4734-
4735-def getMRLogs(outputLoc):
4736- outputLoc = urllib.unquote(outputLoc)
4737- regex = r"[^\w\d/\.-]"
4738- pattern = re.compile(regex)
4739- outputLoc = pattern.sub('', outputLoc)
4740-
4741- fileLoc = "/tmp/" + outputLoc
4742-
4743- rmr = "rm -rf " + fileLoc
4744- os.system(rmr)
4745-
4746- get = APPSCALE_HOME + "/AppDB/hadoop-0.20.0/bin/hadoop fs -get " + outputLoc + " " + fileLoc
4747- os.system(get)
4748-
4749- contents = "no logs"
4750- if os.path.exists(fileLoc):
4751- cmd = "cat " + fileLoc + "/_logs/history/*"
4752- contents = os.popen(cmd).read()
4753-
4754-
4755- sys.stderr.write(contents)
4756- return contents
4757-
4758-
4759
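Taken together, these helpers form a small pipeline: stage input into HDFS, run the streaming job, then pull back the output and the job-history logs. A hypothetical end-to-end use (script and location names are illustrative):

    putMRInput("line one\nline two\n", "mr-input")
    runMRJob("mapper.py", "reducer.py", "mr-input", "mr-output",
             config={"mapred.reduce.tasks": "2"})
    print getMROutput("mr-output")  # concatenated part-* files, or "no output"
    print getMRLogs("mr-output")    # job history, or "no logs"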
4760=== removed file 'AppServer/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py'
4761--- AppServer/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py 2010-11-30 10:37:25 +0000
4762+++ AppServer/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py 1970-01-01 00:00:00 +0000
4763@@ -1,5229 +0,0 @@
4764-#!/usr/bin/env python
4765-#
4766-# Copyright 2007 Google Inc.
4767-#
4768-# Licensed under the Apache License, Version 2.0 (the "License");
4769-# you may not use this file except in compliance with the License.
4770-# You may obtain a copy of the License at
4771-#
4772-# http://www.apache.org/licenses/LICENSE-2.0
4773-#
4774-# Unless required by applicable law or agreed to in writing, software
4775-# distributed under the License is distributed on an "AS IS" BASIS,
4776-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
4777-# See the License for the specific language governing permissions and
4778-# limitations under the License.
4779-#
4780-
4781-from google.net.proto import ProtocolBuffer
4782-import array
4783-import dummy_thread as thread
4784-
4785-__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
4786- unusednames=printElemNumber,debug_strs no-special"""
4787-
4788-from google.appengine.datastore.datastore_v3_pb import *
4789-import google.appengine.datastore.datastore_v3_pb
4790-from google.net.proto.message_set import MessageSet
4791-class TaskQueueServiceError(ProtocolBuffer.ProtocolMessage):
4792-
4793- OK = 0
4794- UNKNOWN_QUEUE = 1
4795- TRANSIENT_ERROR = 2
4796- INTERNAL_ERROR = 3
4797- TASK_TOO_LARGE = 4
4798- INVALID_TASK_NAME = 5
4799- INVALID_QUEUE_NAME = 6
4800- INVALID_URL = 7
4801- INVALID_QUEUE_RATE = 8
4802- PERMISSION_DENIED = 9
4803- TASK_ALREADY_EXISTS = 10
4804- TOMBSTONED_TASK = 11
4805- INVALID_ETA = 12
4806- INVALID_REQUEST = 13
4807- UNKNOWN_TASK = 14
4808- TOMBSTONED_QUEUE = 15
4809- DUPLICATE_TASK_NAME = 16
4810- SKIPPED = 17
4811- TOO_MANY_TASKS = 18
4812- INVALID_PAYLOAD = 19
4813- INVALID_RETRY_PARAMETERS = 20
4814- DATASTORE_ERROR = 10000
4815-
4816- _ErrorCode_NAMES = {
4817- 0: "OK",
4818- 1: "UNKNOWN_QUEUE",
4819- 2: "TRANSIENT_ERROR",
4820- 3: "INTERNAL_ERROR",
4821- 4: "TASK_TOO_LARGE",
4822- 5: "INVALID_TASK_NAME",
4823- 6: "INVALID_QUEUE_NAME",
4824- 7: "INVALID_URL",
4825- 8: "INVALID_QUEUE_RATE",
4826- 9: "PERMISSION_DENIED",
4827- 10: "TASK_ALREADY_EXISTS",
4828- 11: "TOMBSTONED_TASK",
4829- 12: "INVALID_ETA",
4830- 13: "INVALID_REQUEST",
4831- 14: "UNKNOWN_TASK",
4832- 15: "TOMBSTONED_QUEUE",
4833- 16: "DUPLICATE_TASK_NAME",
4834- 17: "SKIPPED",
4835- 18: "TOO_MANY_TASKS",
4836- 19: "INVALID_PAYLOAD",
4837- 20: "INVALID_RETRY_PARAMETERS",
4838- 10000: "DATASTORE_ERROR",
4839- }
4840-
4841- def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
4842- ErrorCode_Name = classmethod(ErrorCode_Name)
4843-
4844-
4845-  def __init__(self, contents=None):
4846-    # No fields to initialize; just merge serialized contents if provided.
4847-    if contents is not None: self.MergeFromString(contents)
4848-
4849-
4850- def MergeFrom(self, x):
4851- assert x is not self
4852-
4853- def Equals(self, x):
4854- if x is self: return 1
4855- return 1
4856-
4857- def IsInitialized(self, debug_strs=None):
4858- initialized = 1
4859- return initialized
4860-
4861- def ByteSize(self):
4862- n = 0
4863- return n + 0
4864-
4865- def Clear(self):
4866- pass
4867-
4868- def OutputUnchecked(self, out):
4869- pass
4870-
4871- def TryMerge(self, d):
4872- while d.avail() > 0:
4873- tt = d.getVarInt32()
4874- if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
4875- d.skipData(tt)
4876-
4877-
4878- def __str__(self, prefix="", printElemNumber=0):
4879- res=""
4880- return res
4881-
4882-
4883- def _BuildTagLookupTable(sparse, maxtag, default=None):
4884- return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
4885-
4886-
4887- _TEXT = _BuildTagLookupTable({
4888- 0: "ErrorCode",
4889- }, 0)
4890-
4891- _TYPES = _BuildTagLookupTable({
4892- 0: ProtocolBuffer.Encoder.NUMERIC,
4893- }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
4894-
4895- _STYLE = """"""
4896- _STYLE_CONTENT_TYPE = """"""
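TaskQueueServiceError carries no fields of its own; it exists to host the error-code enum, and ErrorCode_Name provides the reverse lookup from numeric code to symbolic name used when formatting error messages. For example:

    print TaskQueueServiceError.ErrorCode_Name(11)     # -> "TOMBSTONED_TASK"
    print TaskQueueServiceError.ErrorCode_Name(10000)  # -> "DATASTORE_ERROR"
    print TaskQueueServiceError.ErrorCode_Name(999)    # -> "" (unknown code)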
4897-class TaskQueueRetryParameters(ProtocolBuffer.ProtocolMessage):
4898- has_retry_limit_ = 0
4899- retry_limit_ = 0
4900- has_age_limit_sec_ = 0
4901- age_limit_sec_ = 0
4902- has_min_backoff_sec_ = 0
4903- min_backoff_sec_ = 0.1
4904- has_max_backoff_sec_ = 0
4905- max_backoff_sec_ = 3600.0
4906- has_max_doublings_ = 0
4907- max_doublings_ = 16
4908-
4909- def __init__(self, contents=None):
4910- if contents is not None: self.MergeFromString(contents)
4911-
4912- def retry_limit(self): return self.retry_limit_
4913-
4914- def set_retry_limit(self, x):
4915- self.has_retry_limit_ = 1
4916- self.retry_limit_ = x
4917-
4918- def clear_retry_limit(self):
4919- if self.has_retry_limit_:
4920- self.has_retry_limit_ = 0
4921- self.retry_limit_ = 0
4922-
4923- def has_retry_limit(self): return self.has_retry_limit_
4924-
4925- def age_limit_sec(self): return self.age_limit_sec_
4926-
4927- def set_age_limit_sec(self, x):
4928- self.has_age_limit_sec_ = 1
4929- self.age_limit_sec_ = x
4930-
4931- def clear_age_limit_sec(self):
4932- if self.has_age_limit_sec_:
4933- self.has_age_limit_sec_ = 0
4934- self.age_limit_sec_ = 0
4935-
4936- def has_age_limit_sec(self): return self.has_age_limit_sec_
4937-
4938- def min_backoff_sec(self): return self.min_backoff_sec_
4939-
4940- def set_min_backoff_sec(self, x):
4941- self.has_min_backoff_sec_ = 1
4942- self.min_backoff_sec_ = x
4943-
4944- def clear_min_backoff_sec(self):
4945- if self.has_min_backoff_sec_:
4946- self.has_min_backoff_sec_ = 0
4947- self.min_backoff_sec_ = 0.1
4948-
4949- def has_min_backoff_sec(self): return self.has_min_backoff_sec_
4950-
4951- def max_backoff_sec(self): return self.max_backoff_sec_
4952-
4953- def set_max_backoff_sec(self, x):
4954- self.has_max_backoff_sec_ = 1
4955- self.max_backoff_sec_ = x
4956-
4957- def clear_max_backoff_sec(self):
4958- if self.has_max_backoff_sec_:
4959- self.has_max_backoff_sec_ = 0
4960- self.max_backoff_sec_ = 3600.0
4961-
4962- def has_max_backoff_sec(self): return self.has_max_backoff_sec_
4963-
4964- def max_doublings(self): return self.max_doublings_
4965-
4966- def set_max_doublings(self, x):
4967- self.has_max_doublings_ = 1
4968- self.max_doublings_ = x
4969-
4970- def clear_max_doublings(self):
4971- if self.has_max_doublings_:
4972- self.has_max_doublings_ = 0
4973- self.max_doublings_ = 16
4974-
4975- def has_max_doublings(self): return self.has_max_doublings_
4976-
4977-
4978- def MergeFrom(self, x):
4979- assert x is not self
4980- if (x.has_retry_limit()): self.set_retry_limit(x.retry_limit())
4981- if (x.has_age_limit_sec()): self.set_age_limit_sec(x.age_limit_sec())
4982- if (x.has_min_backoff_sec()): self.set_min_backoff_sec(x.min_backoff_sec())
4983- if (x.has_max_backoff_sec()): self.set_max_backoff_sec(x.max_backoff_sec())
4984- if (x.has_max_doublings()): self.set_max_doublings(x.max_doublings())
4985-
4986- def Equals(self, x):
4987- if x is self: return 1
4988- if self.has_retry_limit_ != x.has_retry_limit_: return 0
4989- if self.has_retry_limit_ and self.retry_limit_ != x.retry_limit_: return 0
4990- if self.has_age_limit_sec_ != x.has_age_limit_sec_: return 0
4991- if self.has_age_limit_sec_ and self.age_limit_sec_ != x.age_limit_sec_: return 0
4992- if self.has_min_backoff_sec_ != x.has_min_backoff_sec_: return 0
4993- if self.has_min_backoff_sec_ and self.min_backoff_sec_ != x.min_backoff_sec_: return 0
4994- if self.has_max_backoff_sec_ != x.has_max_backoff_sec_: return 0
4995- if self.has_max_backoff_sec_ and self.max_backoff_sec_ != x.max_backoff_sec_: return 0
4996- if self.has_max_doublings_ != x.has_max_doublings_: return 0
4997- if self.has_max_doublings_ and self.max_doublings_ != x.max_doublings_: return 0
4998- return 1
4999-
5000- def IsInitialized(self, debug_strs=None):
The diff has been truncated for viewing.
