Merge lp:~openerp-dev/openobject-addons/6.0-bug-744175-xrg into lp:openobject-addons/6.0

Proposed by Stephane Wirtel (OpenERP)
Status: Needs review
Proposed branch: lp:~openerp-dev/openobject-addons/6.0-bug-744175-xrg
Merge into: lp:openobject-addons/6.0
Diff against target: 552 lines (+281/-39)
6 files modified
document/document_directory.py (+33/-2)
document/document_storage.py (+20/-14)
document/nodes.py (+20/-6)
document/test/document_test3.yml (+115/-0)
document_ftp/ftpserver/abstracted_fs.py (+73/-17)
document_ftp/test/document_ftp_test2.yml (+20/-0)
To merge this branch: bzr merge lp:~openerp-dev/openobject-addons/6.0-bug-744175-xrg
Reviewer: OpenERP Core Team
Status: Pending
Review via email: mp+56148@code.launchpad.net

Description of the change

There is an OPW ticket linked to this branch.

I asked the customer to check whether this branch fixes their problem, and I am waiting for a reply.

Revision history for this message
Stephane Wirtel (OpenERP) (stephane-openerp) wrote:

Don't merge this without feedback from the customer.

Preview Diff

1=== modified file 'document/document_directory.py'
2--- document/document_directory.py 2011-01-14 09:34:28 +0000
3+++ document/document_directory.py 2011-04-04 12:41:11 +0000
4@@ -25,6 +25,7 @@
5
6 import nodes
7 from tools.translate import _
8+import logging
9
10 class document_directory(osv.osv):
11 _name = 'document.directory'
12@@ -124,18 +125,48 @@
13 res.append((d.id, s or d.name))
14 return res
15
16- def get_full_path(self, cr, uid, dir_id, context=None):
17+ def get_full_path(self, cr, uid, dir_id, res_vector=None, context=None):
18 """ Return the full path to this directory, in a list, root first
19 """
20 if isinstance(dir_id, (tuple, list)):
21 assert len(dir_id) == 1
22 dir_id = dir_id[0]
23-
24+ _log = logging.getLogger('document')
25 def _parent(dir_id, path):
26 parent=self.browse(cr, uid, dir_id)
27 if parent.parent_id and not parent.ressource_parent_type_id:
28 _parent(parent.parent_id.id,path)
29 path.append(parent.name)
30+ if parent.ressource_type_id:
31+ # here, we try to fill the names of dynamic nodes in the
32+ # path leading to the directory.
33+ # Since document.directory records do /not/ contain information
34+ # (aka. the vector) about the dynamic stuff, we need to trust
35+ # some external variable. If not available, we place an item
36+ # of (<model name>, False) in the path. The caller must handle
37+ # that or bork.
38+ if res_vector is None:
39+ _log.debug("get_full_path: missing a vector for %s", parent.ressource_type_id.model)
40+ elif res_vector and parent.ressource_type_id.model == res_vector[0]:
41+ obj = self.pool.get(res_vector[0])
42+ rbro = obj.browse(cr, uid, res_vector[1], context=context)
43+ fld_name = (parent.resource_field and parent.resource_field.name) or 'name'
44+
45+ if parent.ressource_tree:
46+ rev_names = []
47+ while rbro:
48+ par = getattr(rbro, obj._parent_name, False)
49+ if not par:
50+ break
51+ rev_names.append(rbro[fld_name])
52+ rev_names.reverse()
53+ path += map(nodes.filter_res_name, rev_names)
54+ else:
55+ # print "appending for %s:%s: %s" %(res_vector[0], res_vector[1], rbro[fld_name])
56+ path.append(nodes.filter_res_name(rbro[fld_name]))
57+ else:
58+ _log.debug("get_full_path: missing a vector for: %s (had %s) ", parent.ressource_type_id.model, res_vector[0])
59+ path.append((parent.ressource_type_id.model, False))
60 else:
61 path.append(parent.name)
62 return path
63
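The new res_vector argument exists because document.directory records carry no information about which record a dynamic ("ressource") node points to: a caller that knows it passes a (model, id) pair and gets a fully named path back, otherwise a (model, False) placeholder is inserted that the caller must handle. A minimal sketch of the intended call, with hypothetical dir_id and company_id, assuming the usual cr/uid/self.pool of a server-side method:

    dir_obj = self.pool.get('document.directory')
    # without a vector, dynamic segments come back as (model, False)
    # placeholders that the caller has to resolve or reject
    path = dir_obj.get_full_path(cr, uid, dir_id, context=context)
    # with a vector, the dynamic segment becomes the (filtered) record name
    path = dir_obj.get_full_path(cr, uid, dir_id,
                                 res_vector=('res.company', company_id),
                                 context=context)
    # e.g. ['Documents', 'Testing Realstore', 'Companies', 'OpenERP S.A.']
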
64=== modified file 'document/document_storage.py'
65--- document/document_storage.py 2011-01-14 00:11:01 +0000
66+++ document/document_storage.py 2011-04-04 12:41:11 +0000
67@@ -142,9 +142,10 @@
68 fsize = os.stat(fname).st_size
69 cr.execute("UPDATE ir_attachment " \
70 " SET index_content = %s, file_type = %s, " \
71- " file_size = %s " \
72+ " file_size = %s, "\
73+ " write_date = now(), write_uid = %s " \
74 " WHERE id = %s",
75- (icont_u, mime, fsize, par.file_id))
76+ (icont_u, mime, fsize, par.context.uid, par.file_id))
77 par.content_length = fsize
78 par.content_type = mime
79 cr.commit()
80@@ -157,9 +158,10 @@
81 par = self._get_parent()
82 cr = pooler.get_db(par.context.dbname).cursor()
83 fsize = os.stat(fname).st_size
84- cr.execute("UPDATE ir_attachment SET file_size = %s " \
85+ cr.execute("UPDATE ir_attachment SET file_size = %s, " \
86+ " write_date = now(), write_uid = %s " \
87 " WHERE id = %s",
88- (fsize, par.file_id))
89+ (fsize, par.context.uid, par.file_id))
90 par.content_length = fsize
91 cr.commit()
92 cr.close()
93@@ -228,17 +230,19 @@
94 out = psycopg2.Binary(data)
95 cr.execute("UPDATE ir_attachment " \
96 "SET db_datas = %s, file_size=%s, " \
97- " index_content= %s, file_type=%s " \
98+ " index_content= %s, file_type=%s, " \
99+ " write_date = now(), write_uid = %s " \
100 " WHERE id = %s",
101- (out, len(data), icont_u, mime, par.file_id))
102+ (out, len(data), icont_u, mime, par.context.uid, par.file_id))
103 elif self.mode == 'a':
104 data = self.getvalue()
105 out = psycopg2.Binary(data)
106 cr.execute("UPDATE ir_attachment " \
107 "SET db_datas = COALESCE(db_datas,'') || %s, " \
108- " file_size = COALESCE(file_size, 0) + %s " \
109+ " file_size = COALESCE(file_size, 0) + %s, " \
110+ " write_date = now(), write_uid = %s " \
111 " WHERE id = %s",
112- (out, len(data), par.file_id))
113+ (out, len(data), par.context.uid, par.file_id))
114 cr.commit()
115 except Exception:
116 logging.getLogger('document.storage').exception('Cannot update db file #%d for close:', par.file_id)
117@@ -306,18 +310,20 @@
118 icont_u = ''
119
120 cr.execute('UPDATE ir_attachment SET db_datas = %s::bytea, file_size=%s, ' \
121- 'index_content = %s, file_type = %s ' \
122+ 'index_content = %s, file_type = %s, ' \
123+ ' write_date = now(), write_uid = %s ' \
124 'WHERE id = %s',
125- (base64.encodestring(data), len(data), icont_u, mime, par.file_id))
126+ (base64.encodestring(data), len(data), icont_u, mime, par.context.uid, par.file_id))
127 elif self.mode == 'a':
128 data = self.getvalue()
129 # Yes, we're obviously using the wrong representation for storing our
130 # data as base64-in-bytea
131 cr.execute("UPDATE ir_attachment " \
132 "SET db_datas = encode( (COALESCE(decode(encode(db_datas,'escape'),'base64'),'') || decode(%s, 'base64')),'base64')::bytea , " \
133- " file_size = COALESCE(file_size, 0) + %s " \
134+ " file_size = COALESCE(file_size, 0) + %s, " \
135+ " write_date = now(), write_uid = %s " \
136 " WHERE id = %s",
137- (base64.encodestring(data), len(data), par.file_id))
138+ (base64.encodestring(data), len(data), par.context.uid, par.file_id))
139 cr.commit()
140 except Exception:
141 logging.getLogger('document.storage').exception('Cannot update db file #%d for close:', par.file_id)
142@@ -627,8 +633,8 @@
143 # a hack: /assume/ that the calling write operation will not try
144 # to write the fname and size, and update them in the db concurrently.
145 # We cannot use a write() here, because we are already in one.
146- cr.execute('UPDATE ir_attachment SET store_fname = %s, file_size = %s, index_content = %s, file_type = %s WHERE id = %s',
147- (store_fname, filesize, icont_u, mime, file_node.file_id))
148+ cr.execute('UPDATE ir_attachment SET store_fname = %s, file_size = %s, index_content = %s, file_type = %s, write_date = now(), write_uid = %s WHERE id = %s',
149+ (store_fname, filesize, icont_u, mime, uid, file_node.file_id))
150 file_node.content_length = filesize
151 file_node.content_type = mime
152 return True
153
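All four hunks in document_storage.py apply the same fix: every direct SQL update of ir_attachment now also stamps write_date and write_uid, so files written through FTP/WebDAV no longer keep a stale modification date and author. A standalone sketch of that pattern (DSN and ids are hypothetical; it needs a database that actually has an ir_attachment table):

    import psycopg2

    fsize, uid, file_id = 4, 1, 42                   # hypothetical values
    conn = psycopg2.connect("dbname=test")           # hypothetical DSN
    cr = conn.cursor()
    cr.execute("UPDATE ir_attachment "
               "SET file_size = %s, write_date = now(), write_uid = %s "
               "WHERE id = %s",
               (fsize, uid, file_id))
    conn.commit()
    cr.close()
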
154=== modified file 'document/nodes.py'
155--- document/nodes.py 2011-01-14 09:34:28 +0000
156+++ document/nodes.py 2011-04-04 12:41:11 +0000
157@@ -57,6 +57,17 @@
158 cre = cre[:fdot]
159 return time.mktime(time.strptime(cre,'%Y-%m-%d %H:%M:%S')) + frac
160
161+def filter_res_name(name):
162+ """ Filter forbidden chars from a resource name to node names
163+
164+ Resource names (eg. project.project.name) could contain any chars,
165+ some of them being illegal for filesystem representation.
166+ This is the central point to do this kind of filtering.
167+ Remember that the substitution char must be '_', because in SQL ILIKE
168+ it will match back the original (single) char.
169+ """
170+ return name.replace('/','_') # any other weird char?
171+
172 def get_node_context(cr, uid, context):
173 return node_context(cr, uid, context)
174
175@@ -866,7 +877,7 @@
176 # Yes! we can't do better but skip nameless records.
177
178 # Escape the name for characters not supported in filenames
179- res_name = res_name.replace('/','_') # any other weird char?
180+ res_name = filter_res_name(res_name)
181
182 if name and (res_name != name):
183 # we have matched _ to any character, but we only meant to match
184@@ -1043,7 +1054,7 @@
185 res_name = getattr(bo, namefield)
186 if not res_name:
187 continue
188- res_name = res_name.replace('/', '_')
189+ res_name = filter_res_name(res_name)
190 if name and (res_name != name):
191 continue
192 # TODO Revise
193@@ -1177,6 +1188,11 @@
194 self.write_date = fil.write_date or fil.create_date
195 self.content_length = fil.file_size
196 self.displayname = fil.name
197+ self.res_vector = None
198+ if fil.res_model:
199+ self.res_vector = (fil.res_model, fil.res_id)
200+ elif fil.partner_id:
201+ self.res_vector = ('res.partner', fil.partner_id.id)
202
203 self.uidperms = 14
204 if parent:
205@@ -1251,7 +1267,8 @@
206 dirpath = []
207 if fbro.parent_id:
208 dirobj = self.context._dirobj.pool.get('document.directory')
209- dirpath = dirobj.get_full_path(cr, uid, fbro.parent_id.id, context=self.context.context)
210+ dirpath = dirobj.get_full_path(cr, uid, fbro.parent_id.id,
211+ res_vector=self.res_vector, context=self.context.context)
212 if fbro.datas_fname:
213 dirpath.append(fbro.datas_fname)
214 else:
215@@ -1280,9 +1297,6 @@
216
217 def get_data_len(self, cr, fil_obj = None):
218 # TODO: verify with the storage object!
219- bin_size = self.context.context.get('bin_size', False)
220- if bin_size and not self.content_length:
221- self.content_length = fil_obj.db_datas
222 return self.content_length
223
224 def set_data(self, cr, data, fil_obj = None):
225
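The extracted filter_res_name() helper centralises the escaping that nodes.py previously did inline. The underscore is the only safe substitution character because the node is later looked up again with an SQL ILIKE pattern, in which '_' matches exactly one arbitrary character, so the sanitised name still matches the original record. A standalone sketch of that round trip (the record name is made up, and the regex only approximates ILIKE):

    import re

    def filter_res_name(name):
        # same substitution as document/nodes.py in this branch
        return name.replace('/', '_')

    res_name = 'Invoices 2011/03'                    # hypothetical record name
    fs_name = filter_res_name(res_name)              # 'Invoices 2011_03'
    # the lookup then runs, roughly, name ILIKE fs_name;
    # approximate ILIKE with a regex where '_' matches any single character
    assert re.match('^' + fs_name.replace('_', '.') + '$', res_name, re.I)
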
226=== added file 'document/test/document_test3.yml'
227--- document/test/document_test3.yml 1970-01-01 00:00:00 +0000
228+++ document/test/document_test3.yml 2011-04-04 12:41:11 +0000
229@@ -0,0 +1,115 @@
230+-
231+ I will now test the realstore functionality of DMS
232+-
233+ !assert {model: document.storage, id: storage_default }:
234+ - id != False
235+-
236+ I create a realstore folder, with some arbitrary storage path
237+-
238+ !python {model: document.storage}: |
239+ import tempfile
240+ tdir = tempfile.mkdtemp()
241+ print "I will be storing at %s" % tdir
242+ context['tests_doc_tmpdir'] = tdir
243+-
244+ !record {model: document.storage, id: test_realstore_id }:
245+ name: Realstore testing
246+ type: realstore
247+-
248+ !python {model: document.storage }: |
249+ id = ref('test_realstore_id')
250+ self.write(cr, uid, [id,], {'path': context['tests_doc_tmpdir']})
251+-
252+ I create a "Testing Realstore" folder where all the test data will go.
253+-
254+ !record {model: document.directory, id: dir_tests_realstore }:
255+ name: 'Testing Realstore'
256+ parent_id: dir_root
257+ storage_id: test_realstore_id
258+-
259+ I create an attachment into the realstore
260+-
261+ !record {model: ir.attachment, id: file_test_rs1 }:
262+ name: Test file.txt
263+ parent_id: dir_tests_realstore
264+-
265+ I delete the attachment from the root folder
266+-
267+ !python {model: ir.attachment}: |
268+ self.unlink(cr, uid, [ref('file_test_rs1')])
269+-
270+ I create a second attachment into the Testing folder.
271+-
272+ !record {model: ir.attachment, id: file_test_rs2 }:
273+ name: Test file 2
274+ parent_id: dir_tests_realstore
275+-
276+ I update the attachment with data, namely "abcd"
277+-
278+ !record {model: ir.attachment, id: file_test_rs2 }:
279+ datas: "YWJjZA==\n"
280+-
281+ I test that the datas of the attachment are correct
282+-
283+ !assert {model: ir.attachment, id: file_test_rs2 }:
284+ - datas == "YWJjZA==\n"
285+ - file_size == 4
286+ - file_type == 'text/plain'
287+-
288+ I open the real file and check the data
289+-
290+ !python {model: ir.attachment}: |
291+ import os
292+ rpath = os.path.join(context['tests_doc_tmpdir'], 'Documents', 'Testing Realstore', 'Test file 2')
293+ assert os.path.exists(rpath), "Cannot find %s!" % rpath
294+ print "Found path:", rpath
295+-
296+ I now check for Realstore & Dynamic folders
297+-
298+ I create a dynamic folder for companies
299+-
300+ !record {model: document.directory, id: test_dynfolder_1 }:
301+ name: Companies
302+ parent_id: dir_tests_realstore
303+ type: ressource
304+ ressource_type_id: base.model_res_company
305+ resource_find_all: False
306+ company_id: False
307+-
308+ I attach one document for the dynamic folder of companies
309+-
310+ !record {model: ir.attachment, id: file_test_rs3 }:
311+ name: Test file 3
312+ parent_id: test_dynfolder_1
313+ datas: "YWJjZA==\n"
314+ res_model: res.company
315+ res_id: !eval ref('base.main_company')
316+-
317+ I open the real dynamic file and check the data
318+-
319+ !python {model: ir.attachment}: |
320+ import os
321+ comp_obj = self.pool.get('res.company')
322+ comp_name = comp_obj.browse(cr, uid, ref('base.main_company')).name
323+ rpath = os.path.join(context['tests_doc_tmpdir'], 'Documents', 'Testing Realstore', \
324+ 'Companies', comp_name, 'Test file 3')
325+ assert os.path.exists(rpath), "Cannot find %s!" % rpath
326+ print "Found path:", rpath
327+-
328+ I delete the attachments
329+-
330+ !python {model: ir.attachment}: |
331+ self.unlink(cr, uid, [ref('file_test_rs2')])
332+ self.unlink(cr, uid, [ref('file_test_rs3')])
333+-
334+ I delete the tests folder
335+-
336+ !python {model: document.directory}: |
337+ self.unlink(cr, uid, [ref('dir_tests_realstore'), ref('test_dynfolder_1')])
338+ cr.commit()
339+-
340+ I delete the realstore
341+-
342+ !python {model: document.storage}: |
343+ self.unlink(cr, uid, [ref('test_realstore_id')])
344+ cr.commit()
345
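For reference when reading the assertions above: the payload "YWJjZA==" is simply the base64 encoding of the four bytes "abcd", which is why the test expects file_size == 4 once the data is written:

    import base64
    assert base64.b64decode("YWJjZA==") == b"abcd"
    assert len(base64.b64decode("YWJjZA==")) == 4
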
346=== modified file 'document_ftp/ftpserver/abstracted_fs.py'
347--- document_ftp/ftpserver/abstracted_fs.py 2011-01-14 09:34:28 +0000
348+++ document_ftp/ftpserver/abstracted_fs.py 2011-04-04 12:41:11 +0000
349@@ -1,6 +1,5 @@
350 # -*- encoding: utf-8 -*-
351
352-import os
353 import time
354 from tarfile import filemode
355 import logging
356@@ -33,6 +32,42 @@
357
358 from ftpserver import _to_decode, _to_unicode
359
360+class ftp_path(object):
361+ """Util functions for ftp (Unix) paths, instead of os.path
362+
363+ os.path will behave differently according to platform. For FTP paths
364+ we always want the Unix behavior
365+ """
366+
367+ @staticmethod
368+ def join(*pathelems):
369+ return '/'.join(pathelems)
370+
371+ @staticmethod
372+ def isabs(path):
373+ return path.startswith('/')
374+
375+ @staticmethod
376+ def split(path):
377+ return path.rsplit('/',1)
378+
379+ @staticmethod
380+ def normpath(path):
381+ if '//' not in path and '..' not in path and './' not in path:
382+ return path
383+
384+ pathelems = path.split('/')
385+ res = []
386+ for p in pathelems:
387+ if len(res) and not p:
388+ continue
389+ if p == '.':
390+ continue
391+ if p == '..' and len(res):
392+ res.pop()
393+ continue
394+ res.append(p)
395+ return '/'.join(res)
396
397 class abstracted_fs(object):
398 """A class used to interact with the file system, providing a high
399@@ -92,15 +127,10 @@
400
401 Pathname returned is relative!.
402 """
403- p = os.path.normpath(ftppath)
404+ p = ftp_path.normpath(ftppath)
405 # normalize string in a standard web-path notation having '/'
406 # as separator. xrg: is that really in the spec?
407 p = p.replace("\\", "/")
408- # os.path.normpath supports UNC paths (e.g. "//a/b/c") but we
409- # don't need them. In case we get an UNC path we collapse
410- # redundant separators appearing at the beginning of the string
411- while p[:2] == '//':
412- p = p[1:]
413 if p == '.':
414 return ''
415 return p
416@@ -119,7 +149,7 @@
417 if node:
418 paths = node.full_path()
419 res = '/' + node.context.dbname + '/' + \
420- _to_decode(os.path.join(*paths))
421+ _to_decode(ftp_path.join(*paths))
422
423 return res
424
425@@ -188,6 +218,7 @@
426 raise NotImplementedError # TODO
427
428 text = not 'b' in mode
429+ node = None # for pyflakes
430 # for unique file , maintain version if duplicate file
431 if dir:
432 cr = dir.cr
433@@ -255,16 +286,18 @@
434 """
435 path = self.ftpnorm(line)
436 if self.cwd_node is None:
437- if not os.path.isabs(path):
438- path = os.path.join(self.root, path)
439+ if not path:
440+ path = self.root or '/'
441+ elif not ftp_path.isabs(path):
442+ path = ftp_path.join(self.root, path)
443
444 if path == '/' and mode in ('list', 'cwd'):
445 return (None, None, None )
446
447- path = _to_unicode(os.path.normpath(path)) # again, for '/db/../ss'
448+ path = _to_unicode(ftp_path.normpath(path)) # again, for '/db/../ss'
449 if path == '.': path = ''
450
451- if os.path.isabs(path) and self.cwd_node is not None \
452+ if ftp_path.isabs(path) and self.cwd_node is not None \
453 and path.startswith(self.cwd):
454 # make relative, so that cwd_node is used again
455 path = path[len(self.cwd):]
456@@ -273,18 +306,19 @@
457
458 p_parts = path.split('/') # hard-code the unix sep here, by spec.
459
460- assert '..' not in p_parts
461
462 rem_path = None
463 if mode in ('create',):
464 rem_path = p_parts[-1]
465 p_parts = p_parts[:-1]
466+ assert rem_path != '..' # certainly invalid
467
468- if os.path.isabs(path):
469+ if ftp_path.isabs(path):
470 # we have to start from root, again
471 while p_parts and p_parts[0] == '':
472 p_parts = p_parts[1:]
473 # self._log.debug("Path parts: %r ", p_parts)
474+ assert '..' not in p_parts
475 if not p_parts:
476 raise IOError(errno.EPERM, 'Cannot perform operation at root dir')
477 dbname = p_parts[0]
478@@ -311,10 +345,32 @@
479 if p_parts and p_parts[-1] == '':
480 p_parts = p_parts[:-1]
481 cr, uid = self.get_node_cr_uid(self.cwd_node)
482+ start_node = self.cwd_node
483+ while p_parts and p_parts[0] == '..':
484+ if start_node.parent:
485+ p_parts = p_parts[1:]
486+ if isinstance(start_node.path, (list, tuple)):
487+ # node.parent is NOT a direct parent!
488+ inm_path = list(start_node.path[:-1])
489+ while p_parts and inm_path and p_parts[0] == '..':
490+ inm_path = inm_path[:-1]
491+ p_parts = p_parts[1:]
492+ if inm_path:
493+ p_parts = inm_path + p_parts
494+ start_node = start_node.parent
495+ else:
496+ # node has no (known) parent
497+ if len(p_parts) > 1:
498+ raise IOError(errno.ENOENT, 'Path does not exist')
499+ elif mode in ('list', 'cwd'):
500+ return (None, None, None )
501+ else:
502+ raise IOError(errno.ENOENT, 'Invalid path for %s operation' % mode)
503+ assert '..' not in p_parts
504 if p_parts:
505- node = self.cwd_node.get_uri(cr, p_parts)
506+ node = start_node.get_uri(cr, p_parts)
507 else:
508- node = self.cwd_node
509+ node = start_node
510 if node is False and mode not in ('???'):
511 cr.close()
512 raise IOError(errno.ENOENT, 'Path does not exist')
513@@ -506,7 +562,7 @@
514 if not glob.has_magic(ftppath):
515 return self.get_list_dir(self.ftp2fs(rawline, datacr))
516 else:
517- basedir, basename = os.path.split(ftppath)
518+ basedir, basename = ftp_path.split(ftppath)
519 if glob.has_magic(basedir):
520 return iter(['Directory recursion not supported.\r\n'])
521 else:
522
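The ftp_path helper replaces os.path here because os.path.normpath is platform-dependent (on Windows it also rewrites separators to backslashes), while FTP paths are always Unix paths by specification. A standalone sketch of what the new normpath is expected to produce (same algorithm as above; the sample paths are illustrative):

    def normpath(path):
        # same algorithm as ftp_path.normpath in the diff above
        if '//' not in path and '..' not in path and './' not in path:
            return path
        res = []
        for p in path.split('/'):
            if res and not p:   # collapse '//'; the first '' is kept, so '/x' stays absolute
                continue
            if p == '.':
                continue
            if p == '..' and res:
                res.pop()
                continue
            res.append(p)
        return '/'.join(res)

    assert normpath('/db/Documents/../Other') == '/db/Other'
    assert normpath('a//b/./c') == 'a/b/c'
    assert normpath('/db/Documents/x') == '/db/Documents/x'   # fast path, untouched
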
523=== modified file 'document_ftp/test/document_ftp_test2.yml'
524--- document_ftp/test/document_ftp_test2.yml 2011-01-14 00:11:01 +0000
525+++ document_ftp/test/document_ftp_test2.yml 2011-04-04 12:41:11 +0000
526@@ -222,6 +222,26 @@
527 ftp.close()
528 # TODO move
529 -
530+ I check the functionality of cd ".." command
531+-
532+ !python {model: ir.attachment}: |
533+ from document_ftp import test_easyftp as te
534+ ftp = te.get_ftp_folder(cr, uid, self, 'Documents/Test-Folder2')
535+ pwd = ftp.pwd().rsplit('/',1)[-1]
536+ assert pwd == 'Test-Folder2', pwd
537+ try:
538+ ftp.cwd('../Test-Folder3')
539+ except Exception, e:
540+ raise AssertionError("FTP error: " + str(e))
541+ pwd = ftp.pwd().rsplit('/',1)[-1]
542+ assert pwd == 'Test-Folder3', pwd
543+ try:
544+ ftp.cwd('..')
545+ except Exception, e:
546+ raise AssertionError("FTP error: " + str(e))
547+ pwd = ftp.pwd().rsplit('/',1)[-1]
548+ assert pwd == 'Documents', pwd
549+-
550 I remove the 'Test-Folder3'
551 -
552 !python {model: ir.attachment}: |
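
The YAML block added above exercises "cd .." through the test_easyftp helper; the same behaviour can be checked against a running server with plain ftplib (host, port, credentials, database and folder names below are all hypothetical):

    from ftplib import FTP

    ftp = FTP()
    ftp.connect('localhost', 8021)              # document_ftp usually listens on 8021
    ftp.login('admin', 'admin')                 # hypothetical credentials
    ftp.cwd('/my_db/Documents/Test-Folder2')    # hypothetical database and folders
    ftp.cwd('../Test-Folder3')                  # relies on the new '..' handling
    assert ftp.pwd().rsplit('/', 1)[-1] == 'Test-Folder3'
    ftp.cwd('..')
    assert ftp.pwd().rsplit('/', 1)[-1] == 'Documents'
    ftp.quit()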