openerp-dev-web team mailing list archive

[Merge] lp:~xrg/openobject-addons/trunk-patch15 into lp:~openerp-dev/openobject-addons/trunk-dev-addons1

 

tfr (Openerp) has proposed merging lp:~xrg/openobject-addons/trunk-patch15 into lp:~openerp-dev/openobject-addons/trunk-dev-addons1.

Requested reviews:
  OpenERP R&D Team (openerp-dev)

-- 
https://code.launchpad.net/~xrg/openobject-addons/trunk-patch15/+merge/44226
Your team OpenERP R&D Team is requested to review the proposed merge of lp:~xrg/openobject-addons/trunk-patch15 into lp:~openerp-dev/openobject-addons/trunk-dev-addons1.
=== modified file 'caldav/calendar.py'
--- caldav/calendar.py	2010-12-13 06:43:09 +0000
+++ caldav/calendar.py	2010-12-20 13:10:53 +0000
@@ -579,8 +579,8 @@
                                     string="Type", size=64),
             'line_ids': fields.one2many('basic.calendar.lines', 'calendar_id', 'Calendar Lines'),
             'create_date': fields.datetime('Created Date', readonly=True),
-            'write_date': fields.datetime('Modifided Date', readonly=True),
-            'description': fields.text("description"),
+            'write_date': fields.datetime('Write Date', readonly=True),
+            'description': fields.text("Description"),
             'calendar_color': fields.char('Color', size=20, help="For supporting clients, the color of the calendar entries"),
             'calendar_order': fields.integer('Order', help="For supporting clients, the order of this folder among the calendars"),
             'has_webcal': fields.boolean('WebCal', required=True, help="Also export a <name>.ics entry next to the calendar folder, with WebCal content."),
@@ -818,6 +818,7 @@
 
     _name = 'basic.calendar.fields'
     _description = 'Calendar fields'
+    _order = 'name'
 
     _columns = {
         'field_id': fields.many2one('ir.model.fields', 'OpenObject Field'),
@@ -833,7 +834,7 @@
     }
 
     _defaults = {
-        'fn': lambda *a: 'field',
+        'fn': 'field',
     }
 
     _sql_constraints = [
@@ -1164,11 +1165,16 @@
         self.__attribute__ = get_attribute_mapping(cr, uid, self._calname, ctx)
         for child in ical_data.getChildren():
             if child.name.lower() == 'trigger':
-                seconds = child.value.seconds
-                days = child.value.days
-                diff = (days * 86400) +  seconds
-                interval = 'days'
-                related = 'before'
+                if isinstance(child.value, timedelta):
+                    seconds = child.value.seconds
+                    days = child.value.days
+                    diff = (days * 86400) +  seconds
+                    interval = 'days'
+                    related = 'before'
+                elif isinstance(child.value, datetime):
+                    # TODO
+                    # remember, spec says this datetime is in UTC
+                    raise NotImplementedError("we cannot parse absolute triggers")
                 if not seconds:
                     duration = abs(days)
                     related = days > 0 and 'after' or 'before'
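
The new trigger branch above only handles relative (timedelta) VALARM triggers and raises for absolute (datetime) ones. Below is a rough, standalone restatement of that mapping; the helper name and the minutes fallback are illustrative, not part of the patch.

from datetime import timedelta

def trigger_to_alarm(value):
    # Relative triggers only; absolute (datetime) triggers are rejected,
    # just like the NotImplementedError branch in the hunk above.
    if not isinstance(value, timedelta):
        raise NotImplementedError("we cannot parse absolute triggers")
    seconds = value.seconds   # Python keeps 0 <= seconds < 86400
    days = value.days         # negative when the alarm precedes the event
    if not seconds:
        # whole days before/after the event
        return abs(days), (days > 0 and 'after' or 'before'), 'days'
    diff = (days * 86400) + seconds   # signed offset in seconds
    return abs(diff) // 60, (diff > 0 and 'after' or 'before'), 'minutes'

# e.g. timedelta(minutes=-15) -> (15, 'before', 'minutes')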

=== modified file 'caldav/wizard/calendar_event_import.py'
--- caldav/wizard/calendar_event_import.py	2010-12-13 06:43:09 +0000
+++ caldav/wizard/calendar_event_import.py	2010-12-20 13:10:53 +0000
@@ -82,8 +82,11 @@
                   'msg': fields.text('', readonly=True),
                }
 
+    def _get_msg(self, cr, uid, context):
+        return _('Import Sucessful')
+        
     _defaults = {
-               'msg':lambda *a:'Import Sucessful'
+               'msg': _get_msg,
                }
 
 calendar_event_import()

=== modified file 'document/content_index.py'
--- document/content_index.py	2010-11-11 06:47:07 +0000
+++ document/content_index.py	2010-12-20 13:10:53 +0000
@@ -165,11 +165,9 @@
                     os.write(fd, content)
                     os.close(fd)
             
-                fp = Popen(['file','-b','--mime',fname], shell=False, stdout=PIPE).stdout
-                try:
-                    result = fp.read()
-                finally:    
-                    fp.close()
+                pop = Popen(['file','-b','--mime',fname], shell=False, stdout=PIPE)
+                (result, _) = pop.communicate()
+                
                 mime2 = result.split(';')[0]
                 self.__logger.debug('File gave us: %s', mime2)
                 # Note that the temporary file still exists now.
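
The hunk above swaps a manual read of the child's stdout for Popen.communicate(), which drains the pipe and waits for the child, so no defunct 'file' processes should be left behind. A standalone sketch of the same pattern (the helper name is illustrative):

from subprocess import Popen, PIPE

def guess_mime(fname):
    # 'file -b --mime' prints e.g. "text/plain; charset=us-ascii"
    pop = Popen(['file', '-b', '--mime', fname], shell=False, stdout=PIPE)
    result, _ = pop.communicate()   # waits for the child and reaps it
    return result.split(';')[0].strip()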

=== modified file 'document/document_view.xml'
--- document/document_view.xml	2010-11-26 16:13:59 +0000
+++ document/document_view.xml	2010-12-20 13:10:53 +0000
@@ -82,7 +82,6 @@
                     <group colspan="4" col="4" attrs="{'invisible': [('type','!=','ressource')]}">
                         <field name="ressource_type_id"  on_change="onchange_content_id(ressource_type_id)"
                             attrs="{'required': [('type','=','ressource')] }"/>
-                        <field name="resource_find_all" groups="base.group_extended" />
                         <newline/>
                         <field name="resource_field"     domain="[('model_id','=',ressource_type_id), ('ttype', 'in', ('char', 'selection', 'date', 'datetime'))]"/>
                         <field name="ressource_tree"/>
@@ -93,6 +92,9 @@
                         <field name="ressource_parent_type_id"/>
                         <field name="ressource_id" select="2" readonly="1"/>
                     </group>
+                    <group colspan="4" col="2" attrs="{'invisible': [('type','!=','ressource'),('resource_parent_type_id','=',False)]}">
+                        <field name="resource_find_all" groups="base.group_extended" />
+                    </group>
 
                 </page>
                 <page string="Generated Files" groups="base.group_extended">

=== modified file 'document/nodes.py'
--- document/nodes.py	2010-11-12 11:49:46 +0000
+++ document/nodes.py	2010-12-20 13:10:53 +0000
@@ -837,7 +837,9 @@
             where.append(('id','=',self.resm_id))
 
         if name:
-            where.append((self.namefield,'=',name))
+            # The =like character will match underscores against any characters
+            # including the special ones that couldn't exist in a FTP/DAV request
+            where.append((self.namefield,'=like',name.replace('\\','\\\\')))
             is_allowed = self.check_perms(1)
         else:
             is_allowed = self.check_perms(5)
@@ -858,12 +860,22 @@
         for bo in obj.browse(cr, uid, resids, context=ctx):
             if not bo:
                 continue
-            name = getattr(bo, self.namefield)
-            if not name:
+            res_name = getattr(bo, self.namefield)
+            if not res_name:
                 continue
                 # Yes! we can't do better but skip nameless records.
+            
+            # Escape the name for characters not supported in filenames
+            res_name = res_name.replace('/','_') # any other weird char?
+            
+            if name and (res_name != name):
+                # we have matched _ to any character, but we only meant to match
+                # the special ones.
+                # Eg. 'a_c' will find 'abc', 'a/c', 'a_c', may only
+                # return 'a/c' and 'a_c'
+                continue
 
-            res.append(self.res_obj_class(name, self.dir_id, self, self.context, self.res_model, bo))
+            res.append(self.res_obj_class(res_name, self.dir_id, self, self.context, self.res_model, bo))
         return res
 
     def _get_ttag(self,cr):
@@ -896,7 +908,10 @@
         self.domain = parent.domain
         self.displayname = path
         self.dctx_dict = parent.dctx_dict
-        self.res_find_all = parent.res_find_all
+        if isinstance(parent, node_res_dir):
+            self.res_find_all = parent.res_find_all
+        else:
+            self.res_find_all = False
         if res_bo:
             self.res_id = res_bo.id
             dc2 = self.context.context.copy()
@@ -1016,8 +1031,10 @@
         # Directory Structure display in tree structure
         if self.res_id and directory.ressource_tree:
             where1 = []
+            if name:
+                where1.append(('name','=like',name.replace('\\','\\\\')))
             if obj._parent_name in obj.fields_get(cr, uid):
-                where1 = where + [(obj._parent_name, '=', self.res_id)]
+                where1.append((obj._parent_name, '=', self.res_id))
             namefield = directory.resource_field.name or 'name'
             resids = obj.search(cr, uid, where1, context=ctx)
             for bo in obj.browse(cr, uid, resids, context=ctx):
@@ -1026,27 +1043,37 @@
                 res_name = getattr(bo, namefield)
                 if not res_name:
                     continue
+                res_name = res_name.replace('/', '_')
+                if name and (res_name != name):
+                    continue
                 # TODO Revise
                 klass = directory.get_node_class(directory, dynamic=True, context=ctx)
-                res.append(klass(res_name, dir_id=self.dir_id, parent=self, context=self.context, res_model=self.res_model, res_bo=bo))
+                rnode = klass(res_name, dir_id=self.dir_id, parent=self, context=self.context,
+                                res_model=self.res_model, res_bo=bo)
+                rnode.res_find_all = self.res_find_all
+                res.append(rnode)
 
 
         where2 = where + [('parent_id','=',self.dir_id) ]
         ids = dirobj.search(cr, uid, where2, context=ctx)
+        bo = obj.browse(cr, uid, self.res_id, context=ctx)
+        
         for dirr in dirobj.browse(cr, uid, ids, context=ctx):
+            if name and (name != dirr.name):
+                continue
             if dirr.type == 'directory':
                 klass = dirr.get_node_class(dirr, dynamic=True, context=ctx)
-                res.append(klass(dirr.name, dirr.id, self, self.context, self.res_model, res_bo = None, res_id = self.res_id))
+                res.append(klass(dirr.name, dirr.id, self, self.context, self.res_model, res_bo = bo, res_id = self.res_id))
             elif dirr.type == 'ressource':
                 # child resources can be controlled by properly set dctx
                 klass = dirr.get_node_class(dirr, context=ctx)
-                res.append(klass(dirr.name,self,self.context, dirr, {'active_id': self.res_id}))
+                res.append(klass(dirr.name,self,self.context, dirr, {'active_id': self.res_id})) # bo?
 
         fil_obj = dirobj.pool.get('ir.attachment')
         if self.res_find_all:
             where2 = where
-        where3 = where2  + [('res_model', '=', self.res_model), ('res_id','=',self.res_id)]
-        # print "where clause for dir_obj", where2
+        where3 = where2 + [('res_model', '=', self.res_model), ('res_id','=',self.res_id)]
+        # print "where clause for dir_obj", where3
         ids = fil_obj.search(cr, uid, where3, context=ctx)
         if ids:
             for fil in fil_obj.browse(cr, uid, ids, context=ctx):
@@ -1057,17 +1084,19 @@
         # Get Child Ressource Directories
         if directory.ressource_type_id and directory.ressource_type_id.id:
             where4 = where + [('ressource_parent_type_id','=',directory.ressource_type_id.id)]
-            where5 = where4 + [('ressource_id','=',0)]
+            where5 = where4 + ['|', ('ressource_id','=',0), ('ressource_id','=',self.res_id)]
             dirids = dirobj.search(cr,uid, where5)
-            where5 = where4 + [('ressource_id','=',self.res_id)]
-            dirids = dirids + dirobj.search(cr,uid, where5)
             for dirr in dirobj.browse(cr, uid, dirids, context=ctx):
                 if dirr.type == 'directory' and not dirr.parent_id:
                     klass = dirr.get_node_class(dirr, dynamic=True, context=ctx)
-                    res.append(klass(dirr.name, dirr.id, self, self.context, self.res_model, res_bo = None, res_id = self.res_id))
+                    rnode = klass(dirr.name, dirr.id, self, self.context, self.res_model, res_bo = bo, res_id = self.res_id)
+                    rnode.res_find_all = dirr.resource_find_all
+                    res.append(rnode)
                 if dirr.type == 'ressource':
                     klass = dirr.get_node_class(dirr, context=ctx)
-                    res.append(klass(dirr.name, self, self.context, dirr, {'active_id': self.res_id}))
+                    rnode = klass(dirr.name, self, self.context, dirr, {'active_id': self.res_id})
+                    rnode.res_find_all = dirr.resource_find_all
+                    res.append(rnode)
         return res
 
     def create_child_collection(self, cr, objname):
@@ -1092,7 +1121,8 @@
                 'name': objname,
                 'ressource_parent_type_id': obj and obj.ressource_type_id.id or False,
                 'ressource_id': object2 and object2.id or False,
-                'parent_id' : False
+                'parent_id' : False,
+                'resource_find_all': False,
         }
         if (obj and (obj.type in ('directory'))) or not object2:
             val['parent_id'] =  obj and obj.id or False
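
The node_res_dir changes above follow the comments in the hunk: '/' cannot appear in an FTP/DAV path component, so record names are exposed with '/' replaced by '_', the incoming name is searched loosely with '=like' (where '_' matches any single character), and the hits are then re-checked against the escaped name. A tiny standalone illustration of that matching rule (helper names are made up):

def escape_name(res_name):
    # what the node exposes as a filename
    return res_name.replace('/', '_')

def matches(requested_name, res_name):
    # '=like' may over-match ('a_c' also finds 'abc'); only keep records
    # whose escaped name is exactly the requested one
    return escape_name(res_name) == requested_name

assert matches('a_c', 'a/c') and matches('a_c', 'a_c')
assert not matches('a_c', 'abc')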

=== modified file 'document/test_cindex.py'
--- document/test_cindex.py	2010-08-10 12:29:57 +0000
+++ document/test_cindex.py	2010-12-20 13:10:53 +0000
@@ -3,9 +3,13 @@
 import sys
 import os
 import glob
+import time
+import logging
 
 from optparse import OptionParser
 
+logging.basicConfig(level=logging.DEBUG)
+
 parser = OptionParser()
 parser.add_option("-q", "--quiet",
                   action="store_false", dest="verbose", default=True,
@@ -15,6 +19,10 @@
                   action="store_true", dest="docontent", default=False,
                   help="Disect content, rather than the file.")
 
+parser.add_option("--delay",
+                  action="store_true", dest="delay", default=False,
+                  help="delay after the operation, to inspect child processes")
+
 (options, args) = parser.parse_args()
 
 import content_index, std_index
@@ -34,9 +42,12 @@
         if options.verbose:
             for line in res[:5]:
                 print line
+        if options.delay:
+            time.sleep(30)
     except Exception,e:
         import traceback
         tb_s = reduce(lambda x, y: x+y, traceback.format_exception( sys.exc_type, sys.exc_value, sys.exc_traceback))
-        
+    except KeyboardInterrupt:
+        print "Keyboard interrupt"
 
 #eof

=== modified file 'document_webdav/dav_fs.py'
--- document_webdav/dav_fs.py	2010-11-03 11:26:04 +0000
+++ document_webdav/dav_fs.py	2010-12-20 13:10:53 +0000
@@ -35,6 +35,9 @@
 from DAV.davcmd import copyone, copytree, moveone, movetree, delone, deltree
 from cache import memoize
 from tools import misc
+
+from webdav import mk_lock_response
+
 try:
     from tools.dict_tools import dict_merge2
 except ImportError:
@@ -209,18 +212,20 @@
             self.parent.log_error("Cannot %s: %s", opname, err.strerror)
             self.parent.log_message("Exc: %s",traceback.format_exc())
             raise default_exc(err.strerror)
-        except Exception,e:
+        except Exception, e:
             import traceback
             if cr: cr.close()
             self.parent.log_error("Cannot %s: %s", opname, str(e))
             self.parent.log_message("Exc: %s",traceback.format_exc())
             raise default_exc("Operation failed")
 
-    #def _get_dav_lockdiscovery(self, uri):
-    #    raise DAV_NotFound
+    def _get_dav_lockdiscovery(self, uri):
+        """ We raise that so that the node API is used """
+        raise DAV_NotFound
 
-    #def A_get_dav_supportedlock(self, uri):
-    #    raise DAV_NotFound
+    def _get_dav_supportedlock(self, uri):
+        """ We raise that so that the node API is used """
+        raise DAV_NotFound
 
     def match_prop(self, uri, match, ns, propname):
         if self.M_NS.has_key(ns):
@@ -346,7 +351,7 @@
         """ Return the base URI of this request, or even join it with the
             ajoin path elements
         """
-        return self.baseuri+ '/'.join(ajoin)
+        return self.parent.get_baseuri(self) + '/'.join(ajoin)
 
     @memoize(4)
     def db_list(self):
@@ -911,6 +916,91 @@
         cr.close()
         return result
 
+    def unlock(self, uri, token):
+        """ Unlock a resource from that token 
+        
+        @return True if unlocked, False if no lock existed, Exceptions
+        """
+        cr, uid, pool, dbname, uri2 = self.get_cr(uri)
+        if not dbname:
+            if cr: cr.close()
+            raise DAV_Error, 409
+
+        node = self.uri2object(cr, uid, pool, uri2)
+        try:
+            node_fn = node.dav_unlock
+        except AttributeError:
+            # perhaps the node doesn't support locks
+            cr.close()
+            raise DAV_Error(400, 'No locks for this resource')
+
+        res = self._try_function(node_fn, (cr, token), "unlock %s" % uri, cr=cr)
+        cr.commit()
+        cr.close()
+        return res
+
+    def lock(self, uri, lock_data):
+        """ Lock (may create) resource.
+            Data is a dict, may contain:
+                depth, token, refresh, lockscope, locktype, owner
+        """
+        cr, uid, pool, dbname, uri2 = self.get_cr(uri)
+        created = False
+        if not dbname:
+            if cr: cr.close()
+            raise DAV_Error, 409
+
+        try:
+            node = self.uri2object(cr, uid, pool, uri2[:])
+        except Exception:
+            node = False
+        
+        objname = misc.ustr(uri2[-1])
+        
+        if not node:
+            dir_node = self.uri2object(cr, uid, pool, uri2[:-1])
+            if not dir_node:
+                cr.close()
+                raise DAV_NotFound('Parent folder not found')
+
+            # We create a new node (file) but with empty data=None,
+            # as in RFC4918 p. 9.10.4
+            node = self._try_function(dir_node.create_child, (cr, objname, None),
+                    "create %s" % objname, cr=cr)
+            if not node:
+                cr.commit()
+                cr.close()
+                raise DAV_Error(400, "Failed to create resource")
+            
+            created = True
+
+        try:
+            node_fn = node.dav_lock
+        except AttributeError:
+            # perhaps the node doesn't support locks
+            cr.close()
+            raise DAV_Error(400, 'No locks for this resource')
+
+        # Obtain the lock on the node
+        lres, pid, token = self._try_function(node_fn, (cr, lock_data), "lock %s" % objname, cr=cr)
+
+        if not lres:
+            cr.commit()
+            cr.close()
+            raise DAV_Error(423, "Resource already locked")
+        
+        assert isinstance(lres, list), 'lres: %s' % repr(lres)
+        
+        try:
+            data = mk_lock_response(self, uri, lres)
+            cr.commit()
+        except Exception:
+            cr.close()
+            raise
+
+        cr.close()
+        return created, data, token
+
     @memoize(CACHE_SIZE)
     def is_collection(self, uri):
         """ test if the given uri is a collection """

=== modified file 'document_webdav/document_webdav.py'
--- document_webdav/document_webdav.py	2010-11-23 07:05:05 +0000
+++ document_webdav/document_webdav.py	2010-12-20 13:10:53 +0000
@@ -80,6 +80,10 @@
     _name = 'document.webdav.dir.property'
     
     _columns = {
+        'create_date': fields.datetime('Date Created', readonly=True),
+        'create_uid':  fields.many2one('res.users', 'Creator', readonly=True),
+        'write_date': fields.datetime('Date Modified', readonly=True),
+        'write_uid':  fields.many2one('res.users', 'Last Modification User', readonly=True),
         'dir_id': fields.many2one('document.directory', 'Directory', required=False, select=1),
         'namespace': fields.char('Namespace', size=127, required=True),
         'name': fields.char('Name', size=64, required=True),
@@ -93,4 +97,34 @@
         
 dav_dir_property()
 
+class dav_file_property(osv.osv):
+    """ Arbitrary WebDAV properties, attached to ir.attachments.
+    
+    A special case is the locks that can be applied on file nodes.
+    
+    There _can_ be properties without a file (RFC?), which means that they
+    globally apply to all the attachments of the present database.
+    
+    TODO access permissions, per property.
+    """
+    _name = 'document.webdav.file.property'
+    
+    _columns = {
+        'create_date': fields.datetime('Date Created', readonly=True),
+        'create_uid':  fields.many2one('res.users', 'Creator', readonly=True),
+        'write_date': fields.datetime('Date Modified', readonly=True),
+        'write_uid':  fields.many2one('res.users', 'Last Modification User', readonly=True),
+        'file_id': fields.many2one('ir.attachment', 'Document', required=False, select=1),
+        'namespace': fields.char('Namespace', size=127, required=True),
+        'name': fields.char('Name', size=64, required=True),
+        'value': fields.text('Value'),
+        'do_subst': fields.boolean('Substitute', required=True),
+        }
+        
+    _defaults = {
+        'do_subst': False,
+        }
+        
+dav_file_property()
+
 #eof
\ No newline at end of file

=== modified file 'document_webdav/nodes.py'
--- document_webdav/nodes.py	2010-10-12 11:27:33 +0000
+++ document_webdav/nodes.py	2010-12-20 13:10:53 +0000
@@ -22,14 +22,14 @@
 
 from document import nodes
 from tools.safe_eval import safe_eval as eval
+import time
+import urllib
+import uuid
 try:
     from tools.dict_tools import dict_filter
 except ImportError:
     from document.dict_tools import dict_filter
 
-import urllib
-
-    
 class node_acl_mixin(object):
     def _get_dav_owner(self, cr):
         return self.uuser
@@ -116,6 +116,153 @@
                 return val
         return None
 
+    def _dav_lock_hlpr(self, cr, lock_data, par_class, prop_model,
+                            prop_ref_field, res_id):
+        """ Helper, which uses the dav properties table for placing locks
+        
+        @param lock_data a dictionary of input to this function.
+        @return list of tuples, DAV:activelock _contents_ structure.
+                See webdav.py:class Prop2Xml() for semantics
+        
+        Note: although the DAV response shall be an <activelock/>, this
+        function will only return the elements inside the activelock,
+        because the calling function needs to append the <lockroot/> in
+        it. See webdav.py:mk_lock_response()
+        
+        In order to reuse code, this function can be called with 
+        lock_data['unlock_mode']=True, in order to unlock.
+        
+        @return bool in unlock mode, (davstruct, prop_id, token) in lock/refresh,
+                    or (False, prop_id, token) if already locked,
+                    or (False, False, False) if lock not found to refresh
+        """
+        assert prop_model
+        assert res_id
+        assert isinstance(lock_data, dict), '%r' % lock_data
+        propobj = self.context._dirobj.pool.get(prop_model)
+        uid = self.context.uid
+        ctx = self.context.context.copy()
+        ctx.update(self.dctx)
+        ctx.update({'uid': uid, 'dbname': self.context.dbname })
+        ctx['node_classname'] = "%s.%s" % (self.__class__.__module__, self.__class__.__name__)
+        dict_filter(self.context.extra_ctx, ['username', 'groupname', 'webdav_path'], ctx)
+        sdomain = [(prop_ref_field, '=', res_id), ('namespace', '=', 'DAV:'),
+                    ('name','=', 'lockdiscovery')]
+        props_to_delete = []
+        lock_found = False
+        lock_val = None
+        tmout2 = int(lock_data.get('timeout', 3*3600))
+        
+        prop_ids = propobj.search(cr, uid, sdomain, context=ctx)
+        if prop_ids:
+            for pbro in propobj.browse(cr, uid, prop_ids, context=ctx):
+                val = pbro.value
+                if pbro.do_subst:
+                    if val.startswith("('") and val.endswith(")"):
+                        glbls = { 'urlquote': urllib.quote, }
+                        val = eval(val, glbls, ctx)
+                    else:
+                        # all locks should be at "subst" format
+                        continue
+                if not (val and isinstance(val, tuple) 
+                        and val[0:2] == ( 'activelock','DAV:')):
+                    # print "Value is not activelock:", val
+                    continue
+                
+                old_token = False
+                old_owner = False
+                try:
+                    # discover the timeout. If anything goes wrong, delete
+                    # the lock (cleanup)
+                    tmout = False
+                    for parm in val[2]:
+                        if parm[1] != 'DAV:':
+                            continue
+                        if parm[0] == 'timeout':
+                            if isinstance(parm[2], basestring) \
+                                    and parm[2].startswith('Second-'):
+                                tmout = int(parm[2][7:])
+                        elif parm[0] == 'locktoken':
+                            if isinstance(parm[2], basestring):
+                                old_token = parm[2]
+                            elif isinstance(parm[2], tuple) and \
+                                parm[2][0:2] == ('href','DAV:'):
+                                    old_token = parm[2][2]
+                            else:
+                                # print "Mangled token in DAV property: %r" % parm[2]
+                                props_to_delete.append(pbro.id)
+                                continue
+                        elif parm[0] == 'owner':
+                            old_owner = parm[2] # not used yet
+                    if tmout:
+                        mdate = pbro.write_date or pbro.create_date
+                        mdate = time.mktime(time.strptime(mdate,'%Y-%m-%d %H:%M:%S'))
+                        if mdate + tmout < time.time():
+                            props_to_delete.append(pbro.id)
+                            continue
+                    else:
+                        props_to_delete.append(pbro.id)
+                        continue
+                except ValueError:
+                    props_to_delete.append(pbro.id)
+                    continue
+                
+                # A valid lock is found here
+                if lock_data.get('refresh', False):
+                    if old_token != lock_data.get('token'):
+                        continue
+                    # refresh mode. Just touch anything and the ORM will update
+                    # the write uid+date, won't it?
+                    # Note: we don't update the owner, because incoming refresh
+                    # wouldn't have a body, anyway.
+                    propobj.write(cr, uid, [pbro.id,], { 'name': 'lockdiscovery'})
+                elif lock_data.get('unlock_mode', False):
+                    if old_token != lock_data.get('token'):
+                        continue
+                    props_to_delete.append(pbro.id)
+                
+                lock_found = pbro.id
+                lock_val = val
+
+        if tmout2 > 3*3600: # 3 hours maximum
+            tmout2 = 3*3600
+        elif tmout2 < 300:
+            # 5 minutes minimum, but an unlock request can always
+            # break it at any time. Ensures no negative values, either.
+            tmout2 = 300
+        
+        if props_to_delete:
+            # explicitly delete, as admin, any of the ids we have identified.
+            propobj.unlink(cr, 1, props_to_delete)
+        
+        if lock_data.get('unlock_mode', False):
+            return lock_found and True
+        elif (not lock_found) and not (lock_data.get('refresh', False)):
+            # Create a new lock, attach and return it.
+            new_token = uuid.uuid4().urn
+            lock_val = ('activelock', 'DAV:', 
+                    [ ('locktype', 'DAV:', (lock_data.get('locktype',False) or 'write','DAV:')),
+                      ('lockscope', 'DAV:', (lock_data.get('lockscope',False) or 'exclusive','DAV:')),
+                      # ? ('depth', 'DAV:', lock_data.get('depth','0') ),
+                      ('timeout','DAV:', 'Second-%d' % tmout2),
+                      ('locktoken', 'DAV:', ('href', 'DAV:', new_token)),
+                      # ('lockroot', 'DAV: ..., we don't store that, appended by caller
+                    ])
+            new_owner = lock_data.get('lockowner',False) or ctx.get('username', False)
+            if new_owner:
+                lock_val[2].append( ('owner', 'DAV:',  new_owner) )
+            prop_id = propobj.create(cr, uid, { prop_ref_field: res_id,
+                    'namespace': 'DAV:', 'name': 'lockdiscovery',
+                    'do_subst': True, 'value': repr(lock_val) })
+            return (lock_val[2], prop_id, new_token )
+        elif not lock_found: # and refresh
+            return (False, False, False)
+        elif lock_found and not lock_data.get('refresh', False):
+            # already locked
+            return (False, lock_found, old_token)
+        else:
+            return (lock_val[2], lock_found, old_token )
+
 class node_dir(node_acl_mixin, nodes.node_dir):
     """ override node_dir and add DAV functionality
     """
@@ -141,7 +288,8 @@
 class node_file(node_acl_mixin, nodes.node_file):
     DAV_PROPS = { "DAV:": ('owner', 'group', 
                             'supported-privilege-set', 
-                            'current-user-privilege-set'), 
+                            'current-user-privilege-set',
+                            ), 
                 }
     DAV_M_NS = { "DAV:" : '_get_dav',}
     http_options = { 'DAV': ['access-control', ] }
@@ -152,10 +300,45 @@
 
     def get_dav_props(self, cr):
         return self._get_dav_props_hlpr(cr, nodes.node_dir, 
-                None, 'file_id', self.file_id)
-                #'document.webdav.dir.property', 'dir_id', self.dir_id)
-
-    #def get_dav_eprop(self, cr, ns, prop):
+                'document.webdav.file.property', 'file_id', self.file_id)
+
+    def dav_lock(self, cr, lock_data):
+        """ Locks or unlocks the node, using DAV semantics.
+        
+        Unlocking will be done when lock_data['unlock_mode'] == True
+        
+        See _dav_lock_hlpr() for calling details.
+        
+        It is fundamentally OK to use this function from non-DAV endpoints,
+        but they will all have to emulate the tuple-in-list structure of
+        the DAV lock data. RFC if this translation should be done inside
+        the _dav_lock_hlpr (to ease other protocols).
+        """
+        return self._dav_lock_hlpr(cr, lock_data, nodes.node_file, 
+                'document.webdav.file.property', 'file_id', self.file_id)
+
+    def dav_unlock(self, cr, token):
+        """Releases the token lock held for the node
+        
+        This is a utility complement of dav_lock()
+        """
+        lock_data = { 'token': token, 'unlock_mode': True }
+        return self._dav_lock_hlpr(cr, lock_data, nodes.node_file, 
+                'document.webdav.file.property', 'file_id', self.file_id)
+
+    def get_dav_eprop(self, cr, ns, prop):
+        if ns == 'DAV:' and prop == 'supportedlock':
+            return [ ('lockentry', 'DAV:', 
+                        [ ('lockscope','DAV:', ('shared', 'DAV:')),
+                          ('locktype','DAV:', ('write', 'DAV:')),
+                        ]),
+                   ('lockentry', 'DAV:', 
+                        [ ('lockscope','DAV:', ('exclusive', 'DAV:')),
+                          ('locktype','DAV:', ('write', 'DAV:')),
+                        ] )
+                   ]
+        return self._get_dav_eprop_hlpr(cr, ns, prop, nodes.node_file,
+                'document.webdav.file.property', 'file_id', self.file_id)
 
 class node_database(nodes.node_database):
     def get_dav_resourcetype(self, cr):
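
The lock values stored by the new _dav_lock_hlpr() are nested (name, namespace, value) triples mirroring the DAV:activelock XML. The standalone sketch below shows that structure and the same 5 minute to 3 hour timeout clamping; the owner and default arguments are illustrative only.

import uuid

def make_activelock(timeout, owner=None, locktype='write', lockscope='exclusive'):
    # clamp to the same 5 minute .. 3 hour window as the helper above
    timeout = max(300, min(int(timeout), 3 * 3600))
    token = uuid.uuid4().urn
    children = [
        ('locktype', 'DAV:', (locktype, 'DAV:')),
        ('lockscope', 'DAV:', (lockscope, 'DAV:')),
        ('timeout', 'DAV:', 'Second-%d' % timeout),
        ('locktoken', 'DAV:', ('href', 'DAV:', token)),
    ]
    if owner:
        children.append(('owner', 'DAV:', owner))
    return ('activelock', 'DAV:', children), token

lock_val, token = make_activelock(7200, owner='demo')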

=== modified file 'document_webdav/security/ir.model.access.csv'
--- document_webdav/security/ir.model.access.csv	2010-10-12 10:38:59 +0000
+++ document_webdav/security/ir.model.access.csv	2010-12-20 13:10:53 +0000
@@ -2,3 +2,4 @@
 "access_webdav_dir_property_all","webdav.dir.property all","model_document_webdav_dir_property",,1,0,0,0
 "access_webdav_dir_property_group_doc_manager","webdav.dir.property document manager","model_document_webdav_dir_property","base.group_system",1,1,1,1
 "access_webdav_dir_property_group_system","webdav.dir.property group system","model_document_webdav_dir_property","base.group_system",1,1,1,1
+"access_webdav_file_property_all","webdav.file.property all","model_document_webdav_file_property",,1,1,1,1

=== modified file 'document_webdav/test/webdav_test1.yml'
--- document_webdav/test/webdav_test1.yml	2010-11-03 11:25:35 +0000
+++ document_webdav/test/webdav_test1.yml	2010-12-20 13:10:53 +0000
@@ -1,12 +1,17 @@
 -
-    In order to test the document_ftp functionality
+    In order to test the document_webdav functionality
 -
     I open the HTTP port and perform an OPTIONS request to the server
 -
     !python {model: ir.attachment}: |
         from document_webdav import test_davclient as te
         reload(te) # reload..
-        dc = te.DAVClient()
+        dc = te.DAVClient(timeout=2.0)
+        # have a small timeout, enough for any heavily-loaded test server to
+        # respond, but small so that this test won't block further loading.
+        # Don't catch the exception, so that the whole YAML test will abort
+        # if the WebDAV service is not available (eg. during an upgrade from
+        # command line).
         dc.gd_options()
         dc.get_creds(self, cr, uid)
         dc.gd_options(path=cr.dbname, expect={'DAV': ['1',]})

=== modified file 'document_webdav/test_davclient.py'
--- document_webdav/test_davclient.py	2010-11-12 11:49:46 +0000
+++ document_webdav/test_davclient.py	2010-12-20 13:10:53 +0000
@@ -324,7 +324,7 @@
     """An instance of a WebDAV client, connected to the OpenERP server
     """
     
-    def __init__(self, user=None, passwd=None, dbg=0, use_ssl=False, useragent=False):
+    def __init__(self, user=None, passwd=None, dbg=0, use_ssl=False, useragent=False, timeout=None):
         if use_ssl:
             self.host = config.get_misc('httpsd', 'interface', False)
             self.port = config.get_misc('httpsd', 'port', 8071)
@@ -346,6 +346,7 @@
         self.user = user
         self.passwd = passwd
         self.dbg = dbg
+        self.timeout = timeout or 5.0 # seconds, tests need to respond pretty fast!
         self.hdrs = {}
         if useragent:
             self.set_useragent(useragent)
@@ -386,7 +387,7 @@
         dbg = self.dbg
         hdrs.update(self.hdrs)
         log.debug("Getting %s http://%s:%d/%s", method, self.host, self.port, path)
-        conn = httplib.HTTPConnection(self.host, port=self.port)
+        conn = httplib.HTTPConnection(self.host, port=self.port, timeout=self.timeout)
         conn.set_debuglevel(dbg)
         if not path:
             path = "/index.html"

=== modified file 'document_webdav/webdav.py'
--- document_webdav/webdav.py	2010-10-28 05:10:32 +0000
+++ document_webdav/webdav.py	2010-12-20 13:10:53 +0000
@@ -268,6 +268,39 @@
 PROPFIND.mk_prop_response = mk_prop_response
 PROPFIND.mk_propname_response = mk_propname_response
 
+def mk_lock_response(self, uri, props):
+    """ Prepare the data response to a DAV LOCK command
+    
+    This function is here, merely to be in the same file as the
+    ones above, that have similar code.
+    """
+    doc = domimpl.createDocument('DAV:', "D:prop", None)
+    ms = doc.documentElement
+    ms.setAttribute("xmlns:D", "DAV:")
+    # ms.tagName = 'D:multistatus'
+    namespaces = []
+    nsnum = 0
+    propgen = Prop2xml(doc, namespaces, nsnum)
+    # write href information
+    uparts=urlparse.urlparse(uri)
+    fileloc=uparts[2]
+    if isinstance(fileloc, unicode):
+        fileloc = fileloc.encode('utf-8')
+    davpath = self.parent.get_davpath()
+    if uparts[0] and uparts[1]:
+        hurl = '%s://%s%s%s' % (uparts[0], uparts[1], davpath, urllib.quote(fileloc))
+    else:
+        # When the request has been relative, we don't have enough data to
+        # reply with absolute url here.
+        hurl = '%s%s' % (davpath, urllib.quote(fileloc))
+        
+    props.append( ('lockroot', 'DAV:', ('href', 'DAV:', (hurl))))
+    pld = doc.createElement('D:lockdiscovery')
+    ms.appendChild(pld)
+    propgen._prop_child(pld, 'DAV:', 'activelock', props)
+
+    return doc.toxml(encoding="utf-8")
+
 super_create_prop = REPORT.create_prop
 
 def create_prop(self):

=== modified file 'document_webdav/webdav_server.py'
--- document_webdav/webdav_server.py	2010-11-18 07:08:23 +0000
+++ document_webdav/webdav_server.py	2010-12-20 13:10:53 +0000
@@ -1,8 +1,14 @@
 # -*- encoding: utf-8 -*-
 
 #
-# Copyright P. Christeas <p_christ@xxxxxx> 2008,2009
+# Copyright P. Christeas <p_christ@xxxxxx> 2008-2010
 #
+# Disclaimer: Many of the functions below borrow code from the
+#   python-webdav library (http://code.google.com/p/pywebdav/ ),
+#   which they import and override to suit OpenERP functionality.
+# python-webdav was written by: Simon Pamies <s.pamies@xxxxxxxxxxx>
+#                               Christian Scholz <mrtopf@xxxxxxxxx>
+#                               Vince Spicer <vince@xxxxxxxx>
 #
 # WARNING: This program as such is intended to be used by professional
 # programmers who take the whole responsability of assessing all potential
@@ -38,7 +44,9 @@
 import re
 from string import atoi
 from DAV.errors import *
+from DAV.utils import IfParser, TagList
 # from DAV.constants import DAV_VERSION_1, DAV_VERSION_2
+from xml.dom import minidom
 
 khtml_re = re.compile(r' KHTML/([0-9\.]+) ')
 
@@ -103,6 +111,7 @@
             self.headers['Destination'] = up.path[len(self.davpath):]
         else:
             raise DAV_Forbidden("Not allowed to copy/move outside webdav path")
+        # TODO: locks
         DAVRequestHandler.copymove(self, CLASS)
 
     def get_davpath(self):
@@ -262,6 +271,139 @@
         except DAV_Error, (ec, dd):
             return self.send_status(ec)
 
+    def do_UNLOCK(self):
+        """ Unlocks given resource """
+
+        dc = self.IFACE_CLASS
+        self.log_message('UNLOCKing resource %s' % self.headers)
+
+        uri = urlparse.urljoin(self.get_baseuri(dc), self.path)
+        uri = urllib.unquote(uri)
+
+        token = self.headers.get('Lock-Token', False)
+        if token:
+            token = token.strip()
+            if token[0] == '<' and token[-1] == '>':
+                token = token[1:-1]
+            else:
+                token = False
+
+        if not token:
+            return self.send_status(400, 'Bad lock token')
+
+        try:
+            res = dc.unlock(uri, token)
+        except DAV_Error, (ec, dd):
+            return self.send_status(ec, dd)
+        
+        if res == True:
+            self.send_body(None, '204', 'OK', 'Resource unlocked.')
+        else:
+            # We just differentiate the description, for debugging purposes
+            self.send_body(None, '204', 'OK', 'Resource not locked.')
+
+    def do_LOCK(self):
+        """ Attempt to place a lock on the given resource.
+        """
+
+        dc = self.IFACE_CLASS
+        lock_data = {}
+
+        self.log_message('LOCKing resource %s' % self.headers)
+
+        body = None
+        if self.headers.has_key('Content-Length'):
+            l = self.headers['Content-Length']
+            body = self.rfile.read(atoi(l))
+
+        depth = self.headers.get('Depth', 'infinity')
+
+        uri = urlparse.urljoin(self.get_baseuri(dc), self.path)
+        uri = urllib.unquote(uri)
+        self.log_message('do_LOCK: uri = %s' % uri)
+
+        ifheader = self.headers.get('If')
+
+        if ifheader:
+            ldif = IfParser(ifheader)
+            if isinstance(ldif, list):
+                if len(ldif) !=1 or (not isinstance(ldif[0], TagList)) \
+                        or len(ldif[0].list) != 1:
+                    raise DAV_Error(400, "Cannot accept multiple tokens")
+                ldif = ldif[0].list[0]
+                if ldif[0] == '<' and ldif[-1] == '>':
+                    ldif = ldif[1:-1]
+
+            lock_data['token'] = ldif
+
+        if not body:
+            lock_data['refresh'] = True
+        else:
+            lock_data['refresh'] = False
+            lock_data.update(self._lock_unlock_parse(body))
+
+        if lock_data['refresh'] and not lock_data.get('token', False):
+            raise DAV_Error(400, 'Lock refresh must specify token')
+
+        lock_data['depth'] = depth
+
+        try:
+            created, data, lock_token = dc.lock(uri, lock_data)
+        except DAV_Error, (ec, dd):
+            return self.send_status(ec, dd)
+
+        headers = {}
+        if not lock_data['refresh']:
+            headers['Lock-Token'] = '<%s>' % lock_token
+
+        if created:
+            self.send_body(data, '201', 'Created',  ctype='text/xml', headers=headers)
+        else:
+            self.send_body(data, '200', 'OK', ctype='text/xml', headers=headers)
+
+    def _lock_unlock_parse(self, body):
+        # Override the python-webdav function, with some improvements
+        # Unlike the py-webdav one, we also parse the owner minidom elements into
+        # pure pythonic struct.
+        doc = minidom.parseString(body)
+
+        data = {}
+        owners = []
+        for info in doc.getElementsByTagNameNS('DAV:', 'lockinfo'):
+            for scope in info.getElementsByTagNameNS('DAV:', 'lockscope'):
+                for scc in scope.childNodes:
+                    if scc.nodeType == info.ELEMENT_NODE \
+                            and scc.namespaceURI == 'DAV:':
+                        data['lockscope'] = scc.localName
+                        break
+            for ltype in info.getElementsByTagNameNS('DAV:', 'locktype'):
+                for ltc in ltype.childNodes:
+                    if ltc.nodeType == info.ELEMENT_NODE \
+                            and ltc.namespaceURI == 'DAV:':
+                        data['locktype'] = ltc.localName
+                        break
+            for own in info.getElementsByTagNameNS('DAV:', 'owner'):
+                for ono in own.childNodes:
+                    if ono.nodeType == info.TEXT_NODE:
+                        if ono.data:
+                            owners.append(ono.data)
+                    elif ono.nodeType == info.ELEMENT_NODE \
+                            and ono.namespaceURI == 'DAV:' \
+                            and ono.localName == 'href':
+                        href = ''
+                        for hno in ono.childNodes:
+                            if hno.nodeType == info.TEXT_NODE:
+                                href += hno.data
+                        owners.append(('href','DAV:', href))
+
+            if len(owners) == 1:
+                data['lockowner'] = owners[0]
+            elif not owners:
+                pass
+            else:
+                data['lockowner'] = owners
+        return data
+
 from service.http_server import reg_http_service,OpenERPAuthProvider
 
 class DAVAuthProvider(OpenERPAuthProvider):
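
The new _lock_unlock_parse() above walks a DAV:lockinfo body with minidom and keeps lockscope, locktype and (not shown here) the owner as plain Python values. The standalone sketch below runs the scope/type part of that traversal against a typical RFC 4918 request body; the sample body is made up.

from xml.dom import minidom

body = """<?xml version="1.0" encoding="utf-8"?>
<D:lockinfo xmlns:D="DAV:">
  <D:lockscope><D:exclusive/></D:lockscope>
  <D:locktype><D:write/></D:locktype>
  <D:owner><D:href>mailto:demo@example.com</D:href></D:owner>
</D:lockinfo>"""

doc = minidom.parseString(body)
data = {}
for info in doc.getElementsByTagNameNS('DAV:', 'lockinfo'):
    for scope in info.getElementsByTagNameNS('DAV:', 'lockscope'):
        for scc in scope.childNodes:
            if scc.nodeType == info.ELEMENT_NODE and scc.namespaceURI == 'DAV:':
                data['lockscope'] = scc.localName
    for ltype in info.getElementsByTagNameNS('DAV:', 'locktype'):
        for ltc in ltype.childNodes:
            if ltc.nodeType == info.ELEMENT_NODE and ltc.namespaceURI == 'DAV:':
                data['locktype'] = ltc.localName

print(data)   # lockscope 'exclusive', locktype 'write'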

=== modified file 'document_webdav/webdav_view.xml'
--- document_webdav/webdav_view.xml	2010-10-12 10:38:59 +0000
+++ document_webdav/webdav_view.xml	2010-12-20 13:10:53 +0000
@@ -37,7 +37,7 @@
             <field name="model">document.webdav.dir.property</field>
             <field name="type">search</field>
             <field name="arch" type="xml">
-                <search string="Search Document storage">
+                <search string="Search Document properties">
                     <field name="name" />
                     <field name="namespace" />
                     <newline/>
@@ -92,7 +92,68 @@
 	    </page>
 	</field>
     </record>
-
+    
+    <!-- File properties -->
+    <record model="ir.ui.view" id="view_file_props_form">
+        <field name="name">document.webdav.file.property.form</field>
+        <field name="model">document.webdav.file.property</field>
+        <field name="type">form</field>
+        <field name="arch" type="xml">
+            <form string="Properties">
+		<field name="namespace"/>
+		<field name="name"/>
+		<newline />
+		<field name="file_id" />
+		<field name="do_subst" />
+		<newline />
+		<field name="value" colspan="4" />
+            </form>
+        </field>
+    </record>
+
+    <record model="ir.ui.view" id="view_file_props_tree">
+        <field name="name">document.webdav.file.property.tree</field>
+        <field name="model">document.webdav.file.property</field>
+        <field name="type">tree</field>
+        <field name="arch" type="xml">
+            <tree string="Properties" toolbar="1">
+		<field name="file_id" />
+		<field name="namespace"/>
+		<field name="name"/>
+            </tree>
+        </field>
+    </record>
+
+    <record id="view_file_props_filter" model="ir.ui.view">
+            <field name="name">Search View: File DAV properties</field>
+            <field name="model">document.webdav.file.property</field>
+            <field name="type">search</field>
+            <field name="arch" type="xml">
+                <search string="Search Document properties">
+                    <field name="name" />
+                    <field name="namespace" />
+                    <newline/>
+                    <group expand="0" string="Group By..." groups="base.group_extended">
+                        <filter string="Document" icon="terp-stock_symbol-selection" domain="[]" context="{'group_by':'file_id'}"/>
+                        <filter string="Namespace" icon="terp-stock_symbol-selection" domain="[]" context="{'group_by':'namespace'}"/>
+                    </group>
+               </search>
+            </field>
+        </record>
+
+    <record model="ir.actions.act_window" id="action_file_props_form">
+        <field name="type">ir.actions.act_window</field>
+        <field name="res_model">document.webdav.file.property</field>
+        <field name="view_type">form</field>
+        <field name="view_mode">tree,form</field>
+        <field name="search_view_id" ref="view_file_props_filter"/>
+    </record>
+   <menuitem
+        name="DAV properties for documents"
+        action="action_file_props_form"
+        id="menu_file_props"
+        groups="base.group_no_one"
+        parent="document.menu_document_management_configuration"/>
 
 </data>
 </openerp>

