[Zope-CVS] CVS: Products/Ape/lib/apelib/zodb3 - connection.py:1.13 db.py:1.8 scanner.py:1.6 serializers.py:1.9 storage.py:1.15 utils.py:1.4 zodbtables.py:1.4

Shane Hathaway <shane at zope.com>
Sat Mar 20 01:34:56 EST 2004


Update of /cvs-repository/Products/Ape/lib/apelib/zodb3
In directory cvs.zope.org:/tmp/cvs-serv19743/lib/apelib/zodb3

Modified Files:
	connection.py db.py scanner.py serializers.py storage.py 
	utils.py zodbtables.py 
Log Message:
Converted method and function names to conform with PEP 8.

PEP 8 (or perhaps its next revision) recommends
lowercase_with_underscores over mixedCase for function and method
names.  Since everything is being renamed for this release anyway,
why not throw this in too? :-)
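
The renames are mechanical: setOIDs() becomes set_oids(),
getPollSources() becomes get_sources(), and so on.  For third-party
code that needs a gentler transition, the usual idiom is a class-level
alias for the old spelling; a hypothetical sketch (this commit drops
the old names outright):

    class Scanner:
        def set_oids(self, oids):
            """Sets the list of OIDs to scan."""
            self.oids = oids

        # Compatibility shim (not part of this commit): keep the old
        # mixedCase name as an alias while callers migrate.
        setOIDs = set_oids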

Also got SQLMultiTableProperties back into shape.



=== Products/Ape/lib/apelib/zodb3/connection.py 1.12 => 1.13 ===
--- Products/Ape/lib/apelib/zodb3/connection.py:1.12	Wed Mar 17 20:08:14 2004
+++ Products/Ape/lib/apelib/zodb3/connection.py	Sat Mar 20 01:34:25 2004
@@ -60,10 +60,10 @@
         if pool_ctl is not None:
             ctl = self._scan_ctl
             if ctl is None:
-                self._scan_ctl = ctl = pool_ctl.newConnection()
+                self._scan_ctl = ctl = pool_ctl.new_connection()
             if ctl.elapsed():
                 # Let the scanner know which OIDs matter.
-                ctl.setOIDs(self._cache.cache_data.keys())
+                ctl.set_oids(self._cache.cache_data.keys())
                 # If it's time, scan on behalf of the whole pool.
                 if pool_ctl.elapsed():
                     pool_ctl.scan()
@@ -72,8 +72,8 @@
                     self._flush_invalidations()
 
 
-    def _prepareRoot(self):
-        osio = self.getObjectSystemIO()
+    def _prepare_root(self):
+        osio = self._get_osio()
         oid = osio.conf.oid_gen.root_oid
         try:
             self[oid]
@@ -92,11 +92,13 @@
             self.tpc_finish(t)
 
     def root(self):
-        osio = self.getObjectSystemIO()
+        osio = self._get_osio()
         oid = osio.conf.oid_gen.root_oid
         return self[oid]
 
-    def getObjectSystemIO(self):
+    def _get_osio(self):
+        """Returns an ObjectSystemIO.
+        """
         osio = self._osio
         if osio is None:
             conf = self._db._conf_resource.access(self)
@@ -132,14 +134,14 @@
         except:
             raise "Could not load oid %s, pickled data in traceback info may\
             contain clues" % (oid)
-        osio = self.getObjectSystemIO()
-        obj = osio.newObject(classification)
+        osio = self._get_osio()
+        obj = osio.new_instance(classification)
         assert obj is not None
 
         obj._p_oid=oid
         obj._p_jar=self
         obj._p_changed=None
-        self.setSerial(obj, serial)
+        self._set_serial(obj, serial)
 
         self._cache[oid] = obj
         
@@ -157,8 +159,8 @@
             return obj
 
         if classification:
-            osio = self.getObjectSystemIO()
-            obj = osio.newObject(classification)
+            osio = self._get_osio()
+            obj = osio.new_instance(classification)
             if obj is not None:
                 obj._p_oid=oid
                 obj._p_jar=self
@@ -170,7 +172,7 @@
         return self[oid]
 
 
-    def mayBegin(self, transaction):
+    def _may_begin(self, transaction):
         if hasattr(self, '_begun') and not self._begun:
             self._storage.tpc_begin(transaction)
             self._begun = 1
@@ -178,7 +180,7 @@
 
     def commit(self, obj, transaction):
         if obj is self:
-            self.mayBegin(transaction)
+            self._may_begin(transaction)
             # We registered ourself.  Execute a commit action, if any.
             if self._Connection__onCommitActions is not None:
                 method_name, args, kw = \
@@ -214,7 +216,7 @@
             # Nothing to do
             return
 
-        self.mayBegin(transaction)
+        self._may_begin(transaction)
 
         stack=[obj]
 
@@ -238,7 +240,7 @@
             del stack[-1]
             oid=obj._p_oid
             assert oid != 'unmanaged', repr(obj)
-            serial = self.getSerial(obj)
+            serial = self._get_serial(obj)
             if serial == HASH0:
                 # new object
                 self._creating.append(oid)
@@ -257,7 +259,7 @@
 
             # SDH: hook in the serializer.
             # state=obj.__getstate__()
-            osio = self.getObjectSystemIO()
+            osio = self._get_osio()
             event, classification, state = osio.serialize(oid, obj)
             ext_refs = event.external
             if ext_refs:
@@ -282,7 +284,7 @@
                         stack.append(ext_ref)
 
             if event.upos:
-                self.handleUnmanaged(obj, event.upos)
+                self._handle_unmanaged(obj, event.upos)
 
             seek(0)
             clear_memo()
@@ -353,13 +355,13 @@
             # else:
             #     d=object.__dict__
             #     for k,v in state.items(): d[k]=v
-            osio = self.getObjectSystemIO()
+            osio = self._get_osio()
             event = osio.deserialize(oid, obj, classification, state)
 
             if event.upos:
-                self.handleUnmanaged(obj, event.upos)
+                self._handle_unmanaged(obj, event.upos)
 
-            self.setSerial(obj, serial)
+            self._set_serial(obj, serial)
 
             if invalid:
                 if obj._p_independent():
@@ -397,7 +399,7 @@
         return '<%s at %08x%s>' % (self.__class__.__name__, id(self), ver)
 
 
-    def handleUnmanaged(self, obj, unmanaged):
+    def _handle_unmanaged(self, obj, unmanaged):
         # Add an event handler to unmanaged subobjects.
         # The event handler calls self.register() when it changes.
         for o in unmanaged:
@@ -408,12 +410,12 @@
                 else:
                     assert o._p_oid == 'unmanaged'
                     if o._p_changed is not None:
-                        o._p_jar.saveState(o)
+                        o._p_jar.save_state(o)
 
 
     # IObjectDatabase implementation
 
-    getObject = _persistent_load
+    get = _persistent_load
 
     def identify(self, obj):
         try:
@@ -432,11 +434,11 @@
         return self._storage.new_oid()
 
 
-    def getClass(self, module, name):
+    def get_class(self, module, name):
         return self._db._classFactory(self, module, name)
 
 
-    def checkSerials(self):
+    def check_serials(self):
         """Verifies that all cached objects are in sync with the data.
 
         This is useful for finding gateways that generate inconsistent
@@ -445,7 +447,7 @@
         for oid, ob in self._cache.items():
             if ob._p_changed is not None:
                 p, serial = self._storage.load(oid, self._version)
-                if serial != self.getSerial(ob):
+                if serial != self._get_serial(ob):
                     raise StorageError(
                         "Inconsistent serial for oid %s" % repr(oid))
     
@@ -466,7 +468,7 @@
     _serials = None
     serial_cleanup_threshold = 1000
 
-    def getSerial(self, ob):
+    def _get_serial(self, ob):
         oid = ob._p_oid
         if oid is None or self._cache.get(oid, None) is not ob:
             return HASH0
@@ -475,7 +477,7 @@
             return HASH0
         return serials.get(oid, HASH0)
 
-    def setSerial(self, ob, s):
+    def _set_serial(self, ob, s):
         oid = ob._p_oid
         assert oid is not None
         if s is None:
@@ -526,7 +528,7 @@
                 if change:
                     obj._p_changed = 0
                 #obj._p_serial = serial
-                self.setSerial(obj, serial)
+                self._set_serial(obj, serial)
         else:
             for oid, serial in store_return:
                 if not isinstance(serial, StringType):
@@ -540,7 +542,7 @@
                     if change:
                         obj._p_changed = 0
                     #obj._p_serial = serial
-                    self.setSerial(obj, serial)
+                    self._set_serial(obj, serial)
 
 
 
@@ -561,9 +563,9 @@
     def __init__(self, real_jar, real_oid, obj):
         self.real_jar = real_jar
         self.real_oid = real_oid
-        self.saveState(obj)
+        self.save_state(obj)
 
-    def saveState(self, obj):
+    def save_state(self, obj):
         s = obj.__getstate__()
         if isinstance(s, DictType):
             s = s.copy()
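
A note on the renamed serial helpers: _get_serial()/_set_serial() keep
the committed revision id per oid in a connection-held map instead of
on the objects themselves, with the all-zero serial standing for "new
object".  A self-contained sketch of that bookkeeping (the cache
identity check and cleanup threshold from the real methods are
omitted):

    HASH0 = b'\x00' * 8  # the "no revision yet" serial

    class SerialBook:
        """Tracks the committed serial (revision id) for each oid."""

        def __init__(self):
            self._serials = {}

        def get_serial(self, oid):
            # Unknown oids report HASH0; commit() treats that as "new".
            return self._serials.get(oid, HASH0)

        def set_serial(self, oid, s):
            self._serials[oid] = s

    book = SerialBook()
    oid = b'\x00' * 8
    assert book.get_serial(oid) == HASH0          # not stored yet
    book.set_serial(oid, b'\x00' * 7 + b'\x01')
    assert book.get_serial(oid) != HASH0          # has a revision now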


=== Products/Ape/lib/apelib/zodb3/db.py 1.7 => 1.8 ===
--- Products/Ape/lib/apelib/zodb3/db.py:1.7	Tue Feb 17 00:25:13 2004
+++ Products/Ape/lib/apelib/zodb3/db.py	Sat Mar 20 01:34:25 2004
@@ -28,7 +28,7 @@
 from interfaces import IResourceAccess
 
 
-def callConfFactory(factory, kw):
+def call_conf_factory(factory, kw):
     """Returns (conf, conns) given the name of a factory and arguments.
     """
     pos = factory.rfind('.')
@@ -66,7 +66,7 @@
         if conf_resource is None:
             if factory is not None:
                 # Use a configuration factory
-                conf, connections = callConfFactory(factory, kw)
+                conf, connections = call_conf_factory(factory, kw)
                 conf_resource = StaticResource(conf)
             else:
                 if kw:
@@ -130,7 +130,7 @@
         # Create the root object if it doesn't exist
         c = self.open()
         try:
-            c._prepareRoot()
+            c._prepare_root()
         finally:
             c.close()
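
call_conf_factory() resolves a dotted name to a callable and invokes it
with the given keyword arguments; the real function goes on to unpack
the factory's (conf, connections) result.  A minimal sketch of the
resolution step (importlib is the modern spelling; the 2004 code uses
__import__):

    import importlib

    def call_factory(factory, kw):
        pos = factory.rfind('.')
        if pos < 0:
            raise ValueError('expected a dotted name, got %r' % factory)
        module = importlib.import_module(factory[:pos])
        return getattr(module, factory[pos + 1:])(**kw)

    # With a stdlib callable rather than an apelib factory:
    d = call_factory('collections.OrderedDict', {'a': 1})
    assert d['a'] == 1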
 


=== Products/Ape/lib/apelib/zodb3/scanner.py 1.5 => 1.6 ===
--- Products/Ape/lib/apelib/zodb3/scanner.py:1.5	Sat Feb 28 15:06:28 2004
+++ Products/Ape/lib/apelib/zodb3/scanner.py	Sat Mar 20 01:34:25 2004
@@ -54,7 +54,7 @@
         self.next_scan = time() + scan_interval
 
 
-    def newConnection(self):
+    def new_connection(self):
         """Returns a ConnectionScanControl to attach to a new connection.
         """
         self.lock.acquire()
@@ -66,7 +66,7 @@
             self.lock.release()
 
 
-    def setConnectionOIDs(self, conn_id, oids):
+    def set_connection_oids(self, conn_id, oids):
         """Records the OIDs a connection is using and periodically scans.
         """
         changed = 0
@@ -86,7 +86,7 @@
         finally:
             self.lock.release()
         if changed:
-            self.storage.scanner.setOIDs(new_oids)
+            self.storage.scanner.set_oids(new_oids)
 
 
     def elapsed(self):
@@ -105,7 +105,7 @@
         LOG('Ape', DEBUG, 'Scanning %d objects.' % len(self.oids))
         scanner = self.storage.scanner
         inv = scanner.scan()
-        scanner.pruneFuture()
+        scanner.prune_future()
         LOG('Ape', DEBUG,
             'Finished scanning. %d objects changed.' % len(inv))
         if inv:
@@ -133,7 +133,7 @@
     def elapsed(self):
         """Returns true if the connection-specific scan interval has elapsed.
 
-        The interval prevents connections from calling setOIDs() with
+        The interval prevents connections from calling set_oids() with
         excessive frequency.
         """
         now = time()
@@ -142,10 +142,10 @@
             return 1
         return 0
 
-    def setOIDs(self, oids):
+    def set_oids(self, oids):
         """Records the OIDs this connection is using.
         """
-        self.pool_ctl.setConnectionOIDs(self.conn_id, oids)
+        self.pool_ctl.set_connection_oids(self.conn_id, oids)
 
 
 class Scanner:
@@ -160,7 +160,7 @@
         self.lock = allocate_lock()
         self.storage = None
 
-    def setOIDs(self, oids):
+    def set_oids(self, oids):
         """Sets the list of OIDs to scan.
 
         Gathers source information about new OIDs and discards
@@ -189,7 +189,7 @@
                 LOG('Ape', DEBUG, 'Getting sources for %d oids.'
                     % len(new_sources))
                 for oid in new_sources.keys():
-                    new_sources[oid] = self.storage.getPollSources(oid)
+                    new_sources[oid] = self.storage.get_sources(oid)
             else:
                 LOG('Ape', DEBUG, "Can't get sources for %d oids. "
                     "Assuming no sources!" % len(new_sources))
@@ -209,7 +209,7 @@
                 self.lock.release()
 
 
-    def afterLoad(self, oid, sources):
+    def after_load(self, oid, sources):
         """Called by the storage after an object is loaded.
         """
         if sources is None:
@@ -262,7 +262,7 @@
         return to_invalidate.keys()
 
 
-    def pruneFuture(self):
+    def prune_future(self):
         """Prunes the cache of future source information.
         """
         if self.future:
@@ -279,7 +279,7 @@
                 'Future sources cache size: %d objects.' % len(self.future))
 
 
-    def afterCommit(self, oid, sources):
+    def after_commit(self, oid, sources):
         """Records changes to sources after commit..
         """
         self.current[oid] = sources
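
ConnectionScanControl.elapsed() is a plain time-based throttle: each
connection pushes its OID list to the pool at most once per interval.
A self-contained sketch of the pattern (the next_update bookkeeping is
implied by the surrounding code):

    from time import time

    class ScanThrottle:
        def __init__(self, interval):
            self.interval = interval
            self.next_update = 0  # fire immediately on the first call

        def elapsed(self):
            now = time()
            if now >= self.next_update:
                self.next_update = now + self.interval
                return 1
            return 0

    throttle = ScanThrottle(10)
    assert throttle.elapsed()        # first call always fires
    assert not throttle.elapsed()    # later calls wait out the interval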


=== Products/Ape/lib/apelib/zodb3/serializers.py 1.8 => 1.9 ===
--- Products/Ape/lib/apelib/zodb3/serializers.py:1.8	Tue Mar 16 22:59:37 2004
+++ Products/Ape/lib/apelib/zodb3/serializers.py	Sat Mar 20 01:34:25 2004
@@ -33,14 +33,14 @@
 from apelib.core.schemas import RowSequenceSchema, FieldSchema
 
 
-def isPersistent(obj):
+def is_persistent(obj):
     try:
         return isinstance(obj, Persistent)
     except TypeError:
         # XXX Python 2.1 thinks Persistent is not a class
         return 0
 
-def encodeToText(s, keys, unmanaged_count=0):
+def encode_to_text(s, keys, unmanaged_count=0):
     """Encodes a binary pickle using base 64.
 
     Note that Python's text pickle format encodes unicode using full
@@ -62,7 +62,7 @@
     text = base64.encodestring(s)
     return '%s\n%s' % ('\n'.join(comments), text)
 
-def decodeFromText(s):
+def decode_from_text(s):
     """Decodes using base 64, ignoring leading comments.
     """
     i = s.rfind('#')
@@ -87,21 +87,21 @@
     # persistent objects and a pickle containing items that are not
     # references.
     schema1 = RowSequenceSchema()
-    schema1.addField('key', 'string', 1)
-    schema1.addField('oid', 'string')
-    schema1.addField('classification', 'classification')
+    schema1.add('key', 'string', 1)
+    schema1.add('oid', 'string')
+    schema1.add('classification', 'classification')
     schema2 = FieldSchema('data', 'string')
     schema = {'references': schema1, 'others': schema2}
 
-    def canSerialize(self, obj):
+    def can_serialize(self, obj):
         return isinstance(obj, PersistentMapping)
 
     def serialize(self, event):
-        assert self.canSerialize(event.obj)
+        assert self.can_serialize(event.obj)
         refs = []
         others = {}
         for key, value in event.obj.items():
-            if isPersistent(value):
+            if is_persistent(value):
                 oid = event.obj_db.identify(value)
                 if oid is None:
                     oid = event.conf.oid_gen.new_oid(event, key, True)
@@ -113,17 +113,17 @@
                 others[key] = value
         event.ignore(('data', '_container'))
         if others:
-            s = encodeToText(dumps(others, 1), others.keys())
+            s = encode_to_text(dumps(others, 1), others.keys())
         else:
             s = ''
         return {'references': refs, 'others': s}
 
     def deserialize(self, event, state):
-        assert self.canSerialize(event.obj)
+        assert self.can_serialize(event.obj)
         data = {}
         s = state['others']
         if s:
-            s = decodeFromText(s)
+            s = decode_from_text(s)
             if s:
                 data = loads(s)
                 for key, value in data.items():
@@ -142,12 +142,12 @@
     __implements__ = ISerializer
     schema = None  # No storage
 
-    def canSerialize(self, obj):
+    def can_serialize(self, obj):
         return 1
 
     def serialize(self, event):
         assert IFullSerializationEvent.isImplementedBy(event)
-        attrs = event.getSerializedAttributeNames()
+        attrs = event.get_seralized_attributes()
         attrs_map = {}
         for attr in attrs:
             attrs_map[attr] = 1
@@ -172,8 +172,8 @@
 
     schema = FieldSchema('data', 'string')
 
-    def canSerialize(self, obj):
-        return isPersistent(obj)
+    def can_serialize(self, obj):
+        return is_persistent(obj)
 
     def serialize(self, event):
         assert IFullSerializationEvent.isImplementedBy(event)
@@ -187,7 +187,7 @@
         for key in state.keys():
             if key.startswith('_v_'):
                 del state[key]
-        for attrname in event.getSerializedAttributeNames():
+        for attrname in event.get_seralized_attributes():
             if state.has_key(attrname):
                 del state[attrname]
         if not state:
@@ -198,9 +198,9 @@
         p = Pickler(outfile, 1)  # Binary pickle
         unmanaged = []
 
-        def persistent_id(ob, identifyInternal=event.identifyInternal,
+        def persistent_id(ob, identify_internal=event.identify_internal,
                           unmanaged=unmanaged):
-            ref = identifyInternal(ob)
+            ref = identify_internal(ob)
             if ref is None:
                 if hasattr(ob, '_p_oid'):
                     # Persistent objects that end up in the remainder
@@ -249,7 +249,7 @@
         event.upos.extend(unmanaged)
 
         s = outfile.getvalue()
-        return encodeToText(s, state.keys(), len(unmanaged))
+        return encode_to_text(s, state.keys(), len(unmanaged))
 
 
     def deserialize(self, event, state):
@@ -264,12 +264,12 @@
             if state.startswith('#'):
                 # Text-encoded pickles start with a pound sign.
                 # (A pound sign is not a valid pickle opcode.)
-                data = decodeFromText(state)
+                data = decode_from_text(state)
             else:
                 data = state
             infile = StringIO(data)
             u = Unpickler(infile)
-            u.persistent_load = event.resolveInternal
+            u.persistent_load = event.resolve_internal
             s = u.load()
             event.obj.__dict__.update(s)
             try:
@@ -292,10 +292,10 @@
 
     schema = FieldSchema('mtime', 'int')
 
-    def canSerialize(self, obj):
-        return isPersistent(obj)
+    def can_serialize(self, obj):
+        return is_persistent(obj)
 
-    def setTime(self, obj, t):
+    def _set_time(self, obj, t):
         """Sets the last modification time of a Persistent obj to float t.
         """
         args = time.gmtime(t)[:5] + (t%60,)
@@ -306,14 +306,14 @@
         if event.obj._p_changed:
             # Indicate that this object just changed.  Note that the time
             # is a guess.
-            self.setTime(event.obj, now)
+            self._set_time(event.obj, now)
         return now
 
     def deserialize(self, event, state):
-        self.setTime(event.obj, state)
+        self._set_time(event.obj, state)
 
 
-def findUnmanaged(obj, managed):
+def find_unmanaged(obj, managed):
     """Gathers the list of unmanaged subobjects from an object.
 
     'managed' is a list of subobjects known to be managed.
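
encode_to_text()/decode_from_text() wrap a binary pickle in a text-safe
envelope: '#' comment lines (a '#' is not a valid pickle opcode, so the
prefix is unambiguous) followed by the pickle in base 64.  A
self-contained round-trip sketch (the exact comment format is assumed):

    import base64, pickle

    def encode_to_text(data, keys):
        comments = ['# %s' % key for key in sorted(keys)]
        text = base64.encodebytes(data).decode('ascii')
        return '%s\n%s' % ('\n'.join(comments), text)

    def decode_from_text(s):
        # Skip past the end of the last comment line, then decode.
        i = s.rfind('#')
        if i >= 0:
            s = s[s.index('\n', i) + 1:]
        return base64.decodebytes(s.encode('ascii'))

    data = pickle.dumps({'a': 1}, 1)  # binary pickle, as in serializers.py
    assert decode_from_text(encode_to_text(data, ['a'])) == data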


=== Products/Ape/lib/apelib/zodb3/storage.py 1.14 => 1.15 ===
--- Products/Ape/lib/apelib/zodb3/storage.py:1.14	Tue Mar 16 22:59:11 2004
+++ Products/Ape/lib/apelib/zodb3/storage.py	Sat Mar 20 01:34:25 2004
@@ -39,12 +39,12 @@
         self.conf_resource = conf_resource
         gwio = GatewayIO(conf_resource.access(self), connections)
         self._gwio = gwio
-        self._conn_list = gwio.getConnectionList()
-        gwio.openConnections()
-        gwio.initDatabases(clear_all)
+        self._conn_list = gwio.get_connection_list()
+        gwio.open_connections()
+        gwio.init_databases(clear_all)
         names = []
         sort_keys = []
-        for c in gwio.getConnectionList():
+        for c in gwio.get_connection_list():
             names.append(c.getName())
             sort_keys.append(c.sortKey())
         self._sort_key = tuple(sort_keys)
@@ -72,8 +72,8 @@
             return self._tid
         return self._serial
 
-    def initDatabases(self, clear_all=0):
-        self._gwio.initDatabases(clear_all=clear_all)
+    def init_databases(self, clear_all=0):
+        self._gwio.init_databases(clear_all=clear_all)
 
     def hash64(self, value):
         """Returns an 8-byte hash value.
@@ -104,8 +104,8 @@
             if DEBUG:
                 self._loaded_hashes[oid] = hash_value
             if self.scanner is not None:
-                sources = event.mapper.gateway.getPollSources(event)
-                self.scanner.afterLoad(oid, sources)
+                sources = event.mapper.gateway.get_sources(event)
+                self.scanner.after_load(oid, sources)
             return data, h
         finally:
             self._lock_release()
@@ -172,10 +172,10 @@
 
         return new_h64
 
-    def getPollSources(self, oid):
+    def get_sources(self, oid):
         self._lock_acquire()
         try:
-            return self._gwio.getPollSources(oid)
+            return self._gwio.get_sources(oid)
         finally:
             self._lock_release()
 
@@ -209,8 +209,8 @@
             del self.changed[tid]
             if self.scanner:
                 for oid in oids:
-                    sources = self._gwio.getPollSources(oid)
-                    self.scanner.afterCommit(oid, sources)
+                    sources = self._gwio.get_sources(oid)
+                    self.scanner.after_commit(oid, sources)
 
     def _vote(self):
         for c in self._conn_list:
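
The scanner hooks exist so the storage can notice out-of-band changes:
at load and commit time the storage records where each object's data
came from (its "sources"), and a periodic scan re-polls those sources
and invalidates any oid whose state no longer matches.  A
much-simplified sketch of the idea (the real Scanner also tracks
"future" sources, uses gateway-specific pollers, and locks around
updates):

    class PollScanner:
        def __init__(self):
            self.current = {}  # oid -> {source: state at load/commit}

        def after_load(self, oid, sources):
            # Remember what the underlying data looked like at load time.
            self.current.setdefault(oid, sources or {})

        def after_commit(self, oid, sources):
            # A commit establishes the new authoritative state.
            self.current[oid] = sources or {}

        def scan(self, poll):
            # 'poll' returns a source's current state; a mismatch means
            # an out-of-band change, so that oid must be invalidated.
            return [oid for oid, sources in self.current.items()
                    if any(poll(src) != state
                           for src, state in sources.items())]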


=== Products/Ape/lib/apelib/zodb3/utils.py 1.3 => 1.4 ===
--- Products/Ape/lib/apelib/zodb3/utils.py:1.3	Mon Feb  2 10:07:22 2004
+++ Products/Ape/lib/apelib/zodb3/utils.py	Sat Mar 20 01:34:25 2004
@@ -21,7 +21,7 @@
 from types import StringType
 
 
-def copyOf(source):
+def zodb_copy(source):
     """Copies a ZODB object, loading subobjects as needed.
 
     Re-ghostifies objects along the way to save memory.
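
One plausible reading of the copyOf()/zodb_copy() docstring, as a
self-contained sketch: deep-copy by pickling straight through the
object graph (never emitting persistent references), then deactivate
the persistent subobjects that were loaded along the way.  The real
implementation may differ in detail; everything below the function
names is assumed:

    import pickle
    from io import BytesIO

    class _CopyPickler(pickle.Pickler):
        """Inlines persistent subobjects instead of referencing them."""

        def __init__(self, f, root):
            pickle.Pickler.__init__(self, f, 2)
            self.root = root
            self.visited = []

        def persistent_id(self, ob):
            if ob is not self.root and getattr(ob, '_p_oid', None) is not None:
                self.visited.append(ob)  # loaded on the way; re-ghostify later
            return None  # None => pickle the object's state inline

    def zodb_copy(source):
        f = BytesIO()
        p = _CopyPickler(f, source)
        p.dump(source)
        for ob in p.visited:
            ob._p_deactivate()  # return the subobject to ghost state
        f.seek(0)
        return pickle.Unpickler(f).load()

    assert zodb_copy({'a': [1, 2]}) == {'a': [1, 2]}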


=== Products/Ape/lib/apelib/zodb3/zodbtables.py 1.3 => 1.4 ===
--- Products/Ape/lib/apelib/zodb3/zodbtables.py:1.3	Mon Mar  1 09:13:54 2004
+++ Products/Ape/lib/apelib/zodb3/zodbtables.py	Sat Mar 20 01:34:25 2004
@@ -48,7 +48,7 @@
         self.columns = []
         self.column_names = {}
 
-    def addColumn(self, name, primary=0, indexed=0):
+    def add(self, name, primary=0, indexed=0):
         if name in self.reserved_names:
             raise ValueError, "Column name %s is reserved" % repr(name)
         if self.column_names.has_key(name):
@@ -56,7 +56,7 @@
         self.column_names[name] = 1
         self.columns.append(Column(name, primary, indexed))
 
-    def getColumns(self):
+    def get_columns(self):
         return tuple(self.columns)
 
     def __repr__(self):
@@ -89,7 +89,7 @@
     def __init__(self, schema=None):
         if schema is not None:
             self.schema = schema
-        columns = schema.getColumns()
+        columns = schema.get_columns()
         self.col_info = []  # [(tuple position, column),]
         self.positions = {}
         for i in range(len(columns)):
@@ -294,7 +294,7 @@
         return count
 
 
-    def getRecordClass(self):
+    def get_record_class(self):
         klass = self._v_record_class
         if klass is None:
             schema = {'rid': 0}
@@ -310,11 +310,11 @@
         rids = self._select_rids(self.tuplify(filter))
         if rids is None:
             # All
-            klass = self.getRecordClass()
+            klass = self.get_record_class()
             return [klass(rec) for rec in self.data.values()]
         elif rids:
             # Some
-            klass = self.getRecordClass()
+            klass = self.get_record_class()
             data = self.data
             return [klass(data[rid]) for rid in rids]
         else:
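
For reference, the renamed schema API in one self-contained sketch
(Column's attributes and the reserved_names value are assumptions; only
add() and get_columns() appear in the diff above):

    class Column:
        def __init__(self, name, primary=0, indexed=0):
            self.name, self.primary, self.indexed = name, primary, indexed

    class TableSchema:
        reserved_names = ('rid',)  # assumed: records get a generated rid

        def __init__(self):
            self.columns = []
            self.column_names = {}

        def add(self, name, primary=0, indexed=0):
            if name in self.reserved_names:
                raise ValueError('Column name %r is reserved' % name)
            if name in self.column_names:
                raise ValueError('Duplicate column name %r' % name)
            self.column_names[name] = 1
            self.columns.append(Column(name, primary, indexed))

        def get_columns(self):
            return tuple(self.columns)

    schema = TableSchema()
    schema.add('key', primary=1, indexed=1)
    schema.add('value')
    assert [c.name for c in schema.get_columns()] == ['key', 'value']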



