Commit 61a3376b authored by Gintautas Miliauskas

Harmless stuff: fixed several typos and removed trailing whitespace in a few places.
parent 293a358c
@@ -13,7 +13,7 @@
 ##############################################################################
 """Database connection support
-$Id: Connection.py,v 1.145 2004/04/08 18:12:25 tim_one Exp $"""
+$Id: Connection.py,v 1.146 2004/04/09 11:11:32 gintautasm Exp $"""
 import logging
 import sys
@@ -60,7 +60,7 @@ class Connection(ExportImport, object):
 Connection that loaded them. When a transaction commits, it uses
 the Connection to store modified objects.
-The typical use of ZODB is for each thread to have its own
+Typical use of ZODB is for each thread to have its own
 Connection and that no thread should have more than one Connection
 to the same database. A thread is associated with a Connection by
 loading objects from that Connection. Objects loaded by one
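The docstring fixed above describes ZODB's threading model: each thread opens its own Connection to a shared DB and loads objects only from that Connection. A minimal sketch of that pattern, not part of this patch (the worker function, results dict, and in-memory MappingStorage are illustrative assumptions):

    import threading
    import transaction
    from ZODB import DB
    from ZODB.MappingStorage import MappingStorage

    db = DB(MappingStorage())        # one DB object, shared by every thread

    results = {}

    def worker(name):
        cn = db.open()               # each thread opens its own Connection
        try:
            root = cn.root()         # objects loaded here belong to this connection
            results[name] = len(root)
            transaction.abort()      # read-only worker, nothing to commit
        finally:
            cn.close()

    threads = [threading.Thread(target=worker, args=(n,)) for n in ('a', 'b')]
    for t in threads:
        t.start()
    for t in threads:
        t.join()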
@@ -168,7 +168,7 @@ class Connection(ExportImport, object):
 # the lock must be held when reading _invalidated.
 # XXX It sucks that we have to hold the lock to read
-# _invalidated. Normally, _invalidated is written by call
+# _invalidated. Normally, _invalidated is written by calling
 # dict.update, which will execute atomically by virtue of the
 # GIL. But some storage might generate oids where hash or
 # compare invokes Python code. In that case, the GIL can't
@@ -200,8 +200,8 @@ class Connection(ExportImport, object):
 def setLocalTransaction(self):
 """Use a transaction bound to the connection rather than the thread"""
-# XXX mark this method as depcrecated? note that it's
-# signature changed?
+# XXX mark this method as deprecated?
+# Note that its signature has changed.
 if self._txn_mgr is transaction:
 self._txn_mgr = transaction.TransactionManager()
@@ -277,9 +277,9 @@ class Connection(ExportImport, object):
 """Add a new object 'obj' to the database and assign it an oid.
 A persistent object is normally added to the database and
-assigned an oid when it becomes reachable an object already in
+assigned an oid when it becomes reachable to an object already in
 the database. In some cases, it is useful to create a new
-object and uses its oid (_p_oid) in a single transaction.
+object and use its oid (_p_oid) in a single transaction.
 This method assigns a new oid regardless of whether the object
 is reachable.
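The add() docstring corrected above is about getting an oid for a brand-new object so the oid can be used within the same transaction. A hedged usage sketch, assuming a db opened as in the previous sketch (the note object and root key are illustrative):

    import transaction
    from persistent.mapping import PersistentMapping

    cn = db.open()
    note = PersistentMapping()
    cn.add(note)                 # assign an oid now, before anything references note
    oid = note._p_oid            # already usable within this same transaction
    cn.root()['note'] = note     # make the object reachable as usual
    transaction.commit()
    cn.close()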
@@ -357,7 +357,7 @@ class Connection(ExportImport, object):
 self._db.classFactory)
 def _resetCache(self):
-"""Creates a new cache, discarding the old.
+"""Creates a new cache, discarding the old one.
 See the docstring for the resetCaches() function.
 """
@@ -509,9 +509,9 @@ class Connection(ExportImport, object):
 serial = getattr(obj, "_p_serial", z64)
 if serial == z64:
-# new object
+# obj is a new object
 self._creating.append(oid)
-# If this object was added, it is now in _creating, so can
+# Because obj was added, it is now in _creating, so it can
 # be removed from _added.
 self._added.pop(oid, None)
 else:
@@ -528,7 +528,7 @@ class Connection(ExportImport, object):
 try:
 self._cache[oid] = obj
 except:
-# Dang, I bet its wrapped:
+# Dang, I bet it's wrapped:
 if hasattr(obj, 'aq_base'):
 self._cache[oid] = obj.aq_base
 else:
@@ -538,7 +538,7 @@ class Connection(ExportImport, object):
 self._added_during_commit = None
 def commit_sub(self, t):
-"""Commit all work done in all subtransactions for this transaction"""
+"""Commit all work done in all subtransactions for this transaction."""
 if self._tmp is None:
 return
 src = self._storage
@@ -559,7 +559,7 @@ class Connection(ExportImport, object):
 self._handle_serial(s, oid, change=False)
 def abort_sub(self, t):
-"""Abort work done in all subtransactions for this transaction"""
+"""Abort work done in all subtransactions for this transaction."""
 if self._tmp is None:
 return
 src = self._storage
@@ -708,6 +708,7 @@ class Connection(ExportImport, object):
 # There is a harmless data race with self._invalidated. A
 # dict update could go on in another thread, but we don't care
 # because we have to check again after the load anyway.
 if (obj._p_oid in self._invalidated
 and not myhasattr(obj, "_p_independent")):
 # If the object has _p_independent(), we will handle it below.
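The check above is skipped for objects that provide _p_independent(); as the comment says, those are handled later. Loosely, a persistent class can define _p_independent() to return a true value, claiming that its state does not depend on other objects, which lets the Connection relax the invalidation handling shown here. A purely illustrative sketch of such a class (the Counter name and its semantics are assumptions, not part of the patch):

    from persistent import Persistent

    class Counter(Persistent):
        """Illustrative only: declares its state independent of other objects."""

        def __init__(self):
            self.value = 0

        def hit(self):
            self.value += 1

        def _p_independent(self):
            # A true return value tells the Connection that this object's
            # loaded state may be treated independently of other objects
            # when invalidations are processed.
            return True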
@@ -787,7 +788,7 @@ class Connection(ExportImport, object):
 def oldstate(self, obj, tid):
 """Return copy of obj that was written by tid.
-XXX The returned object does not have the typical metdata
+XXX The returned object does not have the typical metadata
 (_p_jar, _p_oid, _p_serial) set. I'm not sure how references
 to other peristent objects are handled.
@@ -899,12 +900,12 @@ class Connection(ExportImport, object):
 del obj._p_changed # transition from changed to ghost
 else:
 if change:
-obj._p_changed = 0 # trans. from changed to uptodate
+obj._p_changed = 0 # transition from changed to up-to-date
 obj._p_serial = serial
 def tpc_finish(self, transaction):
-# It's important that the storage call the function we pass
-# while it still has it's lock. We don't want another thread
+# It's important that the storage calls the function we pass
+# while it still has its lock. We don't want another thread
 # to be able to read any updated data until we've had a chance
 # to send an invalidation message to all of the other
 # connections!
@@ -939,10 +940,10 @@ class Connection(ExportImport, object):
 def setDebugInfo(self, *args):
 self._debug_info = self._debug_info + args
-def getTransferCounts(self, clear=0):
+def getTransferCounts(self, clear=False):
 """Returns the number of objects loaded and stored.
-Set the clear argument to reset the counters.
+If clear is True, reset the counters.
 """
 res = self._load_count, self._store_count
 if clear:
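The getTransferCounts() change above makes clear a boolean and tightens its docstring; usage is simply (cn being an open Connection, as in the sketches above):

    loads, stores = cn.getTransferCounts()            # cumulative counts so far
    loads, stores = cn.getTransferCounts(clear=True)  # read the counts and reset them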
...
@@ -154,7 +154,7 @@ class BaseObjectWriter:
 >>> bob._p_jar is jar
 True
-If the object already has a persistent id, it is not changed:
+If the object already has a persistent id, the id is not changed:
 >>> bob._p_oid = 24
 >>> oid, cls = writer.persistent_id(bob)
@@ -173,7 +173,7 @@ class BaseObjectWriter:
 """foreign database connection
 Constructor arguments used by __new__(), as returned by
-__getnewargs__(), can affect memory allocation, but also may
+__getnewargs__(), can affect memory allocation, but may also
 change over the life of the object. This makes it useless to
 cache even the object's class.
@@ -210,7 +210,7 @@ class BaseObjectWriter:
 # Not persistent, pickle normally
 return None
-# Any persistent object mosy have an oid:
+# Any persistent object must have an oid:
 try:
 oid = obj._p_oid
 except AttributeError:
@@ -237,7 +237,7 @@ class BaseObjectWriter:
 if oid is None:
 # Here we are causing the object to be saved in
 # the database. One could argue that we shouldn't
-# do this, because a wekref should not cause an object
+# do this, because a weakref should not cause an object
 # to be added. We'll be optimistic, though, and
 # assume that the object will be added eventually.
@@ -275,7 +275,7 @@ class BaseObjectWriter:
 def serialize(self, obj):
 # We don't use __class__ here, because obj could be a persistent proxy.
-# We don't want to be folled by proxies.
+# We don't want to be fooled by proxies.
 klass = type(obj)
 newargs = getattr(obj, "__getnewargs__", None)
@@ -375,7 +375,7 @@ class BaseObjectReader:
 if args is None:
 args = ()
 else:
-# Definately new style direct class reference
+# Definitely new style direct class reference
 args = ()
 if issubclass(klass, Broken):
@@ -527,7 +527,7 @@ def referencesf(p, rootl=None):
 # tuples, so that we wrap oids that are lists or tuples in
 # tuples.
 #
-# - oids may *not* be false. I'm not sure why.
+# - oids may *not* be False. I'm not sure why.
 out = []
 for v in rootl:
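referencesf(), whose comment is fixed above, scans a stored data record and collects the oids it references. A hedged sketch of a typical call, assuming the function lives in ZODB.serialize as this file suggests (the storage, oid, and version arguments are illustrative):

    from ZODB.serialize import referencesf

    # data is a raw record as returned by a storage's load() method.
    data, serial = storage.load(oid, '')
    refs = []
    referencesf(data, refs)      # appends the oids referenced by the record to refs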
...
@@ -50,7 +50,7 @@ class ConnectionDotAdd(unittest.TestCase):
 self.datamgr.add(obj)
 self.assertEqual(obj._p_oid, oid)
-# Cannot add an object from a diffrerent connection.
+# Cannot add an object from a different connection.
 obj2 = StubObject()
 obj2._p_jar = object()
 self.assertRaises(InvalidObjectReference, self.datamgr.add, obj2)
@@ -71,10 +71,11 @@ class ConnectionDotAdd(unittest.TestCase):
 self.datamgr.add(obj)
 oid = obj._p_oid
-# This case simulates when an error occurred committing some other
-# object, so tpc_abort is called, clearing everything.
+# Simulate an error while committing some other object.
 self.datamgr.tpc_begin(self.transaction)
 # Let's pretend something bad happens here.
+# Call tpc_abort, clearing everything.
 self.datamgr.tpc_abort(self.transaction)
 self.assert_(obj._p_oid is None)
 self.assert_(obj._p_jar is None)
@@ -135,7 +136,7 @@
 class UserMethodTests(unittest.TestCase):
-# XXX add isn't tested here, because there are is bunch of traditional
+# XXX add isn't tested here, because there are a bunch of traditional
 # unit tests for it.
 # XXX the version tests would require a storage that supports versions
@@ -217,8 +218,8 @@
 r"""doctest of close() method
 This is a minimal test, because most of the interesting
-effects on closing a connection involved its interaction the
-database and transaction.
+effects on closing a connection involve its interaction with the
+database and the transaction.
 >>> db = databaseFromString("<zodb>\n<mappingstorage/>\n</zodb>")
 >>> cn = db.open()
@@ -228,8 +229,7 @@
 >>> cn.close()
 >>> cn.close()
-It's not possible to load or store objects once the storage is
-closed.
+It's not possible to load or store objects once the storage is closed.
 >>> cn.get(p64(0))
 Traceback (most recent call last):
@@ -348,7 +348,7 @@
 warnings, one from the Connection and one from the
 cPickleCache. Maybe we should drop the cPickleCache warning,
 but it's there for now. When passed an argument, it acts like
-cacheGC(). When t isn't passed an argument it acts like
+cacheGC(). When it isn't passed an argument it acts like
 cacheMinimize().
 >>> r._p_activate()
@@ -368,8 +368,7 @@
 >>> category.__name__
 'DeprecationWarning'
-We have to uninstall the hook so that other warnings don't get
-lost.
+We have to uninstall the hook so that other warnings don't get lost.
 >>> hook.uninstall()
...