Commit 169c07a0 authored by Jason Madden

The Python PickleCache is close enough to the C PickleCache that ZODB's...

The Python PickleCache is close enough to the C PickleCache that ZODB's testCache passes. Required some small updates and additions to our own test cases; the update_object_size_estimation is not tested here yet.
parent 7ebf1284
......@@ -81,7 +81,7 @@ class Persistent(object):
jar = self.__jar
oid = self.__oid
if jar is not None:
if oid and jar._cache.get(oid):
if self._p_is_in_cache():
raise ValueError("can't delete _p_jar of cached object")
self.__setattr__('_Persistent__jar', None)
_OSA(self, '_Persistent__flags', None)
......@@ -95,9 +95,11 @@ class Persistent(object):
def _set_oid(self, value):
if value == self.__oid:
return
if value is not None:
if not isinstance(value, OID_TYPE):
raise ValueError('Invalid OID type: %s' % value)
# The C implementation allows *any* value to be
# used as the _p_oid.
#if value is not None:
# if not isinstance(value, OID_TYPE):
# raise ValueError('Invalid OID type: %s' % value)
if self.__jar is not None and self.__oid is not None:
raise ValueError('Already assigned an OID by our jar')
_OSA(self, '_Persistent__oid', value)
......@@ -356,6 +358,15 @@ class Persistent(object):
idict = getattr(self, '__dict__', None)
if idict is not None:
idict.clear()
# Implementation detail: deactivating/invalidating
# updates the size of the cache (if we have one)
# by telling it this object no longer takes any bytes
# (-1 is a magic number to compensate for the implementation,
# which always adds one to the size given)
cache = getattr(self.__jar, '_cache', None)
if cache is not None:
cache.update_object_size_estimation(self.__oid,
-1)
def _p_getattr(self, name):
""" See IPersistent.
......@@ -411,16 +422,31 @@ class Persistent(object):
if (self.__jar is not None and
self.__oid is not None and
self._p_state >= 0):
# This scenario arises in ZODB: ZODB.serialize.ObjectWriter
# The KeyError arises in ZODB: ZODB.serialize.ObjectWriter
# can assign a jar and an oid to newly seen persistent objects,
# but because they are newly created, they aren't in the
# pickle cache yet. There doesn't seem to be a way to distinguish
# that at this level, all we can do is catch it
# that at this level, all we can do is catch it.
# The AttributeError arises in ZODB test cases
try:
self.__jar._cache.mru(self.__oid)
except KeyError:
cache = self.__jar._cache
except AttributeError:
pass
else:
try:
cache.mru(self.__oid)
except KeyError:
pass
def _p_is_in_cache(self):
oid = self.__oid
if not oid:
return False
jar = self.__jar
cache = getattr(jar, '_cache', None)
if cache is not None:
return cache.get(oid) is self
def _estimated_size_in_24_bits(value):
if value > 1073741696:
......
......@@ -11,6 +11,7 @@
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
from __future__ import print_function
import gc
import weakref
......@@ -21,6 +22,10 @@ from persistent.interfaces import GHOST
from persistent.interfaces import IPickleCache
from persistent.interfaces import STICKY
from persistent.interfaces import OID_TYPE
from persistent import Persistent
# Tests may modify this to add additional types
_CACHEABLE_TYPES = (type, Persistent)
class RingNode(object):
# 32 byte fixed size wrapper.
......@@ -33,9 +38,17 @@ class RingNode(object):
@implementer(IPickleCache)
class PickleCache(object):
total_estimated_size = 0
cache_size_bytes = 0
def __init__(self, jar, target_size=0, cache_size_bytes=0):
# TODO: forward-port Dieter's bytes stuff
self.jar = jar
# We expect the jars to be able to have a pointer to
# us; this is a reference cycle, but certain
# aspects of invalidation and accessing depend on it.
# TODO: track this on the persistent objects themself?
jar._cache = self
self.target_size = target_size
self.drain_resistance = 0
self.non_ghost_count = 0
......@@ -43,6 +56,7 @@ class PickleCache(object):
self.data = weakref.WeakValueDictionary()
self.ring = RingNode(None)
self.ring.next = self.ring.prev = self.ring
self.cache_size_bytes = cache_size_bytes
# IPickleCache API
def __len__(self):
......@@ -62,12 +76,37 @@ class PickleCache(object):
def __setitem__(self, oid, value):
""" See IPickleCache.
"""
if not isinstance(oid, OID_TYPE): # XXX bytes
raise ValueError('OID must be %s: %s' % (OID_TYPE, oid))
# The order of checks matters for C compatibility;
# the ZODB tests depend on this
# The C impl requires either a type or a Persistent subclass
if not isinstance(value, _CACHEABLE_TYPES):
raise TypeError("Cache values must be persistent objects.")
value_oid = value._p_oid
if not isinstance(oid, OID_TYPE) or not isinstance(value_oid, OID_TYPE): # XXX bytes
raise TypeError('OID must be %s: key=%s _p_oid=%s' % (OID_TYPE, oid, value_oid))
if value_oid != oid:
raise ValueError("Cache key does not match oid")
# XXX
if oid in self.persistent_classes or oid in self.data:
if self.data[oid] is not value:
raise KeyError('Duplicate OID: %s' % oid)
# Raise the same type of exception as the C impl with the same
# message.
raise ValueError('A different object already has the same oid')
# Match the C impl: it requires a jar
jar = getattr(value, '_p_jar', None)
if jar is None and type(value) is not type:
raise ValueError("Cached object jar missing")
# It also requires that it cannot be cached more than one place
existing_cache = getattr(jar, '_cache', None)
if (existing_cache is not None
and existing_cache is not self
and existing_cache.data.get(oid) is not None):
raise ValueError("Object already in another cache")
if type(value) is type:
self.persistent_classes[oid] = value
else:
......@@ -82,7 +121,7 @@ class PickleCache(object):
""" See IPickleCache.
"""
if not isinstance(oid, OID_TYPE):
raise ValueError('OID must be %s: %s' % (OID_TYPE, oid))
raise TypeError('OID must be %s: %s' % (OID_TYPE, oid))
if oid in self.persistent_classes:
del self.persistent_classes[oid]
else:
......@@ -229,7 +268,17 @@ class PickleCache(object):
def update_object_size_estimation(self, oid, new_size):
    """ See IPickleCache.

    Folds a new byte-size estimate for the object stored under *oid*
    into ``self.total_estimated_size``. Objects not present in
    ``self.data`` are ignored.
    """
    # NOTE(review): this 'pass' line appears to be leftover diff residue
    # from the pre-implementation stub; the code below it is what runs
    # to completion. Confirm against the committed file.
    pass #pragma NO COVER
    value = self.data.get(oid)
    if value is not None:
        # Recall that while the argument is given in bytes,
        # we have to work with 64-block chunks (plus one)
        # to match the C implementation. Hence the convoluted
        # arithmetic
        new_size_in_24 = _estimated_size_in_24_bits(new_size)
        # _Persistent__size is the name-mangled private size slot kept
        # on Persistent instances, already in 24-bit block units.
        p_est_size_in_24 = value._Persistent__size
        # Delta between the new and previous estimates, converted back
        # to (approximate) bytes before accumulating.
        new_est_size_in_bytes = (new_size_in_24 - p_est_size_in_24) * 64
        self.total_estimated_size += new_est_size_in_bytes
cache_size = property(lambda self: self.target_size)
cache_drain_resistance = property(lambda self: self.drain_resistance)
......@@ -262,3 +311,8 @@ class PickleCache(object):
node = node.next
elif oid in self.persistent_classes:
del self.persistent_classes[oid]
def _estimated_size_in_24_bits(value):
if value > 1073741696:
return 16777215
return (value//64) + 1
......@@ -1404,6 +1404,8 @@ class PyPersistentTests(unittest.TestCase, _Persistent_Base):
return self._data.get(oid)
def __delitem__(self, oid):
del self._data[oid]
def update_object_size_estimation(self, oid, new_size):
pass
return _Cache(jar)
......
......@@ -17,6 +17,16 @@ _marker = object()
class PickleCacheTests(unittest.TestCase):
def setUp(self):
    # _CACHEABLE_TYPES is consulted by PickleCache.__setitem__ to decide
    # which values may be cached; extend it so the tests' DummyPersistent
    # stand-in (presumably not a real Persistent subclass — confirm) is
    # accepted. The original tuple is saved for restoration in tearDown.
    import persistent.picklecache
    self.orig_types = persistent.picklecache._CACHEABLE_TYPES
    persistent.picklecache._CACHEABLE_TYPES += (DummyPersistent,)
def tearDown(self):
    # Restore the module-level _CACHEABLE_TYPES tuple that setUp
    # extended, so the modification does not leak into other tests.
    import persistent.picklecache
    persistent.picklecache._CACHEABLE_TYPES = self.orig_types
def _getTargetClass(self):
from persistent.picklecache import PickleCache
return PickleCache
......@@ -79,12 +89,12 @@ class PickleCacheTests(unittest.TestCase):
self.assertTrue(cache.get('nonesuch', default) is default)
def test___setitem___non_string_oid_raises_ValueError(self):
def test___setitem___non_string_oid_raises_TypeError(self):
cache = self._makeOne()
try:
cache[object()] = self._makePersist()
except ValueError:
except TypeError:
pass
else:
self.fail("Didn't raise ValueError with non-string OID.")
......@@ -93,21 +103,21 @@ class PickleCacheTests(unittest.TestCase):
from persistent._compat import _b
KEY = _b('original')
cache = self._makeOne()
original = self._makePersist()
original = self._makePersist(oid=KEY)
cache[KEY] = original
cache[KEY] = original
def test___setitem___duplicate_oid_raises_KeyError(self):
def test___setitem___duplicate_oid_raises_ValueError(self):
from persistent._compat import _b
KEY = _b('original')
cache = self._makeOne()
original = self._makePersist()
original = self._makePersist(oid=KEY)
cache[KEY] = original
duplicate = self._makePersist()
duplicate = self._makePersist(oid=KEY)
try:
cache[KEY] = duplicate
except KeyError:
except ValueError:
pass
else:
self.fail("Didn't raise KeyError with duplicate OID.")
......@@ -117,7 +127,7 @@ class PickleCacheTests(unittest.TestCase):
from persistent._compat import _b
KEY = _b('ghost')
cache = self._makeOne()
ghost = self._makePersist(state=GHOST)
ghost = self._makePersist(state=GHOST, oid=KEY)
cache[KEY] = ghost
......@@ -130,13 +140,28 @@ class PickleCacheTests(unittest.TestCase):
self.assertTrue(items[0][1] is ghost)
self.assertTrue(cache[KEY] is ghost)
def test___setitem___non_ghost(self):
def test___setitem___mismatch_key_oid(self):
from persistent.interfaces import UPTODATE
from persistent._compat import _b
KEY = _b('uptodate')
cache = self._makeOne()
uptodate = self._makePersist(state=UPTODATE)
try:
cache[KEY] = uptodate
except ValueError:
pass
else:
self.fail("Didn't raise ValueError when the key didn't match the OID")
def test___setitem___non_ghost(self):
from persistent.interfaces import UPTODATE
from persistent._compat import _b
KEY = _b('uptodate')
cache = self._makeOne()
uptodate = self._makePersist(state=UPTODATE, oid=KEY)
cache[KEY] = uptodate
self.assertEqual(len(cache), 1)
......@@ -153,7 +178,7 @@ class PickleCacheTests(unittest.TestCase):
from persistent._compat import _b
KEY = _b('pclass')
class pclass(object):
pass
_p_oid = KEY
cache = self._makeOne()
cache[KEY] = pclass
......@@ -167,12 +192,12 @@ class PickleCacheTests(unittest.TestCase):
self.assertTrue(cache[KEY] is pclass)
self.assertTrue(cache.get(KEY) is pclass)
def test___delitem___non_string_oid_raises_ValueError(self):
def test___delitem___non_string_oid_raises_TypeError(self):
cache = self._makeOne()
try:
del cache[object()]
except ValueError:
except TypeError:
pass
else:
self.fail("Didn't raise ValueError with non-string OID.")
......@@ -194,7 +219,7 @@ class PickleCacheTests(unittest.TestCase):
KEY = _b('pclass')
cache = self._makeOne()
class pclass(object):
pass
_p_oid = KEY
cache = self._makeOne()
cache[KEY] = pclass
......@@ -208,7 +233,7 @@ class PickleCacheTests(unittest.TestCase):
from persistent._compat import _b
KEY = _b('uptodate')
cache = self._makeOne()
uptodate = self._makePersist(state=UPTODATE)
uptodate = self._makePersist(state=UPTODATE, oid=KEY)
cache[KEY] = uptodate
......@@ -219,9 +244,9 @@ class PickleCacheTests(unittest.TestCase):
from persistent.interfaces import GHOST
from persistent._compat import _b
cache = self._makeOne()
ghost = self._makePersist(state=GHOST)
KEY = _b('ghost')
ghost = self._makePersist(state=GHOST, oid=KEY)
cache[KEY] = ghost
del cache[KEY]
......@@ -231,11 +256,11 @@ class PickleCacheTests(unittest.TestCase):
from persistent.interfaces import UPTODATE
from persistent._compat import _b
cache = self._makeOne()
remains = self._makePersist(state=UPTODATE)
uptodate = self._makePersist(state=UPTODATE)
REMAINS = _b('remains')
UPTODATE = _b('uptodate')
remains = self._makePersist(state=UPTODATE, oid=REMAINS)
uptodate = self._makePersist(state=UPTODATE, oid=UPTODATE)
cache[REMAINS] = remains
cache[UPTODATE] = uptodate
......@@ -549,8 +574,17 @@ class PickleCacheTests(unittest.TestCase):
from persistent._compat import _b
KEY = _b('123')
cache = self._makeOne()
candidate = self._makePersist(oid=None, jar=None)
candidate = self._makePersist(oid=KEY)
cache[KEY] = candidate
# Now, normally we can't get in the cache without an oid and jar
# (the C implementation doesn't allow it), so if we try to create
# a ghost, we get the value error
self.assertRaises(ValueError, cache.new_ghost, KEY, candidate)
candidate._p_oid = None
self.assertRaises(ValueError, cache.new_ghost, KEY, candidate)
# if we're sneaky and remove the OID and jar, then we get the duplicate
# key error
candidate._p_jar = None
self.assertRaises(KeyError, cache.new_ghost, KEY, candidate)
def test_new_ghost_success_already_ghost(self):
......@@ -740,7 +774,7 @@ class PickleCacheTests(unittest.TestCase):
from persistent._compat import _b
KEY = _b('123')
class Pclass(object):
_p_oid = None
_p_oid = KEY
_p_jar = None
cache = self._makeOne()
cache[KEY] = Pclass
......@@ -754,7 +788,7 @@ class PickleCacheTests(unittest.TestCase):
from persistent._compat import _b
KEY = _b('pclass')
class pclass(object):
pass
_p_oid = KEY
cache = self._makeOne()
pclass._p_state = UPTODATE
cache[KEY] = pclass
......@@ -775,7 +809,7 @@ class PickleCacheTests(unittest.TestCase):
from persistent._compat import _b
KEY = _b('uptodate')
cache = self._makeOne()
uptodate = self._makePersist(state=UPTODATE)
uptodate = self._makePersist(state=UPTODATE, oid=KEY)
cache[KEY] = uptodate
gc.collect() # pypy vs. refcounting
......@@ -795,7 +829,7 @@ class PickleCacheTests(unittest.TestCase):
from persistent._compat import _b
KEY = _b('ghost')
cache = self._makeOne()
ghost = self._makePersist(state=GHOST)
ghost = self._makePersist(state=GHOST, oid=KEY)
cache[KEY] = ghost
gc.collect() # pypy vs. refcounting
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment