Commit a9cda7fb authored by Jim Fulton's avatar Jim Fulton

Bug Fixed:

Object state management wasn't done correctly when classes
  implemented custom _p_deactivate methods.
  (https://bugs.launchpad.net/zodb/+bug/185066)
parent 384c7903
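For context, here is a minimal sketch of the failure mode (not part of the commit; the class and variable names are illustrative). The old ghost-creation path turned a freshly created object, which starts in the UPTODATE state, into a ghost by assigning obj._p_changed = None, and that assignment dispatches to the overridable _p_deactivate()::

    import persistent

    class NoopDeactivate(persistent.Persistent):
        # Like the class in bug 185066: the override never ghostifies.
        def _p_deactivate(self):
            pass

    obj = NoopDeactivate.__new__(NoopDeactivate)  # starts UPTODATE, empty __dict__
    obj._p_changed = None  # old code path: meant to turn obj into a ghost

    # Because the no-op _p_deactivate() ran instead of the real one, obj
    # never reaches the ghost state, so later attribute access sees an
    # empty __dict__ instead of loading the saved state from storage.
    # The cache's new_ghost(oid, obj) method added by this commit sets the
    # ghost state directly, bypassing _p_deactivate.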
......@@ -2,6 +2,16 @@
Change History
================
3.10.0a1 (2009-12-??)
=====================
Bugs Fixed
----------
- Object state management wasn't done correctly when classes
implemented custom _p_deactivate methods.
(https://bugs.launchpad.net/zodb/+bug/185066)
3.9.4 (2009-12-14)
==================
......
......@@ -241,24 +241,16 @@ class Connection(ExportImport, object):
if obj is not None:
return obj
# This appears to be an MVCC violation because we are loading
# the most recent data when perhaps we shouldn't. The key is
# that we are only creating a ghost!
# A disadvantage to this optimization is that _p_serial cannot be
# trusted until the object has been loaded, which affects both MVCC
# and historical connections.
p, serial = self._storage.load(oid, '')
obj = self._reader.getGhost(p)
# Avoid infinite loop if obj tries to load its state before
# it is added to the cache and its state refers to it.
# (This will typically be the case for non-ghostifiable objects,
# like persistent caches.)
self._pre_cache[oid] = obj
obj._p_oid = oid
obj._p_jar = self
obj._p_changed = None
obj._p_serial = serial
self._cache.new_ghost(oid, obj)
self._pre_cache.pop(oid)
self._cache[oid] = obj
return obj
def cacheMinimize(self):
......
......@@ -511,14 +511,7 @@ class ObjectReader:
return self._conn.get(oid)
# TODO: should be done by connection
obj._p_oid = oid
obj._p_jar = self._conn
# When an object is created, it is put in the UPTODATE
# state. We must explicitly deactivate it to turn it into
# a ghost.
obj._p_changed = None
self._cache[oid] = obj
self._cache.new_ghost(oid, obj)
return obj
def load_multi_persistent(self, database_name, oid, klass):
......@@ -552,16 +545,6 @@ class ObjectReader:
loaders['n'] = load_multi_oid
def _new_object(self, klass, args):
if not args and not myhasattr(klass, "__getnewargs__"):
obj = klass.__new__(klass)
else:
obj = klass(*args)
if not isinstance(klass, type):
obj.__dict__.clear()
return obj
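As an aside on _new_object above: a ghost's class is instantiated via klass.__new__ so that __init__ never runs, since the real state will be loaded from the database later; only classes that provide __getnewargs__ (and therefore were pickled with constructor arguments) go through the klass(*args) branch. A rough, hypothetical illustration (the Account class below is not from ZODB)::

    import persistent

    class Account(persistent.Persistent):
        def __init__(self, balance):
            # Running this while materializing a ghost would clobber the
            # state that is about to be loaded from the database.
            self.balance = balance

    ghost = Account.__new__(Account)  # the "no args, no __getnewargs__" branch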
def getClassName(self, pickle):
unpickler = self._get_unpickler(pickle)
klass = unpickler.load()
......
......@@ -556,6 +556,29 @@ def connection_root_convenience():
<root: rather_long_name rather_long_name2 rather_long_name4 ...>
"""
class proper_ghost_initialization_with_empty__p_deactivate_class(Persistent):
def _p_deactivate(self):
pass
def proper_ghost_initialization_with_empty__p_deactivate():
"""
See https://bugs.launchpad.net/zodb/+bug/185066
>>> db = ZODB.tests.util.DB()
>>> conn = db.open()
>>> C = proper_ghost_initialization_with_empty__p_deactivate_class
>>> conn.root.x = x = C()
>>> conn.root.x.y = 1
>>> transaction.commit()
>>> conn2 = db.open()
>>> conn2.root.x._p_changed
>>> conn2.root.x.y
1
"""
class _PlayPersistent(Persistent):
def setValueWithSize(self, size=0): self.value = size*' '
__init__ = setValueWithSize
......
......@@ -691,6 +691,103 @@ cc_update_object_size_estimation(ccobject *self, PyObject *args)
Py_RETURN_NONE;
}
static PyObject*
cc_new_ghost(ccobject *self, PyObject *args)
{
PyObject *tmp, *key, *v;
if (!PyArg_ParseTuple(args, "OO:new_ghost", &key, &v))
return NULL;
/* Sanity check the value given to make sure it is allowed in the cache */
if (PyType_Check(v))
{
/* It's a persistent class, such as a ZClass. That's OK. */
}
else if (v->ob_type->tp_basicsize < sizeof(cPersistentObject))
{
/* If it's not an instance of a persistent class (i.e., Python
classes that derive from persistent.Persistent, BTrees,
etc.), report an error.
TODO: checking sizeof() seems a poor test.
*/
PyErr_SetString(PyExc_TypeError,
"Cache values must be persistent objects.");
return NULL;
}
/* Can't access v->oid directly because the object might be a
* persistent class.
*/
tmp = PyObject_GetAttr(v, py__p_oid);
if (tmp == NULL)
return NULL;
Py_DECREF(tmp);
if (tmp != Py_None)
{
PyErr_SetString(PyExc_AssertionError,
"New ghost object must not have an oid");
return NULL;
}
/* useful sanity check, but not strictly an invariant of this class */
tmp = PyObject_GetAttr(v, py__p_jar);
if (tmp == NULL)
return NULL;
Py_DECREF(tmp);
if (tmp != Py_None)
{
PyErr_SetString(PyExc_AssertionError,
"New ghost object must not have a jar");
return NULL;
}
/* PyDict_GetItem returns a borrowed reference, so no Py_DECREF here. */
tmp = PyDict_GetItem(self->data, key);
if (tmp)
{
PyErr_SetString(PyExc_AssertionError,
"The given oid is already in the cache");
return NULL;
}
if (PyType_Check(v))
{
if (PyObject_SetAttr(v, py__p_jar, self->jar) < 0)
return NULL;
if (PyObject_SetAttr(v, py__p_oid, key) < 0)
return NULL;
if (PyDict_SetItem(self->data, key, v) < 0)
return NULL;
self->klass_count++;
}
else
{
cPersistentObject *p = (cPersistentObject *)v;
if(p->cache != NULL)
{
PyErr_SetString(PyExc_AssertionError, "Already in a cache");
return NULL;
}
if (PyDict_SetItem(self->data, key, v) < 0)
return NULL;
/* the dict should have a borrowed reference */
Py_DECREF(v);
Py_INCREF(self);
p->cache = (PerCache *)self;
Py_INCREF(self->jar);
p->jar = self->jar;
Py_INCREF(key);
p->oid = key;
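/* Set the ghost state directly, without going through the overridable
_p_deactivate(); this is what makes new_ghost safe for classes with
custom _p_deactivate methods (bug 185066). */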
p->state = cPersistent_GHOST_STATE;
}
Py_RETURN_NONE;
}
static struct PyMethodDef cc_methods[] = {
{"items", (PyCFunction)cc_items, METH_NOARGS,
......@@ -724,6 +821,8 @@ static struct PyMethodDef cc_methods[] = {
"update_object_size_estimation(oid, new_size) -- "
"update the caches size estimation for *oid* "
"(if this is known to the cache)."},
{"new_ghost", (PyCFunction)cc_new_ghost, METH_VARARGS,
"new_ghost() -- Initialize a ghost and add it to the cache."},
{NULL, NULL} /* sentinel */
};
......
......@@ -40,6 +40,80 @@ def test_delitem():
"""
def new_ghost():
"""
Creating ghosts (from scratch, as opposed to ghostifying a non-ghost)
in the current implementation is rather tricky. IPersistent doesn't
really provide the right interface given that:
- _p_deactivate and _p_invalidate are overridable and could assume
that the object's state is properly initialized.
- Assigning _p_changed to None or deleting it just calls _p_deactivate
or _p_invalidate.
The current cache implementation is intimately tied up with the
persistence implementation and has internal access to the persistence
state. The cache implementation can update the persistence state for
newly created and uninitialized objects directly.
The future persistence and cache implementations will be far more
decoupled. The persistence implementation will only manage object
state and generate object-usage events. The cache implementation(s)
will be responsible for managing persistence-related (meta-)state,
such as _p_state, _p_changed, _p_oid, etc. So in that future
implementation, the cache will be more central to managing object
persistence information.
Caches have a new_ghost method that:
- adds an object to the cache, and
- initializes its persistence data.
>>> import persistent
>>> class C(persistent.Persistent):
... pass
>>> jar = object()
>>> cache = persistent.PickleCache(jar, 10, 100)
>>> ob = C.__new__(C)
>>> cache.new_ghost('1', ob)
>>> ob._p_changed
>>> ob._p_jar is jar
True
>>> ob._p_oid
'1'
>>> cache.cache_non_ghost_count, cache.total_estimated_size
(0, 0)
Persistent metaclasses work too:
>>> import ZODB.persistentclass
>>> class PC:
... __metaclass__ = ZODB.persistentclass.PersistentMetaClass
>>> PC._p_oid
>>> PC._p_jar
>>> PC._p_serial
>>> PC._p_changed
False
>>> cache.new_ghost('2', PC)
>>> PC._p_oid
'2'
>>> PC._p_jar is jar
True
>>> PC._p_serial
>>> PC._p_changed
False
"""
from zope.testing.doctest import DocTestSuite
import unittest
......