Commit 605281d8 authored by Tim Peters's avatar Tim Peters

Merge revs 27788 and 27795 from Zope trunk.

See Collector #1350.

ghostify() and unghostify() detect one form of thread insanity
now.  In a debug build, they abort the process if it happens.
In a release build, unghostify() raises SystemError, and
ghostify() ignores it (ghostify() can't raise an exception).
parent a2a02f22
What's new in ZODB3 3.3.1?
==========================
Release date: xx-xxx-2004
persistent
----------
Collector #1350: ZODB has a default one-thread-per-connection model, and
two threads should never do operations on a single connection
simultaneously. However, ZODB can't detect violations, and this happened
in an early stage of Zope 2.8 development. The low-level ``ghostify()``
and ``unghostify()`` routines in ``cPersistence.c`` were changed to give
some help in detecting this when it happens. In a debug build, both abort
the process if thread interference is detected. This is extreme, but
impossible to overlook. In a release build, ``unghostify()`` raises
``SystemError`` if thread damage is detected; ``ghostify()`` ignores the
problem in a release build (``ghostify()`` is supposed to be so simple that
it "can't fail").
What's new in ZODB3 3.3? What's new in ZODB3 3.3?
======================== ========================
Release date: 06-Oct-2004 Release date: 06-Oct-2004
......
...@@ -58,6 +58,24 @@ init_strings(void) ...@@ -58,6 +58,24 @@ init_strings(void)
return 0; return 0;
} }
#ifdef Py_DEBUG
/* Abort the process with a diagnostic for Collector #1350.
 *
 * Called (debug builds only) when ghostify()/unghostify() detect that the
 * object's cache membership is inconsistent with its state — the only known
 * cause being two threads ghosting/unghosting the same object at once.
 *
 * caller: name of the detecting routine ("ghostify" or "unghostify").
 * detail: short description of the inconsistency observed.
 *
 * Never returns: Py_FatalError() terminates the process.
 */
static void
fatal_1350(cPersistentObject *self, const char *caller, const char *detail)
{
    char msg[1000];

    PyOS_snprintf(msg, sizeof msg,
        "cPersistence.c %s(): object at %p with type %.200s\n"
        "%s.\n"
        "The only known cause is multiple threads trying to ghost and\n"
        "unghost the object simultaneously.\n"
        "That's not legal, but ZODB can't stop it.\n"
        "See Collector #1350.\n",
        caller, self, self->ob_type->tp_name, detail);
    Py_FatalError(msg);
}
#endif
static void ghostify(cPersistentObject*); static void ghostify(cPersistentObject*);
/* Load the state of the object, unghostifying it. Upon success, return 1. /* Load the state of the object, unghostifying it. Upon success, return 1.
...@@ -88,6 +106,18 @@ unghostify(cPersistentObject *self) ...@@ -88,6 +106,18 @@ unghostify(cPersistentObject *self)
} }
self->state = cPersistent_UPTODATE_STATE; self->state = cPersistent_UPTODATE_STATE;
Py_DECREF(r); Py_DECREF(r);
if (self->cache && self->ring.r_next == NULL) {
#ifdef Py_DEBUG
fatal_1350(self, "unghostify",
"is not in the cache despite that we just "
"unghostified it");
#else
PyErr_Format(PyExc_SystemError, "object at %p with type "
"%.200s not in the cache despite that we just "
"unghostified it", self, self->ob_type->tp_name);
return -1;
#endif
}
} }
return 1; return 1;
} }
...@@ -134,13 +164,19 @@ ghostify(cPersistentObject *self) ...@@ -134,13 +164,19 @@ ghostify(cPersistentObject *self)
return; return;
} }
/* If the cache is still active, we must unlink the object. */ if (self->ring.r_next == NULL) {
if (self->ring.r_next) { /* There's no way to raise an error in this routine. */
/* if we're ghostifying an object, we better have some non-ghosts */ #ifdef Py_DEBUG
assert(self->cache->non_ghost_count > 0); fatal_1350(self, "ghostify", "claims to be in a cache but isn't");
self->cache->non_ghost_count--; #else
ring_del(&self->ring); return;
#endif
} }
/* If we're ghostifying an object, we better have some non-ghosts. */
assert(self->cache->non_ghost_count > 0);
self->cache->non_ghost_count--;
ring_del(&self->ring);
self->state = cPersistent_GHOST_STATE; self->state = cPersistent_GHOST_STATE;
dictptr = _PyObject_GetDictPtr((PyObject *)self); dictptr = _PyObject_GetDictPtr((PyObject *)self);
if (dictptr && *dictptr) { if (dictptr && *dictptr) {
...@@ -249,7 +285,7 @@ pickle_slotnames(PyTypeObject *cls) ...@@ -249,7 +285,7 @@ pickle_slotnames(PyTypeObject *cls)
return slotnames; return slotnames;
} }
slotnames = PyObject_CallFunctionObjArgs(copy_reg_slotnames, slotnames = PyObject_CallFunctionObjArgs(copy_reg_slotnames,
(PyObject*)cls, NULL); (PyObject*)cls, NULL);
if (slotnames && !(slotnames == Py_None || PyList_Check(slotnames))) { if (slotnames && !(slotnames == Py_None || PyList_Check(slotnames))) {
PyErr_SetString(PyExc_TypeError, PyErr_SetString(PyExc_TypeError,
...@@ -257,7 +293,7 @@ pickle_slotnames(PyTypeObject *cls) ...@@ -257,7 +293,7 @@ pickle_slotnames(PyTypeObject *cls)
Py_DECREF(slotnames); Py_DECREF(slotnames);
return NULL; return NULL;
} }
return slotnames; return slotnames;
} }
...@@ -288,7 +324,7 @@ pickle_copy_dict(PyObject *state) ...@@ -288,7 +324,7 @@ pickle_copy_dict(PyObject *state)
if (PyObject_SetItem(copy, key, value) < 0) if (PyObject_SetItem(copy, key, value) < 0)
goto err; goto err;
} }
return copy; return copy;
err: err:
Py_DECREF(copy); Py_DECREF(copy);
...@@ -366,13 +402,13 @@ pickle___getstate__(PyObject *self) ...@@ -366,13 +402,13 @@ pickle___getstate__(PyObject *self)
} }
} }
if (n) if (n)
state = Py_BuildValue("(NO)", state, slots); state = Py_BuildValue("(NO)", state, slots);
end: end:
Py_XDECREF(slotnames); Py_XDECREF(slotnames);
Py_XDECREF(slots); Py_XDECREF(slots);
return state; return state;
} }
...@@ -381,12 +417,12 @@ pickle_setattrs_from_dict(PyObject *self, PyObject *dict) ...@@ -381,12 +417,12 @@ pickle_setattrs_from_dict(PyObject *self, PyObject *dict)
{ {
PyObject *key, *value; PyObject *key, *value;
int pos = 0; int pos = 0;
if (!PyDict_Check(dict)) { if (!PyDict_Check(dict)) {
PyErr_SetString(PyExc_TypeError, "Expected dictionary"); PyErr_SetString(PyExc_TypeError, "Expected dictionary");
return -1; return -1;
} }
while (PyDict_Next(dict, &pos, &key, &value)) { while (PyDict_Next(dict, &pos, &key, &value)) {
if (PyObject_SetAttr(self, key, value) < 0) if (PyObject_SetAttr(self, key, value) < 0)
return -1; return -1;
...@@ -451,7 +487,7 @@ pickle___setstate__(PyObject *self, PyObject *state) ...@@ -451,7 +487,7 @@ pickle___setstate__(PyObject *self, PyObject *state)
return Py_None; return Py_None;
} }
static char pickle___reduce__doc[] = static char pickle___reduce__doc[] =
"Reduce an object to contituent parts for serialization\n" "Reduce an object to contituent parts for serialization\n"
; ;
...@@ -475,18 +511,18 @@ pickle___reduce__(PyObject *self) ...@@ -475,18 +511,18 @@ pickle___reduce__(PyObject *self)
PyErr_Clear(); PyErr_Clear();
l = 0; l = 0;
} }
args = PyTuple_New(l+1); args = PyTuple_New(l+1);
if (args == NULL) if (args == NULL)
goto end; goto end;
Py_INCREF(self->ob_type); Py_INCREF(self->ob_type);
PyTuple_SET_ITEM(args, 0, (PyObject*)(self->ob_type)); PyTuple_SET_ITEM(args, 0, (PyObject*)(self->ob_type));
for (i = 0; i < l; i++) { for (i = 0; i < l; i++) {
Py_INCREF(PyTuple_GET_ITEM(bargs, i)); Py_INCREF(PyTuple_GET_ITEM(bargs, i));
PyTuple_SET_ITEM(args, i+1, PyTuple_GET_ITEM(bargs, i)); PyTuple_SET_ITEM(args, i+1, PyTuple_GET_ITEM(bargs, i));
} }
state = PyObject_CallMethodObjArgs(self, py___getstate__, NULL); state = PyObject_CallMethodObjArgs(self, py___getstate__, NULL);
if (!state) if (!state)
goto end; goto end;
...@@ -680,7 +716,7 @@ Per__p_getattr(cPersistentObject *self, PyObject *name) ...@@ -680,7 +716,7 @@ Per__p_getattr(cPersistentObject *self, PyObject *name)
} }
else else
result = Py_True; result = Py_True;
Py_INCREF(result); Py_INCREF(result);
Done: Done:
...@@ -688,7 +724,7 @@ Per__p_getattr(cPersistentObject *self, PyObject *name) ...@@ -688,7 +724,7 @@ Per__p_getattr(cPersistentObject *self, PyObject *name)
return result; return result;
} }
/* /*
XXX we should probably not allow assignment of __class__ and __dict__. XXX we should probably not allow assignment of __class__ and __dict__.
*/ */
...@@ -1012,7 +1048,7 @@ static struct PyMethodDef Per_methods[] = { ...@@ -1012,7 +1048,7 @@ static struct PyMethodDef Per_methods[] = {
{"__getstate__", (PyCFunction)Per__getstate__, METH_NOARGS, {"__getstate__", (PyCFunction)Per__getstate__, METH_NOARGS,
pickle___getstate__doc }, pickle___getstate__doc },
{"__setstate__", (PyCFunction)pickle___setstate__, METH_O, {"__setstate__", (PyCFunction)pickle___setstate__, METH_O,
pickle___setstate__doc}, pickle___setstate__doc},
{"__reduce__", (PyCFunction)pickle___reduce__, METH_NOARGS, {"__reduce__", (PyCFunction)pickle___reduce__, METH_NOARGS,
pickle___reduce__doc}, pickle___reduce__doc},
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment