Commit b2bc7098 authored by Rich Prohaska, committed by Yoni Fogel

#3520 lock tree test coverage refs[t:3520]

git-svn-id: file:///svn/toku/tokudb@42986 c7de825b-a66e-492c-adef-691d508d4ae1
parent e2196367
@@ -33,14 +33,10 @@ int toku_idlth_create(toku_idlth** pidlth) {
     int r = ENOSYS;
     assert(pidlth);
     toku_idlth* tmp = NULL;
-    tmp = (toku_idlth*) toku_malloc(sizeof(*tmp));
-    if (!tmp) { r = ENOMEM; goto cleanup; }
+    tmp = (toku_idlth*) toku_xmalloc(sizeof(*tmp));
     memset(tmp, 0, sizeof(*tmp));
     tmp->num_buckets = __toku_idlth_init_size;
-    tmp->buckets = (toku_idlth_elt*)
-        toku_malloc(tmp->num_buckets * sizeof(*tmp->buckets));
-    if (!tmp->buckets) { r = ENOMEM; goto cleanup; }
+    tmp->buckets = (toku_idlth_elt*) toku_xmalloc(tmp->num_buckets * sizeof(*tmp->buckets));
     memset(tmp->buckets, 0, tmp->num_buckets * sizeof(*tmp->buckets));
     toku__invalidate_scan(tmp);
     tmp->iter_head.next_in_iteration = &tmp->iter_head;
@@ -48,13 +44,6 @@ int toku_idlth_create(toku_idlth** pidlth) {
     *pidlth = tmp;
     r = 0;
-cleanup:
-    if (r != 0) {
-        if (tmp) {
-            if (tmp->buckets) { toku_free(tmp->buckets); }
-            toku_free(tmp);
-        }
-    }
     return r;
 }
@@ -129,8 +118,7 @@ int toku_idlth_insert(toku_idlth* idlth, DICTIONARY_ID dict_id) {
     uint32_t index = toku__idlth_hash(idlth, dict_id);
     /* Allocate a new one. */
-    toku_idlth_elt* element = (toku_idlth_elt*) toku_malloc(sizeof(*element));
-    if (!element) { r = ENOMEM; goto cleanup; }
+    toku_idlth_elt* element = (toku_idlth_elt*) toku_xmalloc(sizeof(*element));
     memset(element, 0, sizeof(*element));
     element->value.dict_id = dict_id;
@@ -144,7 +132,6 @@ int toku_idlth_insert(toku_idlth* idlth, DICTIONARY_ID dict_id) {
     idlth->num_keys++;
     r = 0;
-cleanup:
     return r;
 }
...
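Both hunks make the same substitution: toku_malloc can return NULL, so every call site carried an ENOMEM branch and a cleanup label, while toku_xmalloc never returns NULL to its caller, which makes those branches dead code. toku_xmalloc's definition is not part of this diff; the fragment below is only a sketch of the contract the new code appears to rely on (xmalloc_sketch is a hypothetical stand-in, assuming allocation failure aborts the process rather than propagating ENOMEM):

#include <assert.h>
#include <stdlib.h>

/* Hypothetical stand-in for toku_xmalloc, sketching the assumed contract:
 * the returned pointer is never NULL, because an exhausted allocator
 * aborts the process instead of returning an error to the caller. */
static void* xmalloc_sketch(size_t size) {
    void* p = malloc(size);
    assert(p != NULL);  /* failure stops here; callers need no NULL check */
    return p;
}

Under that contract, toku_idlth_create and toku_idlth_insert can only ever return 0, which is why the cleanup labels and the partial-teardown block in the create path could be deleted outright.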
@@ -32,14 +32,10 @@ int toku_rth_create(toku_rth** prth) {
     int r = ENOSYS;
     assert(prth);
     toku_rth* tmp = NULL;
-    tmp = (toku_rth*) toku_malloc(sizeof(*tmp));
-    if (!tmp) { r = ENOMEM; goto cleanup; }
+    tmp = (toku_rth*) toku_xmalloc(sizeof(*tmp));
     memset(tmp, 0, sizeof(*tmp));
     tmp->num_buckets = __toku_rth_init_size;
-    tmp->buckets = (toku_rth_elt*)
-        toku_malloc(tmp->num_buckets * sizeof(*tmp->buckets));
-    if (!tmp->buckets) { r = ENOMEM; goto cleanup; }
+    tmp->buckets = (toku_rth_elt*) toku_xmalloc(tmp->num_buckets * sizeof(*tmp->buckets));
     memset(tmp->buckets, 0, tmp->num_buckets * sizeof(*tmp->buckets));
     toku__invalidate_scan(tmp);
     tmp->iter_head.next_in_iteration = &tmp->iter_head;
@@ -47,13 +43,6 @@ int toku_rth_create(toku_rth** prth) {
     *prth = tmp;
     r = 0;
-cleanup:
-    if (r != 0) {
-        if (tmp) {
-            if (tmp->buckets) { toku_free(tmp->buckets); }
-            toku_free(tmp);
-        }
-    }
     return r;
 }
@@ -128,8 +117,7 @@ int toku_rth_insert(toku_rth* rth, TXNID key) {
     uint32_t index = toku__rth_hash(rth, key);
     /* Allocate a new one. */
-    toku_rth_elt* element = (toku_rth_elt*) toku_malloc(sizeof(*element));
-    if (!element) { r = ENOMEM; goto cleanup; }
+    toku_rth_elt* element = (toku_rth_elt*) toku_xmalloc(sizeof(*element));
     memset(element, 0, sizeof(*element));
     element->value.hash_key = key;
     element->next_in_iteration = rth->iter_head.next_in_iteration;
@@ -142,7 +130,6 @@ int toku_rth_insert(toku_rth* rth, TXNID key) {
     rth->num_keys++;
     r = 0;
-cleanup:
     return r;
 }
...
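The rth changes mirror the idlth ones exactly. To make the resulting shape concrete, here is a self-contained toy constructor in the same style as the post-patch toku_rth_create (all toy_* names are hypothetical, and xmalloc_sketch is the assumed abort-on-failure wrapper from the note above): with an infallible allocator the constructor is straight-line code.

#include <assert.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

typedef struct toy_elt { struct toy_elt* next_in_iteration; } toy_elt;
typedef struct { uint32_t num_buckets; toy_elt* buckets; toy_elt iter_head; } toy_ht;

/* Assumed abort-on-failure allocator, as sketched above. */
static void* xmalloc_sketch(size_t n) { void* p = malloc(n); assert(p); return p; }

/* Mirrors the post-patch shape of toku_rth_create: no ENOMEM branches,
 * no cleanup label, and the only possible return value is 0. */
static int toy_ht_create(toy_ht** pht, uint32_t init_size) {
    assert(pht);
    toy_ht* tmp = (toy_ht*) xmalloc_sketch(sizeof(*tmp));
    memset(tmp, 0, sizeof(*tmp));
    tmp->num_buckets = init_size;
    tmp->buckets = (toy_elt*) xmalloc_sketch(tmp->num_buckets * sizeof(*tmp->buckets));
    memset(tmp->buckets, 0, tmp->num_buckets * sizeof(*tmp->buckets));
    tmp->iter_head.next_in_iteration = &tmp->iter_head;
    *pht = tmp;
    return 0;
}

Every surviving path in the patched create and insert functions sets the result to 0; presumably the int return type is kept only so existing callers and tests do not have to change.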