Commit a032f14b authored by Marko Mäkelä

MDEV-33559 matched_rec::block should be allocated from the buffer pool

matched_rec::rec_buf[], matched_rec::bufp: Remove.

matched_rec::block: Make this a pointer to something that
is allocated by buf_block_alloc(). In this way, the only
case where buf_block_t is constructed outside buf_pool
is ALTER TABLE...IMPORT TABLESPACE.

rtr_info::heap: Remove. This was only used for allocating matched_rec,
which is now smaller.
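
For illustration only, a minimal sketch of the allocation pattern after this
change; the identifiers are those used in the diff below, but the surrounding
rtr_info bookkeeping, mutex setup and error handling are omitted:

    /* matched_rec_t is now plain-allocated (no mem_heap); its shadow block
    comes from the buffer pool instead of the former rec_buf[] member. */
    matched_rec_t* matches = static_cast<matched_rec_t*>(
        ut_zalloc_nokey(sizeof *matches));

    /* on first use (rtr_init_match): allocate the block lazily */
    if (!matches->block) {
        matches->block = buf_block_alloc();
    }

    /* on cleanup (rtr_clean_rtr_info): return the block to the buffer pool */
    if (matches->block) {
        buf_block_free(matches->block);
        matches->block = nullptr;
    }
    ut_free(matches);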

mtr_t::memmove(): Simplify some code to avoid GCC 9.4.0 -Wconversion
in the 10.6 branch as a result of these changes.
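
As a worked example of the simplified relative-offset encoding (a sketch that
mirrors the expression in mtr_t::memmove() in the diff below; encode_rel_offset()
is a hypothetical helper, not a function in the tree):

    #include <cstdint>
    #include <cstddef>

    /* The source offset is encoded relative to the destination offset, with
    the sign in the least significant bit; because a relative offset of 0 is
    impossible (s != d is asserted), 1 is subtracted from the distance. */
    static uint16_t encode_rel_offset(size_t d, size_t s)
    {
      return s > d
        ? uint16_t((s - d - 1) << 1)
        : uint16_t((d - s - 1) << 1 | 1);
    }

    /* e.g. d=10, s=16 encodes as 10; d=16, s=10 encodes as 11 */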

Reviewed by: Debarun Banerjee
parent ea810b04
@@ -498,7 +498,7 @@ rtr_pcur_move_to_next(
mutex_exit(&rtr_info->matches->rtr_match_mutex);
cursor->btr_cur.page_cur.rec = rec.r_rec;
cursor->btr_cur.page_cur.block = &rtr_info->matches->block;
cursor->btr_cur.page_cur.block = rtr_info->matches->block;
DEBUG_SYNC_C("rtr_pcur_move_to_next_return");
return(true);
@@ -939,22 +939,14 @@ rtr_create_rtr_info(
rtr_info->index = index;
if (init_matches) {
rtr_info->heap = mem_heap_create(sizeof(*(rtr_info->matches)));
rtr_info->matches = static_cast<matched_rec_t*>(
mem_heap_zalloc(
rtr_info->heap,
sizeof(*rtr_info->matches)));
ut_zalloc_nokey(sizeof *rtr_info->matches));
rtr_info->matches->matched_recs
= UT_NEW_NOKEY(rtr_rec_vector());
rtr_info->matches->bufp = page_align(rtr_info->matches->rec_buf
+ UNIV_PAGE_SIZE_MAX + 1);
mutex_create(LATCH_ID_RTR_MATCH_MUTEX,
&rtr_info->matches->rtr_match_mutex);
rw_lock_create(PFS_NOT_INSTRUMENTED,
&(rtr_info->matches->block.lock),
SYNC_LEVEL_VARYING);
}
rtr_info->path = UT_NEW_NOKEY(rtr_node_path_t());
@@ -1016,7 +1008,6 @@ rtr_init_rtr_info(
rtr_info->mbr.ymin = 0.0;
rtr_info->mbr.ymax = 0.0;
rtr_info->thr = NULL;
rtr_info->heap = NULL;
rtr_info->cursor = NULL;
rtr_info->index = NULL;
rtr_info->need_prdt_lock = false;
@@ -1095,17 +1086,15 @@ rtr_clean_rtr_info(
if (free_all) {
if (rtr_info->matches) {
if (rtr_info->matches->matched_recs != NULL) {
UT_DELETE(rtr_info->matches->matched_recs);
if (rtr_info->matches->block) {
buf_block_free(rtr_info->matches->block);
rtr_info->matches->block = nullptr;
}
rw_lock_free(&(rtr_info->matches->block.lock));
UT_DELETE(rtr_info->matches->matched_recs);
mutex_destroy(&rtr_info->matches->rtr_match_mutex);
}
if (rtr_info->heap) {
mem_heap_free(rtr_info->heap);
ut_free(rtr_info->matches);
}
if (initialized) {
@@ -1215,7 +1204,7 @@ rtr_check_discard_page(
if (rtr_info->matches) {
mutex_enter(&rtr_info->matches->rtr_match_mutex);
if ((&rtr_info->matches->block)->page.id().page_no()
if (rtr_info->matches->block->page.id().page_no()
== pageno) {
if (!rtr_info->matches->matched_recs->empty()) {
rtr_info->matches->matched_recs->clear();
@@ -1425,7 +1414,7 @@ rtr_leaf_push_match_rec(
ulint data_len;
rtr_rec_t rtr_rec;
buf = match_rec->block.frame + match_rec->used;
buf = match_rec->block->frame + match_rec->used;
ut_ad(page_rec_is_leaf(rec));
copy = rec_copy(buf, rec, offsets);
@@ -1522,43 +1511,6 @@ rtr_non_leaf_insert_stack_push(
new_seq, level, child_no, my_cursor, mbr_inc);
}
/** Copy a buf_block_t, except "block->lock".
@param[in,out] matches copy to match->block
@param[in] block block to copy */
static
void
rtr_copy_buf(
matched_rec_t* matches,
const buf_block_t* block)
{
/* Copy all members of "block" to "matches->block" except "lock".
We skip "lock" because it is not used
from the dummy buf_block_t we create here and because memcpy()ing
it generates (valid) compiler warnings that the vtable pointer
will be copied. */
new (&matches->block.page) buf_page_t(block->page);
matches->block.frame = block->frame;
matches->block.unzip_LRU = block->unzip_LRU;
ut_d(matches->block.in_unzip_LRU_list = block->in_unzip_LRU_list);
ut_d(matches->block.in_withdraw_list = block->in_withdraw_list);
/* Skip buf_block_t::lock */
matches->block.modify_clock = block->modify_clock;
#ifdef BTR_CUR_HASH_ADAPT
matches->block.n_hash_helps = block->n_hash_helps;
matches->block.n_fields = block->n_fields;
matches->block.left_side = block->left_side;
#if defined UNIV_AHI_DEBUG || defined UNIV_DEBUG
matches->block.n_pointers = 0;
#endif /* UNIV_AHI_DEBUG || UNIV_DEBUG */
matches->block.curr_n_fields = block->curr_n_fields;
matches->block.curr_left_side = block->curr_left_side;
matches->block.index = block->index;
#endif /* BTR_CUR_HASH_ADAPT */
ut_d(matches->block.debug_latch = NULL);
}
/****************************************************************//**
Generate a shadow copy of the page block header to save the
matched records */
@@ -1572,17 +1524,18 @@ rtr_init_match(
{
ut_ad(matches->matched_recs->empty());
matches->locked = false;
rtr_copy_buf(matches, block);
matches->block.frame = matches->bufp;
matches->valid = false;
/* We have to copy PAGE_W*_SUPREMUM_END bytes so that we can
if (!matches->block) {
matches->block = buf_block_alloc();
}
matches->block->page.init(block->page.id());
/* We have to copy PAGE_*_SUPREMUM_END bytes so that we can
use infimum/supremum of this page as normal btr page for search. */
memcpy(matches->block.frame, page, page_is_comp(page)
? PAGE_NEW_SUPREMUM_END
: PAGE_OLD_SUPREMUM_END);
matches->used = page_is_comp(page)
? PAGE_NEW_SUPREMUM_END
: PAGE_OLD_SUPREMUM_END;
memcpy(matches->block->frame, page, matches->used);
#ifdef RTR_SEARCH_DIAGNOSTIC
ulint pageno = page_get_page_no(page);
fprintf(stderr, "INNODB_RTR: Searching leaf page %d\n",
@@ -2002,7 +1955,7 @@ rtr_cur_search_with_match(
#endif /* UNIV_DEBUG */
/* Pop the last match record and position on it */
match_rec->matched_recs->pop_back();
page_cur_position(test_rec.r_rec, &match_rec->block,
page_cur_position(test_rec.r_rec, match_rec->block,
cursor);
}
} else {
......
@@ -66,10 +66,7 @@ typedef std::vector<rtr_rec_t, ut_allocator<rtr_rec_t> > rtr_rec_vector;
/* Structure for matched records on the leaf page */
typedef struct matched_rec {
byte* bufp; /*!< aligned buffer point */
byte rec_buf[UNIV_PAGE_SIZE_MAX * 2];
/*!< buffer used to copy matching rec */
buf_block_t block; /*!< the shadow buffer block */
buf_block_t* block; /*!< the shadow buffer block */
ulint used; /*!< memory used */
rtr_rec_vector* matched_recs; /*!< vector holding the matching rec */
ib_mutex_t rtr_match_mutex;/*!< mutex protect the match_recs
@@ -113,7 +110,6 @@ typedef struct rtr_info{
on each level and leaf level */
rtr_mbr_t mbr; /*!< the search MBR */
que_thr_t* thr; /*!< the search thread */
mem_heap_t* heap; /*!< memory heap */
btr_cur_t* cursor; /*!< cursor used for search */
dict_index_t* index; /*!< index it is searching */
bool need_prdt_lock;
......
@@ -347,11 +347,12 @@ inline void mtr_t::memmove(const buf_block_t &b, ulint d, ulint s, ulint len)
ut_ad(d >= 8);
ut_ad(s >= 8);
ut_ad(len);
ut_ad(s <= ulint(srv_page_size));
ut_ad(s + len <= ulint(srv_page_size));
ut_d(const ulint ps= srv_page_size);
ut_ad(s <= ps);
ut_ad(s + len <= ps);
ut_ad(s != d);
ut_ad(d <= ulint(srv_page_size));
ut_ad(d + len <= ulint(srv_page_size));
ut_ad(d <= ps);
ut_ad(d + len <= ps);
set_modified(b);
if (m_log_mode != MTR_LOG_ALL)
@@ -359,17 +360,17 @@ inline void mtr_t::memmove(const buf_block_t &b, ulint d, ulint s, ulint len)
static_assert(MIN_4BYTE > UNIV_PAGE_SIZE_MAX, "consistency");
size_t lenlen= (len < MIN_2BYTE ? 1 : len < MIN_3BYTE ? 2 : 3);
/* The source offset is encoded relative to the destination offset,
with the sign in the least significant bit. */
if (s > d)
s= (s - d) << 1;
else
s= (d - s) << 1 | 1;
with the sign in the least significant bit.
Because the source offset 0 is not possible, our encoding
subtracts 1 from the offset. */
const uint16_t S= s > d
? uint16_t((s - d - 1) << 1)
: uint16_t((d - s - 1) << 1 | 1);
/* The source offset 0 is not possible. */
s-= 1 << 1;
size_t slen= (s < MIN_2BYTE ? 1 : s < MIN_3BYTE ? 2 : 3);
size_t slen= (S < MIN_2BYTE ? 1 : S < MIN_3BYTE ? 2 : 3);
byte *l= log_write<MEMMOVE>(b.page.id(), &b.page, lenlen + slen, true, d);
l= mlog_encode_varint(l, len);
l= mlog_encode_varint(l, s);
l= mlog_encode_varint(l, S);
m_log.close(l);
m_last_offset= static_cast<uint16_t>(d + len);
}
......
@@ -1151,10 +1151,10 @@ sel_set_rtr_rec_lock(
ut_ad(page_align(first_rec) == cur_block->frame);
ut_ad(match->valid);
rw_lock_x_lock(&(match->block.lock));
rw_lock_x_lock(&match->block->lock);
retry:
cur_block = btr_pcur_get_block(pcur);
ut_ad(rw_lock_own_flagged(&match->block.lock,
ut_ad(rw_lock_own_flagged(&match->block->lock,
RW_LOCK_FLAG_X | RW_LOCK_FLAG_S));
ut_ad(page_is_leaf(buf_block_get_frame(cur_block)));
@@ -1255,7 +1255,7 @@ sel_set_rtr_rec_lock(
ULINT_UNDEFINED, &heap);
err = lock_sec_rec_read_check_and_lock(
0, &match->block, rtr_rec->r_rec, index,
0, match->block, rtr_rec->r_rec, index,
my_offsets, static_cast<lock_mode>(mode),
type, thr);
@@ -1271,7 +1271,7 @@ sel_set_rtr_rec_lock(
match->locked = true;
func_end:
rw_lock_x_unlock(&(match->block.lock));
rw_lock_x_unlock(&match->block->lock);
if (heap != NULL) {
mem_heap_free(heap);
}
@@ -3352,7 +3352,7 @@ Row_sel_get_clust_rec_for_mysql::operator()(
if (dict_index_is_spatial(sec_index)
&& btr_cur->rtr_info->matches
&& (page_align(rec)
== btr_cur->rtr_info->matches->block.frame
== btr_cur->rtr_info->matches->block->frame
|| rec != btr_pcur_get_rec(prebuilt->pcur))) {
#ifdef UNIV_DEBUG
rtr_info_t* rtr_info = btr_cur->rtr_info;
......