Commit 8cb75b98 authored by Oleksandr Byelkin

Merge branch '10.9' into bb-10.9-release

parents 10ed5276 11d6de70
@@ -2295,5 +2295,16 @@ SELECT * FROM JSON_TABLE('{"foo":["bar","qux"]}','$**.*[0]' COLUMNS(col1 CHAR(8)
 col1
 bar
 #
+# MDEV-29212: json_overlaps() does not check nested key-value pair correctly
+#
+SET @json1 = '{"kk":{"k1":"v1","k2":"v2"}}';
+SET @json2 = '{"kk":{"k1":"v1","k2":"v2","k3":"v3"}}';
+SELECT JSON_OVERLAPS(@json2, @json1);
+JSON_OVERLAPS(@json2, @json1)
+0
+SELECT JSON_OVERLAPS(@json1, @json2);
+JSON_OVERLAPS(@json1, @json2)
+0
+#
 # End of 10.9 Test
 #
@@ -1547,6 +1547,15 @@ SELECT JSON_EXISTS(@json, '$[2][2][1 to 4]');
 SELECT * FROM JSON_TABLE('{"foo":["bar","qux"]}','$**.*[0]' COLUMNS(col1 CHAR(8) PATH '$[0]')) AS jt;
 --echo #
+--echo # MDEV-29212: json_overlaps() does not check nested key-value pair correctly
+--echo #
+SET @json1 = '{"kk":{"k1":"v1","k2":"v2"}}';
+SET @json2 = '{"kk":{"k1":"v1","k2":"v2","k3":"v3"}}';
+SELECT JSON_OVERLAPS(@json2, @json1);
+SELECT JSON_OVERLAPS(@json1, @json2);
+--echo #
 --echo # End of 10.9 Test
 --echo #
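The expected results above follow from JSON_OVERLAPS()'s object semantics: two documents overlap only if they share at least one key whose values compare equal, and for a nested object value "equal" means the whole nested object, not a subset of it. Below is a minimal standalone sketch of that rule in plain C++ (std::map stands in for a parsed one-level document; this is an illustration, not MariaDB code or part of this commit):

#include <cassert>
#include <map>
#include <string>

// One level of nesting is enough to mirror the test case:
// a document maps top-level keys to nested objects.
using Obj = std::map<std::string, std::string>;
using Doc = std::map<std::string, Obj>;

// Overlap rule for two objects: at least one common key whose values
// compare equal as a whole (a nested subset does not count).
static bool overlaps(const Doc &a, const Doc &b)
{
  for (const auto &kv : a)
  {
    auto it = b.find(kv.first);
    if (it != b.end() && it->second == kv.second)
      return true;
  }
  return false;
}

int main()
{
  Doc json1 = {{"kk", {{"k1", "v1"}, {"k2", "v2"}}}};
  Doc json2 = {{"kk", {{"k1", "v1"}, {"k2", "v2"}, {"k3", "v3"}}}};
  // json1.kk is a strict subset of json2.kk, so neither direction
  // overlaps -- matching the two 0 results expected by the test.
  assert(!overlaps(json1, json2));
  assert(!overlaps(json2, json1));
  return 0;
}

Whichever document is passed first, the shared key "kk" never carries identical values, so the test expects 0 in both directions.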
@@ -4360,7 +4360,7 @@ bool json_compare_arr_and_obj(json_engine_t *js, json_engine_t *value)
         return TRUE;
       *value= loc_val;
     }
-    if (!json_value_scalar(js))
+    if (js->value_type == JSON_VALUE_ARRAY)
       json_skip_level(js);
   }
   return FALSE;
@@ -4446,10 +4446,49 @@ int json_find_overlap_with_array(json_engine_t *js, json_engine_t *value,
 }
+int compare_nested_object(json_engine_t *js, json_engine_t *value)
+{
+  int result= 0;
+  const char *value_begin= (const char*)value->s.c_str-1;
+  const char *js_begin= (const char*)js->s.c_str-1;
+  json_skip_level(value);
+  json_skip_level(js);
+  const char *value_end= (const char*)value->s.c_str;
+  const char *js_end= (const char*)js->s.c_str;
+  String a(value_begin, value_end-value_begin, value->s.cs);
+  String b(js_begin, js_end-js_begin, js->s.cs);
+  DYNAMIC_STRING a_res, b_res;
+  if (init_dynamic_string(&a_res, NULL, 4096, 1024) ||
+      init_dynamic_string(&b_res, NULL, 4096, 1024))
+  {
+    goto error;
+  }
+  if (json_normalize(&a_res, a.ptr(), a.length(), value->s.cs) ||
+      json_normalize(&b_res, b.ptr(), b.length(), value->s.cs))
+  {
+    goto error;
+  }
+  result= strcmp(a_res.str, b_res.str) ? 0 : 1;
+error:
+  dynstr_free(&a_res);
+  dynstr_free(&b_res);
+  return MY_TEST(result);
+}
 int json_find_overlap_with_object(json_engine_t *js, json_engine_t *value,
                                   bool compare_whole)
 {
   if (value->value_type == JSON_VALUE_OBJECT)
   {
+    if (compare_whole)
+    {
+      return compare_nested_object(js, value);
+    }
+    else
+    {
       /* Find at least one common key-value pair */
       json_string_t key_name;
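The new compare_nested_object() above decides whole-object equality by rendering both nested values in a canonical form (json_normalize() into a DYNAMIC_STRING) and then doing a plain string comparison. Here is a standalone sketch of that normalize-then-compare idea in plain C++ (not the server code; the hand-rolled normalize() stands in for json_normalize() and handles only flat string-valued objects):

#include <cassert>
#include <map>
#include <string>

using Obj = std::map<std::string, std::string>;  // std::map keeps keys sorted

// Canonical text form: sorted keys, no insignificant whitespace.
// Plays the role json_normalize() plays in compare_nested_object().
static std::string normalize(const Obj &o)
{
  std::string out = "{";
  for (const auto &kv : o)
  {
    if (out.size() > 1)
      out += ",";
    out += "\"" + kv.first + "\":\"" + kv.second + "\"";
  }
  return out + "}";
}

int main()
{
  Obj small_obj = {{"k1", "v1"}, {"k2", "v2"}};
  Obj reordered = {{"k2", "v2"}, {"k1", "v1"}};
  Obj big_obj   = {{"k1", "v1"}, {"k2", "v2"}, {"k3", "v3"}};

  // Member order and formatting differences vanish after normalization...
  assert(normalize(small_obj) == normalize(reordered));
  // ...but a subset is not the same object, so the comparison fails and
  // no overlap is reported for the nested values.
  assert(normalize(small_obj) != normalize(big_obj));
  return 0;
}

Normalizing first makes the equality check independent of member order and whitespace, which is what lets compare_nested_object() settle whole-object equality with a single strcmp().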
@@ -4489,33 +4528,49 @@ int json_find_overlap_with_object(json_engine_t *js, json_engine_t *value,
           found_value= check_overlaps(js, value, true);
           if (found_value)
           {
-            if (!compare_whole)
+            /*
+              We have found at least one common key-value pair now.
+              No need to check for more key-value pairs. So skip remaining
+              jsons and return TRUE.
+            */
+            json_skip_current_level(js, value);
             return TRUE;
-            *js= loc_js;
           }
           else
           {
-            if (compare_whole)
-            {
-              json_skip_current_level(js, value);
-              return FALSE;
-            }
+            /*
+              Key is found but value is not found. We have already
+              exhausted both values for current key. Hence "reset"
+              only js (first argument i.e json document) and
+              continue.
+            */
             *js= loc_js;
+            continue;
           }
         }
         else
         {
-          if (compare_whole)
-          {
-            json_skip_current_level(js, value);
+          /*
+            Key is not found. So no need to check for value for that key.
+            Read the value anyway so we get the "type" of json value.
+            If it is non-scalar then skip the entire value
+            (scalar values get exhausted while reading so no need to skip them).
+            Then reset the json doc again.
+          */
+          if (json_read_value(value))
             return FALSE;
-          }
-          json_skip_key(value);
+          if (!json_value_scalar(value))
+            json_skip_level(value);
           *js= loc_js;
         }
       }
+      /*
+        At this point we have already returned true if any intersection exists.
+        So skip jsons if not exhausted and return false.
+      */
       json_skip_current_level(js, value);
-      return compare_whole ? TRUE : FALSE;
+      return FALSE;
+    }
   }
   else if (value->value_type == JSON_VALUE_ARRAY)
   {
......