diff options
author | Sergei Golubchik <serg@mariadb.org> | 2021-07-02 19:13:26 +0200 |
---|---|---|
committer | Sergei Golubchik <serg@mariadb.org> | 2021-07-02 19:13:26 +0200 |
commit | 7808cf9a8b4430262df23b6410de61c499ee740d (patch) | |
tree | 852be714b0165e8320b06fd4181c4ba758847df4 | |
parent | 8cb1a5d431fcfb1cc87c915f8e41137ab234b894 (diff) | |
download | mariadb-git-bb-10.6-serg-MDEV-24909.tar.gz |
MDEV-24909 JSON functions don't respect KILL QUERY / max_statement_time limit (branch: bb-10.6-serg-MDEV-24909)
pass the pointer to thd->killed down to the json library,
check it while scanning,
use thd->check_killed() to generate the proper error message
-rw-r--r-- | include/json_lib.h | 1 | ||||
-rw-r--r-- | mysql-test/main/func_json_notembedded.result | 91 | ||||
-rw-r--r-- | mysql-test/main/func_json_notembedded.test | 61 | ||||
-rw-r--r-- | sql/item_jsonfunc.cc | 38 | ||||
-rw-r--r-- | strings/json_lib.c | 5 |
5 files changed, 188 insertions(+), 8 deletions(-)
diff --git a/include/json_lib.h b/include/json_lib.h index bb649928eaa..e570e2a9d17 100644 --- a/include/json_lib.h +++ b/include/json_lib.h @@ -222,6 +222,7 @@ typedef struct st_json_engine_t int stack[JSON_DEPTH_LIMIT]; /* Keeps the stack of nested JSON structures. */ int stack_p; /* The 'stack' pointer. */ + volatile uchar *killed_ptr; } json_engine_t; diff --git a/mysql-test/main/func_json_notembedded.result b/mysql-test/main/func_json_notembedded.result new file mode 100644 index 00000000000..7d169cb858e --- /dev/null +++ b/mysql-test/main/func_json_notembedded.result @@ -0,0 +1,91 @@ +set global max_allowed_packet=1073741824; +connect u,localhost,root; +# +# MDEV-24909 JSON functions don't respect KILL QUERY / max_statement_time limit +# +set group_concat_max_len= 4294967295; +set @obj=concat('{', repeat('"a":"b",', 12500000), '"c":"d"}'); +set @arr=concat('[', repeat('1234567,', 12500000), '2345678]'); +select length(@obj), length(@arr); +length(@obj) length(@arr) +100000009 100000009 +set @now=unix_timestamp(); +set statement max_statement_time= 1 for select json_array_append(@arr, '$[0]', 1); +ERROR 70100: Query execution was interrupted (max_statement_time exceeded) +select greatest(unix_timestamp()-@now, 2); +greatest(unix_timestamp()-@now, 2) +2 +set @now=unix_timestamp(); +set statement max_statement_time= 1 for select json_array_insert(@arr, '$[0]', 1); +ERROR 70100: Query execution was interrupted (max_statement_time exceeded) +select greatest(unix_timestamp()-@now, 2); +greatest(unix_timestamp()-@now, 2) +2 +set @now=unix_timestamp(); +set statement max_statement_time= 1 for select json_insert(@obj, '$.meta', 1); +ERROR 70100: Query execution was interrupted (max_statement_time exceeded) +select greatest(unix_timestamp()-@now, 2); +greatest(unix_timestamp()-@now, 2) +2 +set @now=unix_timestamp(); +set statement max_statement_time= 1 for select json_compact(@arr); +ERROR 70100: Query execution was interrupted (max_statement_time exceeded) +select 
greatest(unix_timestamp()-@now, 2); +greatest(unix_timestamp()-@now, 2) +2 +set @now=unix_timestamp(); +set statement max_statement_time= 1 for select json_detailed(@arr); +ERROR 70100: Query execution was interrupted (max_statement_time exceeded) +select greatest(unix_timestamp()-@now, 2); +greatest(unix_timestamp()-@now, 2) +2 +set @now=unix_timestamp(); +set statement max_statement_time= 1 for select json_loose(@arr); +ERROR 70100: Query execution was interrupted (max_statement_time exceeded) +select greatest(unix_timestamp()-@now, 2); +greatest(unix_timestamp()-@now, 2) +2 +set @now=unix_timestamp(); +set statement max_statement_time= 1 for select json_merge(@obj, @arr); +ERROR 70100: Query execution was interrupted (max_statement_time exceeded) +select greatest(unix_timestamp()-@now, 2); +greatest(unix_timestamp()-@now, 2) +2 +set @now=unix_timestamp(); +set statement max_statement_time= 1 for select json_merge_patch(@obj, @obj); +ERROR 70100: Query execution was interrupted (max_statement_time exceeded) +select greatest(unix_timestamp()-@now, 2); +greatest(unix_timestamp()-@now, 2) +2 +set @now=unix_timestamp(); +set statement max_statement_time= 1 for select json_merge_preserve(@obj, @arr); +ERROR 70100: Query execution was interrupted (max_statement_time exceeded) +select greatest(unix_timestamp()-@now, 2); +greatest(unix_timestamp()-@now, 2) +2 +set @now=unix_timestamp(); +set statement max_statement_time= 1 for select json_remove(@obj,'$.foo'); +ERROR 70100: Query execution was interrupted (max_statement_time exceeded) +select greatest(unix_timestamp()-@now, 2); +greatest(unix_timestamp()-@now, 2) +2 +set @now=unix_timestamp(); +set statement max_statement_time= 1 for select json_replace(@obj,'$.foo',1); +ERROR 70100: Query execution was interrupted (max_statement_time exceeded) +select greatest(unix_timestamp()-@now, 2); +greatest(unix_timestamp()-@now, 2) +2 +set @now=unix_timestamp(); +set statement max_statement_time= 1 for select 
json_set(@arr,'$[1000]',1); +ERROR 70100: Query execution was interrupted (max_statement_time exceeded) +select greatest(unix_timestamp()-@now, 2); +greatest(unix_timestamp()-@now, 2) +2 +set @now=unix_timestamp(); +disconnect u; +connection default; +set group_concat_max_len= default; +set global max_allowed_packet=default; +# +# End of 10.5 tests +# diff --git a/mysql-test/main/func_json_notembedded.test b/mysql-test/main/func_json_notembedded.test new file mode 100644 index 00000000000..8108cf524fd --- /dev/null +++ b/mysql-test/main/func_json_notembedded.test @@ -0,0 +1,61 @@ +source include/have_sequence.inc; +source include/not_embedded.inc; + +set global max_allowed_packet=1073741824; +connect u,localhost,root; + +--echo # +--echo # MDEV-24909 JSON functions don't respect KILL QUERY / max_statement_time limit +--echo # +set group_concat_max_len= 4294967295; + +set @obj=concat('{', repeat('"a":"b",', 12500000), '"c":"d"}'); +set @arr=concat('[', repeat('1234567,', 12500000), '2345678]'); +select length(@obj), length(@arr); + +set @now=unix_timestamp(); +--error ER_STATEMENT_TIMEOUT +set statement max_statement_time= 1 for select json_array_append(@arr, '$[0]', 1); +select greatest(unix_timestamp()-@now, 2); set @now=unix_timestamp(); +--error ER_STATEMENT_TIMEOUT +set statement max_statement_time= 1 for select json_array_insert(@arr, '$[0]', 1); +select greatest(unix_timestamp()-@now, 2); set @now=unix_timestamp(); +--error ER_STATEMENT_TIMEOUT +set statement max_statement_time= 1 for select json_insert(@obj, '$.meta', 1); +select greatest(unix_timestamp()-@now, 2); set @now=unix_timestamp(); +--error ER_STATEMENT_TIMEOUT +set statement max_statement_time= 1 for select json_compact(@arr); +select greatest(unix_timestamp()-@now, 2); set @now=unix_timestamp(); +--error ER_STATEMENT_TIMEOUT +set statement max_statement_time= 1 for select json_detailed(@arr); +select greatest(unix_timestamp()-@now, 2); set @now=unix_timestamp(); +--error ER_STATEMENT_TIMEOUT +set 
statement max_statement_time= 1 for select json_loose(@arr); +select greatest(unix_timestamp()-@now, 2); set @now=unix_timestamp(); +--error ER_STATEMENT_TIMEOUT +set statement max_statement_time= 1 for select json_merge(@obj, @arr); +select greatest(unix_timestamp()-@now, 2); set @now=unix_timestamp(); +--error ER_STATEMENT_TIMEOUT +set statement max_statement_time= 1 for select json_merge_patch(@obj, @obj); +select greatest(unix_timestamp()-@now, 2); set @now=unix_timestamp(); +--error ER_STATEMENT_TIMEOUT +set statement max_statement_time= 1 for select json_merge_preserve(@obj, @arr); +select greatest(unix_timestamp()-@now, 2); set @now=unix_timestamp(); +--error ER_STATEMENT_TIMEOUT +set statement max_statement_time= 1 for select json_remove(@obj,'$.foo'); +select greatest(unix_timestamp()-@now, 2); set @now=unix_timestamp(); +--error ER_STATEMENT_TIMEOUT +set statement max_statement_time= 1 for select json_replace(@obj,'$.foo',1); +select greatest(unix_timestamp()-@now, 2); set @now=unix_timestamp(); +--error ER_STATEMENT_TIMEOUT +set statement max_statement_time= 1 for select json_set(@arr,'$[1000]',1); +select greatest(unix_timestamp()-@now, 2); set @now=unix_timestamp(); + +disconnect u; +connection default; +set group_concat_max_len= default; +set global max_allowed_packet=default; + +--echo # +--echo # End of 10.5 tests +--echo # diff --git a/sql/item_jsonfunc.cc b/sql/item_jsonfunc.cc index 2525b465935..c43af910f98 100644 --- a/sql/item_jsonfunc.cc +++ b/sql/item_jsonfunc.cc @@ -1653,6 +1653,7 @@ String *Item_func_json_array_append::val_str(String *str) uint n_arg, n_path; size_t str_rest_len; const uchar *ar_end; + THD *thd= current_thd; DBUG_ASSERT(fixed()); @@ -1680,6 +1681,7 @@ String *Item_func_json_array_append::val_str(String *str) json_scan_start(&je, js->charset(),(const uchar *) js->ptr(), (const uchar *) js->ptr() + js->length()); + je.killed_ptr= (uchar*)&thd->killed; c_path->cur_step= c_path->p.steps; @@ -1760,7 +1762,8 @@ String 
*Item_func_json_array_append::val_str(String *str) json_scan_start(&je, js->charset(),(const uchar *) js->ptr(), (const uchar *) js->ptr() + js->length()); - if (json_nice(&je, str, Item_func_json_format::LOOSE)) + je.killed_ptr= (uchar*)&thd->killed; + if (json_nice(&je, str, Item_func_json_format::LOOSE) || thd->check_killed()) goto js_error; return str; @@ -1779,6 +1782,7 @@ String *Item_func_json_array_insert::val_str(String *str) json_engine_t je; String *js= args[0]->val_json(&tmp_js); uint n_arg, n_path; + THD *thd= current_thd; DBUG_ASSERT(fixed()); @@ -1816,6 +1820,7 @@ String *Item_func_json_array_insert::val_str(String *str) json_scan_start(&je, js->charset(),(const uchar *) js->ptr(), (const uchar *) js->ptr() + js->length()); + je.killed_ptr= (uchar*)&thd->killed; c_path->cur_step= c_path->p.steps; @@ -1899,7 +1904,8 @@ String *Item_func_json_array_insert::val_str(String *str) json_scan_start(&je, js->charset(),(const uchar *) js->ptr(), (const uchar *) js->ptr() + js->length()); - if (json_nice(&je, str, Item_func_json_format::LOOSE)) + je.killed_ptr= (uchar*)&thd->killed; + if (json_nice(&je, str, Item_func_json_format::LOOSE) || thd->check_killed()) goto js_error; return str; @@ -2165,6 +2171,7 @@ String *Item_func_json_merge::val_str(String *str) json_engine_t je1, je2; String *js1= args[0]->val_json(&tmp_js1), *js2=NULL; uint n_arg; + THD *thd= current_thd; LINT_INIT(js2); if (args[0]->null_value) @@ -2181,9 +2188,11 @@ String *Item_func_json_merge::val_str(String *str) json_scan_start(&je1, js1->charset(),(const uchar *) js1->ptr(), (const uchar *) js1->ptr() + js1->length()); + je1.killed_ptr= (uchar*)&thd->killed; json_scan_start(&je2, js2->charset(),(const uchar *) js2->ptr(), (const uchar *) js2->ptr() + js2->length()); + je2.killed_ptr= (uchar*)&thd->killed; if (do_merge(str, &je1, &je2)) goto error_return; @@ -2205,7 +2214,8 @@ String *Item_func_json_merge::val_str(String *str) json_scan_start(&je1, js1->charset(),(const uchar *) 
js1->ptr(), (const uchar *) js1->ptr() + js1->length()); - if (json_nice(&je1, str, Item_func_json_format::LOOSE)) + je1.killed_ptr= (uchar*)&thd->killed; + if (json_nice(&je1, str, Item_func_json_format::LOOSE) || thd->killed) goto error_return; null_value= 0; @@ -2216,6 +2226,7 @@ error_return: report_json_error(js1, &je1, 0); if (je2.s.error) report_json_error(js2, &je2, n_arg); + thd->check_killed(); // to get the error message right null_return: null_value= 1; return NULL; @@ -2465,6 +2476,7 @@ String *Item_func_json_merge_patch::val_str(String *str) String *js1= args[0]->val_json(&tmp_js1), *js2=NULL; uint n_arg; bool empty_result, merge_to_null; + THD *thd= current_thd; /* To report errors properly if some JSON is invalid. */ je1.s.error= je2.s.error= 0; @@ -2481,6 +2493,7 @@ String *Item_func_json_merge_patch::val_str(String *str) json_scan_start(&je2, js2->charset(),(const uchar *) js2->ptr(), (const uchar *) js2->ptr() + js2->length()); + je2.killed_ptr= (uchar*)&thd->killed; if (merge_to_null) { @@ -2502,6 +2515,7 @@ String *Item_func_json_merge_patch::val_str(String *str) json_scan_start(&je1, js1->charset(),(const uchar *) js1->ptr(), (const uchar *) js1->ptr() + js1->length()); + je1.killed_ptr= (uchar*)&thd->killed; if (do_merge_patch(str, &je1, &je2, &empty_result)) goto error_return; @@ -2530,7 +2544,8 @@ cont_point: json_scan_start(&je1, js1->charset(),(const uchar *) js1->ptr(), (const uchar *) js1->ptr() + js1->length()); - if (json_nice(&je1, str, Item_func_json_format::LOOSE)) + je1.killed_ptr= (uchar*)&thd->killed; + if (json_nice(&je1, str, Item_func_json_format::LOOSE) || thd->killed) goto error_return; null_value= 0; @@ -2541,6 +2556,7 @@ error_return: report_json_error(js1, &je1, 0); if (je2.s.error) report_json_error(js2, &je2, n_arg); + thd->check_killed(); // to get the error message right null_return: null_value= 1; return NULL; @@ -2777,6 +2793,7 @@ String *Item_func_json_insert::val_str(String *str) String *js= 
args[0]->val_json(&tmp_js); uint n_arg, n_path; json_string_t key_name; + THD *thd= current_thd; DBUG_ASSERT(fixed()); @@ -2817,6 +2834,7 @@ String *Item_func_json_insert::val_str(String *str) json_scan_start(&je, js->charset(),(const uchar *) js->ptr(), (const uchar *) js->ptr() + js->length()); + je.killed_ptr= (uchar*)&thd->killed; if (c_path->p.last_step < c_path->p.steps) goto v_found; @@ -2998,7 +3016,8 @@ continue_point: json_scan_start(&je, js->charset(),(const uchar *) js->ptr(), (const uchar *) js->ptr() + js->length()); - if (json_nice(&je, str, Item_func_json_format::LOOSE)) + je.killed_ptr= (uchar*)&thd->killed; + if (json_nice(&je, str, Item_func_json_format::LOOSE) || thd->check_killed()) goto js_error; return str; @@ -3028,6 +3047,7 @@ String *Item_func_json_remove::val_str(String *str) String *js= args[0]->val_json(&tmp_js); uint n_arg, n_path; json_string_t key_name; + THD *thd= current_thd; DBUG_ASSERT(fixed()); @@ -3074,6 +3094,7 @@ String *Item_func_json_remove::val_str(String *str) json_scan_start(&je, js->charset(),(const uchar *) js->ptr(), (const uchar *) js->ptr() + js->length()); + je.killed_ptr= (uchar*)&thd->killed; c_path->cur_step= c_path->p.steps; @@ -3182,7 +3203,8 @@ v_found: json_scan_start(&je, js->charset(),(const uchar *) js->ptr(), (const uchar *) js->ptr() + js->length()); - if (json_nice(&je, str, Item_func_json_format::LOOSE)) + je.killed_ptr= (uchar*)&thd->killed; + if (json_nice(&je, str, Item_func_json_format::LOOSE) || thd->check_killed()) goto js_error; null_value= 0; @@ -3578,6 +3600,7 @@ String *Item_func_json_format::val_str(String *str) String *js= args[0]->val_json(&tmp_js); json_engine_t je; int tab_size= 4; + THD *thd= current_thd; if ((null_value= args[0]->null_value)) return 0; @@ -3601,8 +3624,9 @@ String *Item_func_json_format::val_str(String *str) json_scan_start(&je, js->charset(), (const uchar *) js->ptr(), (const uchar *) js->ptr()+js->length()); + je.killed_ptr= (uchar*)&thd->killed; - if 
(json_nice(&je, str, fmt, tab_size)) + if (json_nice(&je, str, fmt, tab_size) || thd->check_killed()) { null_value= 1; report_json_error(js, &je, 0); diff --git a/strings/json_lib.c b/strings/json_lib.c index 49f29903ed5..697db18136b 100644 --- a/strings/json_lib.c +++ b/strings/json_lib.c @@ -807,10 +807,13 @@ static json_state_handler json_actions[NR_JSON_STATES][NR_C_CLASSES]= int json_scan_start(json_engine_t *je, CHARSET_INFO *i_cs, const uchar *str, const uchar *end) { + static const uchar die_another_day= 0; + json_string_setup(&je->s, i_cs, str, end); je->stack[0]= JST_DONE; je->stack_p= 0; je->state= JST_VALUE; + je->killed_ptr = (uchar*)&die_another_day; return 0; } @@ -971,7 +974,7 @@ int json_scan_next(json_engine_t *j) int t_next; get_first_nonspace(&j->s, &t_next, &j->sav_c_len); - return json_actions[j->state][t_next](j); + return *j->killed_ptr || json_actions[j->state][t_next](j); } |