author     Dmitry Shulga <Dmitry.Shulga@oracle.com>    2011-03-15 17:36:12 +0600
committer  Dmitry Shulga <Dmitry.Shulga@oracle.com>    2011-03-15 17:36:12 +0600
commit     6c2f5e306ca3fa621cd25cedd49181d6e0d5cbc6 (patch)
tree       a5dc8beae05172872e0c5adc5f06bb1144c0be41 /sql
parent     8da2b4f5d756c3b6993cd4be734cf2a49eeb81a5 (diff)
Fixed Bug#11764168 "56976: SEVERE DENIAL OF SERVICE IN PREPARED STATEMENTS".
The problem was that the server did not check the resulting size of a prepared statement parameter set through the mysql_send_long_data() API. By calling mysql_send_long_data() several times it was possible to build an overly big string and thus force the server to allocate memory for it; there was no way to limit this allocation.

The solution is to check the size of the accumulated string against the value of the max_long_data_size start-up parameter. When the intermediate string exceeds the max_long_data_size value, an appropriate error message is emitted.

We can't use the existing max_allowed_packet parameter for this purpose, since its value is limited to 1GB and using it as a limit for data set through the mysql_send_long_data() API would therefore have been an incompatible change.

The newly introduced max_long_data_size parameter takes its value from max_allowed_packet unless it is specified explicitly. The new parameter is marked as deprecated and will eventually be replaced by max_allowed_packet. The value of max_long_data_size can be set only at server startup.
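For context, the API in question streams a parameter value to the server in chunks before execution; every COM_STMT_SEND_LONG_DATA packet appends to the same Item_param buffer, which is exactly the accumulation the new max_long_data_size check bounds. A minimal client-side sketch using the prepared-statement C API (mysql_stmt_send_long_data() is the current name of the call the commit message refers to; the table t1, column blob_col, and chunk size are illustrative):

```c
#include <mysql.h>
#include <stdio.h>
#include <string.h>

/*
  Stream one BLOB parameter to the server in pieces.
  Each mysql_stmt_send_long_data() call makes the server append to the
  same parameter buffer; this patch caps the accumulated length at
  max_long_data_size.
*/
int send_in_chunks(MYSQL *conn, const char *data, size_t len)
{
  MYSQL_STMT *stmt= mysql_stmt_init(conn);
  const char *query= "INSERT INTO t1 (blob_col) VALUES (?)";
  MYSQL_BIND bind;
  size_t offset, chunk;

  if (mysql_stmt_prepare(stmt, query, strlen(query)))
    return 1;

  memset(&bind, 0, sizeof(bind));
  bind.buffer_type= MYSQL_TYPE_LONG_BLOB;
  if (mysql_stmt_bind_param(stmt, &bind))
    return 1;

  for (offset= 0; offset < len; offset+= chunk)
  {
    chunk= (len - offset < 8192) ? len - offset : 8192;
    if (mysql_stmt_send_long_data(stmt, 0, data + offset, (unsigned long) chunk))
      return 1;                           /* client-side failure */
  }

  /* With the fix, exceeding max_long_data_size is reported here,
     since COM_STMT_SEND_LONG_DATA itself carries no server reply. */
  if (mysql_stmt_execute(stmt))
  {
    fprintf(stderr, "execute: %s\n", mysql_stmt_error(stmt));
    return 1;
  }
  mysql_stmt_close(stmt);
  return 0;
}
```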
Diffstat (limited to 'sql')
-rw-r--r--  sql/item.cc         10
-rw-r--r--  sql/mysql_priv.h     1
-rw-r--r--  sql/mysqld.cc       28
-rw-r--r--  sql/set_var.cc       6
-rw-r--r--  sql/sql_prepare.cc  56
5 files changed, 87 insertions(+), 14 deletions(-)
diff --git a/sql/item.cc b/sql/item.cc
index c01671aff56..357cc6d7fe4 100644
--- a/sql/item.cc
+++ b/sql/item.cc
@@ -2742,6 +2742,16 @@ bool Item_param::set_longdata(const char *str, ulong length)
(here), and first have to concatenate all pieces together,
write query to the binary log and only then perform conversion.
*/
+ if (str_value.length() + length > max_long_data_size)
+ {
+ my_message(ER_UNKNOWN_ERROR,
+ "Parameter of prepared statement which is set through "
+ "mysql_send_long_data() is longer than "
+ "'max_long_data_size' bytes",
+ MYF(0));
+ DBUG_RETURN(true);
+ }
+
if (str_value.append(str, length, &my_charset_bin))
DBUG_RETURN(TRUE);
state= LONG_DATA_VALUE;
diff --git a/sql/mysql_priv.h b/sql/mysql_priv.h
index 67631b265ab..8f9a9080d12 100644
--- a/sql/mysql_priv.h
+++ b/sql/mysql_priv.h
@@ -1965,6 +1965,7 @@ extern my_bool relay_log_purge, opt_innodb_safe_binlog, opt_innodb;
extern uint test_flags,select_errors,ha_open_options;
extern uint protocol_version, mysqld_port, dropping_tables;
extern uint delay_key_write_options;
+extern ulong max_long_data_size;
#endif /* MYSQL_SERVER */
#if defined MYSQL_SERVER || defined INNODB_COMPATIBILITY_HOOKS
extern MYSQL_PLUGIN_IMPORT uint lower_case_table_names;
diff --git a/sql/mysqld.cc b/sql/mysqld.cc
index 694d64c81df..46376a08ec9 100644
--- a/sql/mysqld.cc
+++ b/sql/mysqld.cc
@@ -409,6 +409,7 @@ TYPELIB log_output_typelib= {array_elements(log_output_names)-1,"",
/* the default log output is log tables */
static bool lower_case_table_names_used= 0;
+static bool max_long_data_size_used= false;
static bool volatile select_thread_in_use, signal_thread_in_use;
static bool volatile ready_to_exit;
static my_bool opt_debugging= 0, opt_external_locking= 0, opt_console= 0;
@@ -574,6 +575,11 @@ ulong delayed_insert_errors,flush_time;
ulong specialflag=0;
ulong binlog_cache_use= 0, binlog_cache_disk_use= 0;
ulong max_connections, max_connect_errors;
+/*
+ Maximum length of parameter value which can be set through
+ mysql_send_long_data() call.
+*/
+ulong max_long_data_size;
uint max_user_connections= 0;
/**
Limit of the total number of prepared statements in the server.
@@ -5800,7 +5806,8 @@ enum options_mysqld
OPT_SLOW_QUERY_LOG_FILE,
OPT_IGNORE_BUILTIN_INNODB,
OPT_BINLOG_DIRECT_NON_TRANS_UPDATE,
- OPT_DEFAULT_CHARACTER_SET_OLD
+ OPT_DEFAULT_CHARACTER_SET_OLD,
+ OPT_MAX_LONG_DATA_SIZE
};
@@ -6880,6 +6887,13 @@ thread is in the relay logs.",
&global_system_variables.max_length_for_sort_data,
&max_system_variables.max_length_for_sort_data, 0, GET_ULONG,
REQUIRED_ARG, 1024, 4, 8192*1024L, 0, 1, 0},
+ {"max_long_data_size", OPT_MAX_LONG_DATA_SIZE,
+ "The maximum size of prepared statement parameter which can be provided "
+ "through mysql_send_long_data() API call. "
+ "Deprecated option; use max_allowed_packet instead.",
+ &max_long_data_size,
+ &max_long_data_size, 0, GET_ULONG,
+ REQUIRED_ARG, 1024*1024L, 1024, UINT_MAX32, MALLOC_OVERHEAD, 1, 0},
{"max_prepared_stmt_count", OPT_MAX_PREPARED_STMT_COUNT,
"Maximum number of prepared statements in the server.",
&max_prepared_stmt_count, &max_prepared_stmt_count,
@@ -8688,6 +8702,10 @@ mysqld_get_one_option(int optid,
}
break;
#endif /* defined(ENABLED_DEBUG_SYNC) */
+ case OPT_MAX_LONG_DATA_SIZE:
+ max_long_data_size_used= true;
+ WARN_DEPRECATED(NULL, VER_CELOSIA, "--max_long_data_size", "--max_allowed_packet");
+ break;
}
return 0;
}
@@ -8849,6 +8867,14 @@ static int get_options(int *argc,char **argv)
else
pool_of_threads_scheduler(&thread_scheduler); /* purecov: tested */
#endif
+
+ /*
+ If max_long_data_size is not specified explicitly use
+ value of max_allowed_packet.
+ */
+ if (!max_long_data_size_used)
+ max_long_data_size= global_system_variables.max_allowed_packet;
+
return 0;
}
diff --git a/sql/set_var.cc b/sql/set_var.cc
index 831b68bbe14..333fb90c795 100644
--- a/sql/set_var.cc
+++ b/sql/set_var.cc
@@ -394,6 +394,12 @@ static sys_var_thd_ulong sys_max_seeks_for_key(&vars, "max_seeks_for_key",
&SV::max_seeks_for_key);
static sys_var_thd_ulong sys_max_length_for_sort_data(&vars, "max_length_for_sort_data",
&SV::max_length_for_sort_data);
+static sys_var_const sys_max_long_data_size(&vars,
+ "max_long_data_size",
+ OPT_GLOBAL, SHOW_LONG,
+ (uchar*)
+ &max_long_data_size);
+
#ifndef TO_BE_DELETED /* Alias for max_join_size */
static sys_var_thd_ha_rows sys_sql_max_join_size(&vars, "sql_max_join_size",
&SV::max_join_size,
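
Because the variable is registered through sys_var_const, it is visible (read-only) in SHOW VARIABLES; together with the get_options() fallback above, a client can confirm the effective limit. A minimal sketch, assuming a reachable server and standard libmysqlclient (connection parameters are placeholders):

```c
#include <mysql.h>
#include <stdio.h>

int main(void)
{
  MYSQL *conn= mysql_init(NULL);

  /* Placeholder connection parameters for this sketch. */
  if (!mysql_real_connect(conn, "localhost", "user", "password",
                          NULL, 0, NULL, 0))
  {
    fprintf(stderr, "connect: %s\n", mysql_error(conn));
    return 1;
  }

  if (!mysql_query(conn, "SHOW VARIABLES LIKE 'max_long_data_size'"))
  {
    MYSQL_RES *res= mysql_store_result(conn);
    MYSQL_ROW row= mysql_fetch_row(res);
    if (row)
      printf("%s = %s\n", row[0], row[1]);  /* equals max_allowed_packet
                                               unless set explicitly */
    mysql_free_result(res);
  }

  mysql_close(conn);
  return 0;
}
```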
diff --git a/sql/sql_prepare.cc b/sql/sql_prepare.cc
index da03bfbfedb..a94d1e519db 100644
--- a/sql/sql_prepare.cc
+++ b/sql/sql_prepare.cc
@@ -2730,6 +2730,32 @@ void mysql_sql_stmt_close(THD *thd)
}
}
+
+class Set_longdata_error_handler : public Internal_error_handler
+{
+public:
+ Set_longdata_error_handler(Prepared_statement *statement)
+ : stmt(statement)
+ { }
+
+public:
+ bool handle_error(uint sql_errno,
+ const char *message,
+ MYSQL_ERROR::enum_warning_level level,
+ THD *)
+ {
+ stmt->state= Query_arena::ERROR;
+ stmt->last_errno= sql_errno;
+ strncpy(stmt->last_error, message, MYSQL_ERRMSG_SIZE);
+
+ return TRUE;
+ }
+
+private:
+ Prepared_statement *stmt;
+};
+
+
/**
Handle long data in pieces from client.
@@ -2786,16 +2812,19 @@ void mysql_stmt_get_longdata(THD *thd, char *packet, ulong packet_length)
param= stmt->param_array[param_number];
+ Set_longdata_error_handler err_handler(stmt);
+ /*
+ Install handler that will catch any errors that can be generated
+ during execution of Item_param::set_longdata() and propagate
+ them to Statement::last_error.
+ */
+ thd->push_internal_handler(&err_handler);
#ifndef EMBEDDED_LIBRARY
- if (param->set_longdata(packet, (ulong) (packet_end - packet)))
+ param->set_longdata(packet, (ulong) (packet_end - packet));
#else
- if (param->set_longdata(thd->extra_data, thd->extra_length))
+ param->set_longdata(thd->extra_data, thd->extra_length);
#endif
- {
- stmt->state= Query_arena::ERROR;
- stmt->last_errno= ER_OUTOFMEMORY;
- sprintf(stmt->last_error, ER(ER_OUTOFMEMORY), 0);
- }
+ thd->pop_internal_handler();
general_log_print(thd, thd->command, NullS);
@@ -3257,6 +3286,13 @@ Prepared_statement::execute_loop(String *expanded_query,
bool error;
int reprepare_attempt= 0;
+ /* Check if we got an error when sending long data */
+ if (state == Query_arena::ERROR)
+ {
+ my_message(last_errno, last_error, MYF(0));
+ return TRUE;
+ }
+
if (set_parameters(expanded_query, packet, packet_end))
return TRUE;
@@ -3497,12 +3533,6 @@ bool Prepared_statement::execute(String *expanded_query, bool open_cursor)
status_var_increment(thd->status_var.com_stmt_execute);
- /* Check if we got an error when sending long data */
- if (state == Query_arena::ERROR)
- {
- my_message(last_errno, last_error, MYF(0));
- return TRUE;
- }
if (flags & (uint) IS_IN_USE)
{
my_error(ER_PS_NO_RECURSION, MYF(0));
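
The sql_prepare.cc changes also defer error reporting: COM_STMT_SEND_LONG_DATA has no response packet, so a failure inside Item_param::set_longdata() cannot be returned to the client immediately. The new Set_longdata_error_handler records it on the statement, and execute_loop() raises it when the client eventually executes. A self-contained C sketch that models this deferred-error flow with illustrative stand-in types and constants (not the server's own):

```c
#include <stdio.h>
#include <string.h>

/* Illustrative stand-ins for the server-side statement state. */
struct stmt_state {
  int    has_error;             /* Query_arena::ERROR analogue   */
  int    last_errno;
  char   last_error[512];       /* MYSQL_ERRMSG_SIZE analogue    */
  size_t value_len;             /* accumulated long-data length  */
};

enum { MAX_LONG_DATA_SIZE = 1024 };   /* stand-in for the new option */

/* COM_STMT_SEND_LONG_DATA: append a chunk, buffer any error. */
static void stmt_send_long_data(struct stmt_state *stmt,
                                const char *chunk, size_t len)
{
  if (stmt->value_len + len > MAX_LONG_DATA_SIZE)
  {
    /* No response packet exists for this command, so remember the
       error instead of reporting it now (the role played by
       Set_longdata_error_handler in the patch). */
    stmt->has_error= 1;
    stmt->last_errno= 1105;     /* ER_UNKNOWN_ERROR */
    strcpy(stmt->last_error,
           "Parameter set through mysql_send_long_data() is longer "
           "than 'max_long_data_size' bytes");
    return;
  }
  stmt->value_len+= len;
}

/* COM_STMT_EXECUTE: first chance to report the buffered error,
   as execute_loop() now does. */
static int stmt_execute(struct stmt_state *stmt)
{
  if (stmt->has_error)
  {
    fprintf(stderr, "ERROR %d: %s\n", stmt->last_errno, stmt->last_error);
    return 1;
  }
  return 0;
}

int main(void)
{
  struct stmt_state stmt= {0, 0, "", 0};
  char chunk[600];
  memset(chunk, 'x', sizeof(chunk));

  stmt_send_long_data(&stmt, chunk, sizeof(chunk));
  stmt_send_long_data(&stmt, chunk, sizeof(chunk));  /* exceeds the cap */
  return stmt_execute(&stmt);                        /* error shows up here */
}
```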