author     Michael Widenius <monty@askmonty.org>   2011-03-09 19:45:48 +0200
committer  Michael Widenius <monty@askmonty.org>   2011-03-09 19:45:48 +0200
commit     e6b0be38f9ea03b4f657b210755f6891ce2a30f0 (patch)
tree       b939c86916f6ab3745286dd9e19928e0c4041f13 /sql/sql_union.cc
parent     251a5fa97598c8f0e7e0759880503d0932ca2918 (diff)
download   mariadb-git-e6b0be38f9ea03b4f657b210755f6891ce2a30f0.tar.gz
Bug fix for lp:732124 union + limit returns wrong result
mysql-test/r/union.result:
  Added test for lp:732124
mysql-test/t/union.test:
  Added test for lp:732124
sql/sp_rcontext.cc:
  Updated function definition for ::send_data()
sql/sp_rcontext.h:
  Updated function definition for ::send_data()
sql/sql_analyse.cc:
  Test if send_data() returned an error
sql/sql_class.cc:
  Updated function definition for ::send_data()
sql/sql_class.h:
  Changed select_result::send_data(List<Item> &items) to return -1 for a
  duplicate row that should not be counted as part of LIMIT
sql/sql_cursor.cc:
  Check if send_data() returned an error
sql/sql_delete.cc:
  Updated function definition for ::send_data()
sql/sql_insert.cc:
  Updated function definition for ::send_data()
sql/sql_select.cc:
  Don't count rows which send_data() tells you to ignore
sql/sql_union.cc:
  Inform the caller that the row should be ignored. This is the real bug fix
  for lp:732124
sql/sql_update.cc:
  Updated function definition for ::send_data()
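The core of the change is the new return contract of select_result::send_data():
0 means the row was stored or sent, a value greater than 0 is a hard error, and
-1 means the row was dropped as a duplicate by the UNION temp table and must not
be counted toward LIMIT. Below is a minimal standalone C++ sketch of that
counting rule only; std::set stands in for the temp table's unique index, and
none of these names are server identifiers.

#include <cstdio>
#include <set>

/* Stand-in for select_union::send_data() with UNION DISTINCT semantics. */
static int send_data(std::set<int> &tmp_table, int value)
{
  if (!tmp_table.insert(value).second)
    return -1;                      /* duplicate key: ignore the row        */
  return 0;                         /* row stored in the temp table         */
}

int main()
{
  std::set<int> tmp_table;
  const int rows[]= {1, 1, 2, 2, 3};    /* rows produced by the SELECT arms */
  const int limit= 3;
  int sent= 0;

  for (int row : rows)
  {
    if (sent >= limit)
      break;
    int rc= send_data(tmp_table, row);
    if (rc > 0)
      return 1;                     /* real error: abort                    */
    if (rc == 0)
      sent++;                       /* only stored rows count toward LIMIT  */
    /* rc == -1: duplicate; the limit counter must not move. Counting these
       rows is what the bug report describes as UNION + LIMIT returning a
       wrong result. */
  }
  printf("distinct rows kept: %d (expected 3)\n", sent);
  return 0;
}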
Diffstat (limited to 'sql/sql_union.cc')
-rw-r--r--   sql/sql_union.cc   10
1 file changed, 9 insertions(+), 1 deletion(-)
diff --git a/sql/sql_union.cc b/sql/sql_union.cc
index 2cedce497b6..d173c761311 100644
--- a/sql/sql_union.cc
+++ b/sql/sql_union.cc
@@ -49,7 +49,7 @@ int select_union::prepare(List<Item> &list, SELECT_LEX_UNIT *u)
 }
-bool select_union::send_data(List<Item> &values)
+int select_union::send_data(List<Item> &values)
 {
   int error= 0;
   if (unit->offset_limit_cnt)
@@ -63,6 +63,14 @@ bool select_union::send_data(List<Item> &values)
   if ((error= table->file->ha_write_row(table->record[0])))
   {
+    if (error == HA_ERR_FOUND_DUPP_KEY)
+    {
+      /*
+        Inform upper level that we found a duplicate key, that should not
+        be counted as part of limit
+      */
+      return -1;
+    }
     /* create_internal_tmp_table_from_heap will generate error if needed */
     if (table->file->is_fatal_error(error, HA_CHECK_DUP) &&
         create_internal_tmp_table_from_heap(thd, table, &tmp_table_param, error, 1))
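For context: in the unchanged code above, is_fatal_error(error, HA_CHECK_DUP)
treats a duplicate key as non-fatal, so before this patch a duplicate row fell
through the branch and send_data() reported success, letting the caller count
it against LIMIT; the new early return surfaces it as -1 instead. A toy
classification that mirrors just that ordering (the constants and names below
are stand-ins, not the real handler API):

#include <cstdio>

/* Toy error codes; the real ones come from include/my_base.h. */
enum { WRITE_OK= 0, ERR_DUP_KEY= 121, ERR_OTHER= 135 };

/* Mirrors the ordering of the patched branch in select_union::send_data():
   0 = row written, -1 = duplicate key (skip the row, LIMIT untouched),
   1 = any other write error. */
static int classify_write(int err)
{
  if (err == WRITE_OK)
    return 0;
  if (err == ERR_DUP_KEY)     /* checked before the generic error handling */
    return -1;
  return 1;
}

int main()
{
  printf("%d %d %d\n",
         classify_write(WRITE_OK),     /* 0  */
         classify_write(ERR_DUP_KEY),  /* -1 */
         classify_write(ERR_OTHER));   /* 1  */
  return 0;
}

Anything other than a duplicate key is still left to the pre-existing
fatal-error / heap-to-disk conversion path shown in the diff.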