diff options
author | Oleksii Chuprykov <ochuprykov@mirantis.com> | 2014-10-27 13:33:11 +0200 |
---|---|---|
committer | Oleksii Chuprykov <ochuprykov@mirantis.com> | 2014-11-19 16:02:42 +0200 |
commit | 4b2058b8c14197e019b076640e2b3d41aa8a8fe0 (patch) | |
tree | 7fcac317dd077f29727b00ce2b0182a2a2fd354b | |
parent | 8bb12c02d44652b488627d0bbd0f3f131322fcd9 (diff) | |
download | oslo-db-4b2058b8c14197e019b076640e2b3d41aa8a8fe0.tar.gz |
Add exception filter for _sqlite_dupe_key_error
We can get a 'PRIMARY KEY must be unique' error on some
platforms and versions of the sqlite library while trying to
insert a row with the same primary key that already exists.
In this case oslo.db should raise a DBDuplicateEntry error.
Add the corresponding filter to _sqlite_dupe_key_error.
Closes-Bug: #1386145
Change-Id: Ifafd6a8e0b613a31e596043071aef4d410a976f2
-rw-r--r-- | oslo/db/sqlalchemy/exc_filters.py | 19 | ||||
-rw-r--r-- | tests/sqlalchemy/test_exc_filters.py | 6 |
2 files changed, 22 insertions, 3 deletions
diff --git a/oslo/db/sqlalchemy/exc_filters.py b/oslo/db/sqlalchemy/exc_filters.py index b3da401..3c3956f 100644 --- a/oslo/db/sqlalchemy/exc_filters.py +++ b/oslo/db/sqlalchemy/exc_filters.py @@ -147,7 +147,8 @@ def _default_dupe_key_error(integrity_error, match, engine_name, @filters("sqlite", sqla_exc.IntegrityError, (r"^.*columns?(?P<columns>[^)]+)(is|are)\s+not\s+unique$", - r"^.*UNIQUE\s+constraint\s+failed:\s+(?P<columns>.+)$")) + r"^.*UNIQUE\s+constraint\s+failed:\s+(?P<columns>.+)$", + r"^.*PRIMARY\s+KEY\s+must\s+be\s+unique.*$")) def _sqlite_dupe_key_error(integrity_error, match, engine_name, is_disconnect): """Filter for SQLite duplicate key error. @@ -162,9 +163,21 @@ def _sqlite_dupe_key_error(integrity_error, match, engine_name, is_disconnect): 1 column - (IntegrityError) UNIQUE constraint failed: tbl.k1 N columns - (IntegrityError) UNIQUE constraint failed: tbl.k1, tbl.k2 + sqlite since 3.8.2: + (IntegrityError) PRIMARY KEY must be unique + """ - columns = match.group('columns') - columns = [c.split('.')[-1] for c in columns.strip().split(", ")] + columns = [] + # NOTE(ochuprykov): We can get here by last filter in which there are no + # groups. Trying to access the substring that matched by + # the group will lead to IndexError. In this case just + # pass empty list to exception.DBDuplicateEntry + try: + columns = match.group('columns') + columns = [c.split('.')[-1] for c in columns.strip().split(", ")] + except IndexError: + pass + raise exception.DBDuplicateEntry(columns, integrity_error) diff --git a/tests/sqlalchemy/test_exc_filters.py b/tests/sqlalchemy/test_exc_filters.py index 68c583c..6145c62 100644 --- a/tests/sqlalchemy/test_exc_filters.py +++ b/tests/sqlalchemy/test_exc_filters.py @@ -334,6 +334,12 @@ class TestDuplicate(TestsExceptionFilter): "sqlite", 'UNIQUE constraint failed: tbl.a, tbl.b') + def test_sqlite_dupe_primary_key(self): + self._run_dupe_constraint_test( + "sqlite", + "PRIMARY KEY must be unique 'insert into t values(10)'", + expected_columns=[]) + def test_mysql_mysqldb(self): self._run_dupe_constraint_test( "mysql", |