summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorDaniele Varrazzo <daniele.varrazzo@gmail.com>2021-05-20 16:59:43 +0200
committerDaniele Varrazzo <daniele.varrazzo@gmail.com>2021-05-20 16:59:43 +0200
commit19ddbc47ca9520971a6406b19ac26060afd6fc5b (patch)
treeb36f5b1da7f80694f1d62f6a7cfe80e86638f1f1
parentcdc83d64dbbabd1252e3940bf47e0537e9de740e (diff)
parent3db4abcfa43cca5f729fac6bdbd68845f519395e (diff)
downloadpsycopg2-19ddbc47ca9520971a6406b19ac26060afd6fc5b.tar.gz
Merge branch 'rm-2.7'
-rw-r--r--NEWS4
-rw-r--r--doc/src/conf.py9
-rw-r--r--doc/src/install.rst3
-rwxr-xr-xdoc/src/tools/lib/dbapi_extension.py1
-rw-r--r--doc/src/tools/lib/sql_role.py1
-rw-r--r--doc/src/tools/lib/ticket_role.py1
-rw-r--r--doc/src/tools/make_sqlstate_docs.py7
-rw-r--r--lib/_ipaddress.py5
-rw-r--r--lib/_json.py17
-rw-r--r--lib/_lru_cache.py104
-rw-r--r--lib/_range.py24
-rw-r--r--lib/compat.py19
-rw-r--r--lib/extensions.py6
-rw-r--r--lib/extras.py160
-rw-r--r--lib/pool.py2
-rw-r--r--lib/sql.py38
-rw-r--r--lib/tz.py2
-rw-r--r--psycopg/adapter_asis.c4
-rw-r--r--psycopg/adapter_binary.c9
-rw-r--r--psycopg/adapter_pdecimal.c4
-rw-r--r--psycopg/adapter_pfloat.c4
-rw-r--r--psycopg/adapter_pint.c10
-rw-r--r--psycopg/lobject_int.c4
-rw-r--r--psycopg/microprotocols.c6
-rw-r--r--psycopg/psycopgmodule.c26
-rw-r--r--psycopg/python.h60
-rw-r--r--psycopg/typecast.c8
-rw-r--r--psycopg/typecast_basic.c23
-rw-r--r--psycopg/typecast_binary.c40
-rw-r--r--psycopg/utils.c21
-rwxr-xr-xscripts/appveyor.py13
-rwxr-xr-xscripts/make_errorcodes.py17
-rwxr-xr-xscripts/make_errors.py11
-rwxr-xr-xscripts/refcounter.py21
-rw-r--r--setup.py52
-rwxr-xr-xtests/__init__.py5
-rw-r--r--tests/dbapi20.py11
-rw-r--r--tests/dbapi20_tpc.py2
-rwxr-xr-xtests/test_async.py3
-rwxr-xr-xtests/test_async_keyword.py225
-rwxr-xr-xtests/test_cancel.py1
-rwxr-xr-xtests/test_connection.py43
-rwxr-xr-xtests/test_copy.py59
-rwxr-xr-xtests/test_cursor.py34
-rwxr-xr-xtests/test_dates.py4
-rwxr-xr-xtests/test_errcodes.py2
-rwxr-xr-xtests/test_extras_dictcursor.py81
-rwxr-xr-xtests/test_fast_executemany.py10
-rwxr-xr-xtests/test_green.py6
-rwxr-xr-xtests/test_ipaddress.py1
-rwxr-xr-xtests/test_lobject.py12
-rwxr-xr-xtests/test_module.py10
-rwxr-xr-xtests/test_notify.py18
-rwxr-xr-xtests/test_quote.py70
-rwxr-xr-xtests/test_replication.py10
-rwxr-xr-xtests/test_sql.py18
-rwxr-xr-xtests/test_types_basic.py132
-rwxr-xr-xtests/test_types_extras.py114
-rwxr-xr-xtests/test_with.py6
-rw-r--r--tests/testconfig.py10
-rw-r--r--tests/testutils.py54
-rw-r--r--tox.ini2
62 files changed, 372 insertions, 1307 deletions
diff --git a/NEWS b/NEWS
index bdd30df..58a7fb3 100644
--- a/NEWS
+++ b/NEWS
@@ -4,7 +4,7 @@ Current release
What's new in psycopg 2.9
-------------------------
-- Dropped support for Python 3.4, 3.5 (:tickets:#1000, #1197).
+- Dropped support for Python 2.7, 3.4, 3.5 (:tickets:#1198, #1000, #1197).
- Reclassified SQLSTATE connection exceptions (08XXX) as
`~psycopg2.errors.OperationalError` (subclass of previously used
`~psycopg2.errors.DatabaseError`) (:ticket:`#1148`).
@@ -25,7 +25,7 @@ What's new in psycopg 2.8.6
(:ticket:`#1101`).
- Fixed search of mxDateTime headers in virtualenvs (:ticket:`#996`).
- Added missing values from errorcodes (:ticket:`#1133`).
-- `cursor.query` reports the query of the last :sql:`COPY` opearation too
+- `cursor.query` reports the query of the last :sql:`COPY` operation too
(:ticket:`#1141`).
- `~psycopg2.errorcodes` map and `~psycopg2.errors` classes updated to
PostgreSQL 13.
diff --git a/doc/src/conf.py b/doc/src/conf.py
index 1b65c34..94f154e 100644
--- a/doc/src/conf.py
+++ b/doc/src/conf.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
#
# Psycopg documentation build configuration file, created by
# sphinx-quickstart on Sun Feb 7 13:48:41 2010.
@@ -48,9 +47,9 @@ source_suffix = '.rst'
master_doc = 'index'
# General information about the project.
-project = u'Psycopg'
+project = 'Psycopg'
copyright = (
- u'2001-2020, Federico Di Gregorio, Daniele Varrazzo, The Psycopg Team'
+ '2001-2020, Federico Di Gregorio, Daniele Varrazzo, The Psycopg Team'
)
# The version info for the project you're documenting, acts as replacement for
@@ -235,8 +234,8 @@ latex_documents = [
(
'index',
'psycopg.tex',
- u'Psycopg Documentation',
- u'Federico Di Gregorio',
+ 'Psycopg Documentation',
+ 'Federico Di Gregorio',
'manual',
)
]
diff --git a/doc/src/install.rst b/doc/src/install.rst
index a685cde..c771c9b 100644
--- a/doc/src/install.rst
+++ b/doc/src/install.rst
@@ -131,8 +131,7 @@ The current `!psycopg2` implementation supports:
..
NOTE: keep consistent with setup.py and the /features/ page.
-- Python version 2.7
-- Python 3 versions from 3.6 to 3.9
+- Python versions from 3.6 to 3.9
- PostgreSQL server versions from 7.4 to 13
- PostgreSQL client library version from 9.1
diff --git a/doc/src/tools/lib/dbapi_extension.py b/doc/src/tools/lib/dbapi_extension.py
index 23d9165..7fc776a 100755
--- a/doc/src/tools/lib/dbapi_extension.py
+++ b/doc/src/tools/lib/dbapi_extension.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
"""
extension
~~~~~~~~~
diff --git a/doc/src/tools/lib/sql_role.py b/doc/src/tools/lib/sql_role.py
index 43347b4..1731546 100644
--- a/doc/src/tools/lib/sql_role.py
+++ b/doc/src/tools/lib/sql_role.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
"""
sql role
~~~~~~~~
diff --git a/doc/src/tools/lib/ticket_role.py b/doc/src/tools/lib/ticket_role.py
index 0ee3d63..8ba87cb 100644
--- a/doc/src/tools/lib/ticket_role.py
+++ b/doc/src/tools/lib/ticket_role.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
"""
ticket role
~~~~~~~~~~~
diff --git a/doc/src/tools/make_sqlstate_docs.py b/doc/src/tools/make_sqlstate_docs.py
index 3e1a026..16fd9c9 100644
--- a/doc/src/tools/make_sqlstate_docs.py
+++ b/doc/src/tools/make_sqlstate_docs.py
@@ -2,7 +2,6 @@
"""Create the docs table of the sqlstate errors.
"""
-from __future__ import print_function
import re
import sys
@@ -26,8 +25,8 @@ def main():
for k in sorted(sqlstate_errors):
exc = sqlstate_errors[k]
lines.append(Line(
- "``%s``" % k, "`!%s`" % exc.__name__,
- "`!%s`" % get_base_exception(exc).__name__, k))
+ f"``{k}``", f"`!{exc.__name__}`",
+ f"`!{get_base_exception(exc).__name__}`", k))
widths = [max(len(l[c]) for l in lines) for c in range(3)]
h = Line(*(['=' * w for w in widths] + [None]))
@@ -40,7 +39,7 @@ def main():
for l in lines:
cls = l.sqlstate[:2] if l.sqlstate else None
if cls and cls != sqlclass:
- print("**Class %s**: %s" % (cls, sqlclasses[cls]))
+ print(f"**Class {cls}**: {sqlclasses[cls]}")
print(h1)
sqlclass = cls
diff --git a/lib/_ipaddress.py b/lib/_ipaddress.py
index 9350c8d..4094c27 100644
--- a/lib/_ipaddress.py
+++ b/lib/_ipaddress.py
@@ -26,7 +26,6 @@
from psycopg2.extensions import (
new_type, new_array_type, register_type, register_adapter, QuotedString)
-from psycopg2.compat import text_type
# The module is imported on register_ipaddress
ipaddress = None
@@ -78,13 +77,13 @@ def cast_interface(s, cur=None):
if s is None:
return None
# Py2 version force the use of unicode. meh.
- return ipaddress.ip_interface(text_type(s))
+ return ipaddress.ip_interface(str(s))
def cast_network(s, cur=None):
if s is None:
return None
- return ipaddress.ip_network(text_type(s))
+ return ipaddress.ip_network(str(s))
def adapt_ipaddress(obj):
diff --git a/lib/_json.py b/lib/_json.py
index eac3797..4ceaba0 100644
--- a/lib/_json.py
+++ b/lib/_json.py
@@ -32,7 +32,6 @@ import json
from psycopg2._psycopg import ISQLQuote, QuotedString
from psycopg2._psycopg import new_type, new_array_type, register_type
-from psycopg2.compat import PY2
# oids from PostgreSQL 9.2
@@ -44,7 +43,7 @@ JSONB_OID = 3802
JSONBARRAY_OID = 3807
-class Json(object):
+class Json:
"""
An `~psycopg2.extensions.ISQLQuote` wrapper to adapt a Python object to
:sql:`json` data type.
@@ -82,13 +81,9 @@ class Json(object):
qs.prepare(self._conn)
return qs.getquoted()
- if PY2:
- def __str__(self):
- return self.getquoted()
- else:
- def __str__(self):
- # getquoted is binary in Py3
- return self.getquoted().decode('ascii', 'replace')
+ def __str__(self):
+ # getquoted is binary
+ return self.getquoted().decode('ascii', 'replace')
def register_json(conn_or_curs=None, globally=False, loads=None,
@@ -168,7 +163,7 @@ def _create_json_typecasters(oid, array_oid, loads=None, name='JSON'):
JSON = new_type((oid, ), name, typecast_json)
if array_oid is not None:
- JSONARRAY = new_array_type((array_oid, ), "%sARRAY" % name, JSON)
+ JSONARRAY = new_array_type((array_oid, ), f"{name}ARRAY", JSON)
else:
JSONARRAY = None
@@ -199,6 +194,6 @@ def _get_json_oids(conn_or_curs, name='json'):
conn.rollback()
if not r:
- raise conn.ProgrammingError("%s data type not found" % name)
+ raise conn.ProgrammingError(f"{name} data type not found")
return r
diff --git a/lib/_lru_cache.py b/lib/_lru_cache.py
deleted file mode 100644
index 1e2c52d..0000000
--- a/lib/_lru_cache.py
+++ /dev/null
@@ -1,104 +0,0 @@
-"""
-LRU cache implementation for Python 2.7
-
-Ported from http://code.activestate.com/recipes/578078/ and simplified for our
-use (only support maxsize > 0 and positional arguments).
-"""
-
-from collections import namedtuple
-from functools import update_wrapper
-from threading import RLock
-
-_CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"])
-
-
-def lru_cache(maxsize=100):
- """Least-recently-used cache decorator.
-
- Arguments to the cached function must be hashable.
-
- See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
-
- """
- def decorating_function(user_function):
-
- cache = dict()
- stats = [0, 0] # make statistics updateable non-locally
- HITS, MISSES = 0, 1 # names for the stats fields
- cache_get = cache.get # bound method to lookup key or return None
- _len = len # localize the global len() function
- lock = RLock() # linkedlist updates aren't threadsafe
- root = [] # root of the circular doubly linked list
- root[:] = [root, root, None, None] # initialize by pointing to self
- nonlocal_root = [root] # make updateable non-locally
- PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields
-
- assert maxsize and maxsize > 0, "maxsize %s not supported" % maxsize
-
- def wrapper(*args):
- # size limited caching that tracks accesses by recency
- key = args
- with lock:
- link = cache_get(key)
- if link is not None:
- # record recent use of the key by moving it to the
- # front of the list
- root, = nonlocal_root
- link_prev, link_next, key, result = link
- link_prev[NEXT] = link_next
- link_next[PREV] = link_prev
- last = root[PREV]
- last[NEXT] = root[PREV] = link
- link[PREV] = last
- link[NEXT] = root
- stats[HITS] += 1
- return result
- result = user_function(*args)
- with lock:
- root, = nonlocal_root
- if key in cache:
- # getting here means that this same key was added to the
- # cache while the lock was released. since the link
- # update is already done, we need only return the
- # computed result and update the count of misses.
- pass
- elif _len(cache) >= maxsize:
- # use the old root to store the new key and result
- oldroot = root
- oldroot[KEY] = key
- oldroot[RESULT] = result
- # empty the oldest link and make it the new root
- root = nonlocal_root[0] = oldroot[NEXT]
- oldkey = root[KEY]
- # oldvalue = root[RESULT]
- root[KEY] = root[RESULT] = None
- # now update the cache dictionary for the new links
- del cache[oldkey]
- cache[key] = oldroot
- else:
- # put result in a new link at the front of the list
- last = root[PREV]
- link = [last, root, key, result]
- last[NEXT] = root[PREV] = cache[key] = link
- stats[MISSES] += 1
- return result
-
- def cache_info():
- """Report cache statistics"""
- with lock:
- return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache))
-
- def cache_clear():
- """Clear the cache and cache statistics"""
- with lock:
- cache.clear()
- root = nonlocal_root[0]
- root[:] = [root, root, None, None]
- stats[:] = [0, 0]
-
- wrapper.__wrapped__ = user_function
- wrapper.cache_info = cache_info
- wrapper.cache_clear = cache_clear
- return update_wrapper(wrapper, user_function)
-
- return decorating_function
diff --git a/lib/_range.py b/lib/_range.py
index b668fb6..a59bab8 100644
--- a/lib/_range.py
+++ b/lib/_range.py
@@ -30,10 +30,9 @@ import re
from psycopg2._psycopg import ProgrammingError, InterfaceError
from psycopg2.extensions import ISQLQuote, adapt, register_adapter
from psycopg2.extensions import new_type, new_array_type, register_type
-from psycopg2.compat import string_types
-class Range(object):
+class Range:
"""Python representation for a PostgreSQL |range|_ type.
:param lower: lower bound for the range. `!None` means unbound
@@ -48,7 +47,7 @@ class Range(object):
def __init__(self, lower=None, upper=None, bounds='[)', empty=False):
if not empty:
if bounds not in ('[)', '(]', '()', '[]'):
- raise ValueError("bound flags not valid: %r" % bounds)
+ raise ValueError(f"bound flags not valid: {bounds!r}")
self._lower = lower
self._upper = upper
@@ -58,9 +57,9 @@ class Range(object):
def __repr__(self):
if self._bounds is None:
- return "%s(empty=True)" % self.__class__.__name__
+ return f"{self.__class__.__name__}(empty=True)"
else:
- return "%s(%r, %r, %r)" % (self.__class__.__name__,
+ return "{}({!r}, {!r}, {!r})".format(self.__class__.__name__,
self._lower, self._upper, self._bounds)
def __str__(self):
@@ -239,7 +238,7 @@ def register_range(pgrange, pyrange, conn_or_curs, globally=False):
return caster
-class RangeAdapter(object):
+class RangeAdapter:
"""`ISQLQuote` adapter for `Range` subclasses.
This is an abstract class: concrete classes must set a `name` class
@@ -287,7 +286,7 @@ class RangeAdapter(object):
+ b", '" + r._bounds.encode('utf8') + b"')"
-class RangeCaster(object):
+class RangeCaster:
"""Helper class to convert between `Range` and PostgreSQL range types.
Objects of this class are usually created by `register_range()`. Manual
@@ -315,7 +314,7 @@ class RangeCaster(object):
# an implementation detail and is not documented. It is currently used
# for the numeric ranges.
self.adapter = None
- if isinstance(pgrange, string_types):
+ if isinstance(pgrange, str):
self.adapter = type(pgrange, (RangeAdapter,), {})
self.adapter.name = pgrange
else:
@@ -332,7 +331,7 @@ class RangeCaster(object):
self.range = None
try:
- if isinstance(pyrange, string_types):
+ if isinstance(pyrange, str):
self.range = type(pyrange, (Range,), {})
if issubclass(pyrange, Range) and pyrange is not Range:
self.range = pyrange
@@ -392,7 +391,7 @@ where typname = %s and ns.nspname = %s;
if not rec:
raise ProgrammingError(
- "PostgreSQL type '%s' not found" % name)
+ f"PostgreSQL type '{name}' not found")
type, subtype, array = rec
@@ -424,7 +423,7 @@ where typname = %s and ns.nspname = %s;
m = self._re_range.match(s)
if m is None:
- raise InterfaceError("failed to parse range: '%s'" % s)
+ raise InterfaceError(f"failed to parse range: '{s}'")
lower = m.group(3)
if lower is None:
@@ -504,8 +503,7 @@ class NumberRangeAdapter(RangeAdapter):
else:
upper = ''
- return ("'%s%s,%s%s'" % (
- r._bounds[0], lower, upper, r._bounds[1])).encode('ascii')
+ return (f"'{r._bounds[0]}{lower},{upper}{r._bounds[1]}'").encode('ascii')
# TODO: probably won't work with infs, nans and other tricky cases.
diff --git a/lib/compat.py b/lib/compat.py
deleted file mode 100644
index 54606a8..0000000
--- a/lib/compat.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import sys
-
-__all__ = ['string_types', 'text_type', 'lru_cache']
-
-if sys.version_info[0] == 2:
- # Python 2
- PY2 = True
- PY3 = False
- string_types = basestring,
- text_type = unicode
- from ._lru_cache import lru_cache
-
-else:
- # Python 3
- PY2 = False
- PY3 = True
- string_types = str,
- text_type = str
- from functools import lru_cache
diff --git a/lib/extensions.py b/lib/extensions.py
index c4a6618..1de6607 100644
--- a/lib/extensions.py
+++ b/lib/extensions.py
@@ -106,7 +106,7 @@ def register_adapter(typ, callable):
# The SQL_IN class is the official adapter for tuples starting from 2.0.6.
-class SQL_IN(object):
+class SQL_IN:
"""Adapt any iterable to an SQL quotable object."""
def __init__(self, seq):
self._seq = seq
@@ -130,7 +130,7 @@ class SQL_IN(object):
return str(self.getquoted())
-class NoneAdapter(object):
+class NoneAdapter:
"""Adapt None to NULL.
This adapter is not used normally as a fast path in mogrify uses NULL,
@@ -168,7 +168,7 @@ def make_dsn(dsn=None, **kwargs):
tmp.update(kwargs)
kwargs = tmp
- dsn = " ".join(["%s=%s" % (k, _param_escape(str(v)))
+ dsn = " ".join(["{}={}".format(k, _param_escape(str(v)))
for (k, v) in kwargs.items()])
# verify that the returned dsn is valid
diff --git a/lib/extras.py b/lib/extras.py
index 135a3fb..be39165 100644
--- a/lib/extras.py
+++ b/lib/extras.py
@@ -38,7 +38,7 @@ from psycopg2 import extensions as _ext
from .extensions import cursor as _cursor
from .extensions import connection as _connection
from .extensions import adapt as _A, quote_ident
-from .compat import PY2, PY3, lru_cache
+from functools import lru_cache
from psycopg2._psycopg import ( # noqa
REPLICATION_PHYSICAL, REPLICATION_LOGICAL,
@@ -72,47 +72,47 @@ class DictCursorBase(_cursor):
else:
raise NotImplementedError(
"DictCursorBase can't be instantiated without a row factory.")
- super(DictCursorBase, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self._query_executed = False
self._prefetch = False
self.row_factory = row_factory
def fetchone(self):
if self._prefetch:
- res = super(DictCursorBase, self).fetchone()
+ res = super().fetchone()
if self._query_executed:
self._build_index()
if not self._prefetch:
- res = super(DictCursorBase, self).fetchone()
+ res = super().fetchone()
return res
def fetchmany(self, size=None):
if self._prefetch:
- res = super(DictCursorBase, self).fetchmany(size)
+ res = super().fetchmany(size)
if self._query_executed:
self._build_index()
if not self._prefetch:
- res = super(DictCursorBase, self).fetchmany(size)
+ res = super().fetchmany(size)
return res
def fetchall(self):
if self._prefetch:
- res = super(DictCursorBase, self).fetchall()
+ res = super().fetchall()
if self._query_executed:
self._build_index()
if not self._prefetch:
- res = super(DictCursorBase, self).fetchall()
+ res = super().fetchall()
return res
def __iter__(self):
try:
if self._prefetch:
- res = super(DictCursorBase, self).__iter__()
+ res = super().__iter__()
first = next(res)
if self._query_executed:
self._build_index()
if not self._prefetch:
- res = super(DictCursorBase, self).__iter__()
+ res = super().__iter__()
first = next(res)
yield first
@@ -126,7 +126,7 @@ class DictConnection(_connection):
"""A connection that uses `DictCursor` automatically."""
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', self.cursor_factory or DictCursor)
- return super(DictConnection, self).cursor(*args, **kwargs)
+ return super().cursor(*args, **kwargs)
class DictCursor(DictCursorBase):
@@ -137,18 +137,18 @@ class DictCursor(DictCursorBase):
def __init__(self, *args, **kwargs):
kwargs['row_factory'] = DictRow
- super(DictCursor, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self._prefetch = True
def execute(self, query, vars=None):
self.index = OrderedDict()
self._query_executed = True
- return super(DictCursor, self).execute(query, vars)
+ return super().execute(query, vars)
def callproc(self, procname, vars=None):
self.index = OrderedDict()
self._query_executed = True
- return super(DictCursor, self).callproc(procname, vars)
+ return super().callproc(procname, vars)
def _build_index(self):
if self._query_executed and self.description:
@@ -169,22 +169,22 @@ class DictRow(list):
def __getitem__(self, x):
if not isinstance(x, (int, slice)):
x = self._index[x]
- return super(DictRow, self).__getitem__(x)
+ return super().__getitem__(x)
def __setitem__(self, x, v):
if not isinstance(x, (int, slice)):
x = self._index[x]
- super(DictRow, self).__setitem__(x, v)
+ super().__setitem__(x, v)
def items(self):
- g = super(DictRow, self).__getitem__
+ g = super().__getitem__
return ((n, g(self._index[n])) for n in self._index)
def keys(self):
return iter(self._index)
def values(self):
- g = super(DictRow, self).__getitem__
+ g = super().__getitem__
return (g(self._index[n]) for n in self._index)
def get(self, x, default=None):
@@ -201,7 +201,7 @@ class DictRow(list):
def __reduce__(self):
# this is apparently useless, but it fixes #1073
- return super(DictRow, self).__reduce__()
+ return super().__reduce__()
def __getstate__(self):
return self[:], self._index.copy()
@@ -210,27 +210,12 @@ class DictRow(list):
self[:] = data[0]
self._index = data[1]
- if PY2:
- iterkeys = keys
- itervalues = values
- iteritems = items
- has_key = __contains__
-
- def keys(self):
- return list(self.iterkeys())
-
- def values(self):
- return tuple(self.itervalues())
-
- def items(self):
- return list(self.iteritems())
-
class RealDictConnection(_connection):
"""A connection that uses `RealDictCursor` automatically."""
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', self.cursor_factory or RealDictCursor)
- return super(RealDictConnection, self).cursor(*args, **kwargs)
+ return super().cursor(*args, **kwargs)
class RealDictCursor(DictCursorBase):
@@ -243,17 +228,17 @@ class RealDictCursor(DictCursorBase):
"""
def __init__(self, *args, **kwargs):
kwargs['row_factory'] = RealDictRow
- super(RealDictCursor, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def execute(self, query, vars=None):
self.column_mapping = []
self._query_executed = True
- return super(RealDictCursor, self).execute(query, vars)
+ return super().execute(query, vars)
def callproc(self, procname, vars=None):
self.column_mapping = []
self._query_executed = True
- return super(RealDictCursor, self).callproc(procname, vars)
+ return super().callproc(procname, vars)
def _build_index(self):
if self._query_executed and self.description:
@@ -271,7 +256,7 @@ class RealDictRow(OrderedDict):
else:
cursor = None
- super(RealDictRow, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
if cursor is not None:
# Required for named cursors
@@ -287,20 +272,20 @@ class RealDictRow(OrderedDict):
if RealDictRow in self:
# We are in the row building phase
mapping = self[RealDictRow]
- super(RealDictRow, self).__setitem__(mapping[key], value)
+ super().__setitem__(mapping[key], value)
if key == len(mapping) - 1:
# Row building finished
del self[RealDictRow]
return
- super(RealDictRow, self).__setitem__(key, value)
+ super().__setitem__(key, value)
class NamedTupleConnection(_connection):
"""A connection that uses `NamedTupleCursor` automatically."""
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', self.cursor_factory or NamedTupleCursor)
- return super(NamedTupleConnection, self).cursor(*args, **kwargs)
+ return super().cursor(*args, **kwargs)
class NamedTupleCursor(_cursor):
@@ -324,18 +309,18 @@ class NamedTupleCursor(_cursor):
def execute(self, query, vars=None):
self.Record = None
- return super(NamedTupleCursor, self).execute(query, vars)
+ return super().execute(query, vars)
def executemany(self, query, vars):
self.Record = None
- return super(NamedTupleCursor, self).executemany(query, vars)
+ return super().executemany(query, vars)
def callproc(self, procname, vars=None):
self.Record = None
- return super(NamedTupleCursor, self).callproc(procname, vars)
+ return super().callproc(procname, vars)
def fetchone(self):
- t = super(NamedTupleCursor, self).fetchone()
+ t = super().fetchone()
if t is not None:
nt = self.Record
if nt is None:
@@ -343,14 +328,14 @@ class NamedTupleCursor(_cursor):
return nt._make(t)
def fetchmany(self, size=None):
- ts = super(NamedTupleCursor, self).fetchmany(size)
+ ts = super().fetchmany(size)
nt = self.Record
if nt is None:
nt = self.Record = self._make_nt()
return list(map(nt._make, ts))
def fetchall(self):
- ts = super(NamedTupleCursor, self).fetchall()
+ ts = super().fetchall()
nt = self.Record
if nt is None:
nt = self.Record = self._make_nt()
@@ -358,7 +343,7 @@ class NamedTupleCursor(_cursor):
def __iter__(self):
try:
- it = super(NamedTupleCursor, self).__iter__()
+ it = super().__iter__()
t = next(it)
nt = self.Record
@@ -436,7 +421,7 @@ class LoggingConnection(_connection):
def _logtofile(self, msg, curs):
msg = self.filter(msg, curs)
if msg:
- if PY3 and isinstance(msg, bytes):
+ if isinstance(msg, bytes):
msg = msg.decode(_ext.encodings[self.encoding], 'replace')
self._logobj.write(msg + _os.linesep)
@@ -453,7 +438,7 @@ class LoggingConnection(_connection):
def cursor(self, *args, **kwargs):
self._check()
kwargs.setdefault('cursor_factory', self.cursor_factory or LoggingCursor)
- return super(LoggingConnection, self).cursor(*args, **kwargs)
+ return super().cursor(*args, **kwargs)
class LoggingCursor(_cursor):
@@ -461,13 +446,13 @@ class LoggingCursor(_cursor):
def execute(self, query, vars=None):
try:
- return super(LoggingCursor, self).execute(query, vars)
+ return super().execute(query, vars)
finally:
self.connection.log(self.query, self)
def callproc(self, procname, vars=None):
try:
- return super(LoggingCursor, self).callproc(procname, vars)
+ return super().callproc(procname, vars)
finally:
self.connection.log(self.query, self)
@@ -490,9 +475,9 @@ class MinTimeLoggingConnection(LoggingConnection):
def filter(self, msg, curs):
t = (_time.time() - curs.timestamp) * 1000
if t > self._mintime:
- if PY3 and isinstance(msg, bytes):
+ if isinstance(msg, bytes):
msg = msg.decode(_ext.encodings[self.encoding], 'replace')
- return msg + _os.linesep + " (execution time: %d ms)" % t
+ return f"{msg}{_os.linesep} (execution time: {t} ms)"
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory',
@@ -516,14 +501,14 @@ class LogicalReplicationConnection(_replicationConnection):
def __init__(self, *args, **kwargs):
kwargs['replication_type'] = REPLICATION_LOGICAL
- super(LogicalReplicationConnection, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
class PhysicalReplicationConnection(_replicationConnection):
def __init__(self, *args, **kwargs):
kwargs['replication_type'] = REPLICATION_PHYSICAL
- super(PhysicalReplicationConnection, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
class StopReplication(Exception):
@@ -544,7 +529,7 @@ class ReplicationCursor(_replicationCursor):
def create_replication_slot(self, slot_name, slot_type=None, output_plugin=None):
"""Create streaming replication slot."""
- command = "CREATE_REPLICATION_SLOT %s " % quote_ident(slot_name, self)
+ command = f"CREATE_REPLICATION_SLOT {quote_ident(slot_name, self)} "
if slot_type is None:
slot_type = self.connection.replication_type
@@ -555,7 +540,7 @@ class ReplicationCursor(_replicationCursor):
"output plugin name is required to create "
"logical replication slot")
- command += "LOGICAL %s" % quote_ident(output_plugin, self)
+ command += f"LOGICAL {quote_ident(output_plugin, self)}"
elif slot_type == REPLICATION_PHYSICAL:
if output_plugin is not None:
@@ -567,14 +552,14 @@ class ReplicationCursor(_replicationCursor):
else:
raise psycopg2.ProgrammingError(
- "unrecognized replication type: %s" % repr(slot_type))
+ f"unrecognized replication type: {repr(slot_type)}")
self.execute(command)
def drop_replication_slot(self, slot_name):
"""Drop streaming replication slot."""
- command = "DROP_REPLICATION_SLOT %s" % quote_ident(slot_name, self)
+ command = f"DROP_REPLICATION_SLOT {quote_ident(slot_name, self)}"
self.execute(command)
def start_replication(
@@ -589,7 +574,7 @@ class ReplicationCursor(_replicationCursor):
if slot_type == REPLICATION_LOGICAL:
if slot_name:
- command += "SLOT %s " % quote_ident(slot_name, self)
+ command += f"SLOT {quote_ident(slot_name, self)} "
else:
raise psycopg2.ProgrammingError(
"slot name is required for logical replication")
@@ -598,19 +583,18 @@ class ReplicationCursor(_replicationCursor):
elif slot_type == REPLICATION_PHYSICAL:
if slot_name:
- command += "SLOT %s " % quote_ident(slot_name, self)
+ command += f"SLOT {quote_ident(slot_name, self)} "
# don't add "PHYSICAL", before 9.4 it was just START_REPLICATION XXX/XXX
else:
raise psycopg2.ProgrammingError(
- "unrecognized replication type: %s" % repr(slot_type))
+ f"unrecognized replication type: {repr(slot_type)}")
if type(start_lsn) is str:
lsn = start_lsn.split('/')
- lsn = "%X/%08X" % (int(lsn[0], 16), int(lsn[1], 16))
+ lsn = f"{int(lsn[0], 16):X}/{int(lsn[1], 16):08X}"
else:
- lsn = "%X/%08X" % ((start_lsn >> 32) & 0xFFFFFFFF,
- start_lsn & 0xFFFFFFFF)
+ lsn = f"{start_lsn >> 32 & 4294967295:X}/{start_lsn & 4294967295:08X}"
command += lsn
@@ -619,7 +603,7 @@ class ReplicationCursor(_replicationCursor):
raise psycopg2.ProgrammingError(
"cannot specify timeline for logical replication")
- command += " TIMELINE %d" % timeline
+ command += f" TIMELINE {timeline}"
if options:
if slot_type == REPLICATION_PHYSICAL:
@@ -630,7 +614,7 @@ class ReplicationCursor(_replicationCursor):
for k, v in options.items():
if not command.endswith('('):
command += ", "
- command += "%s %s" % (quote_ident(k, self), _A(str(v)))
+ command += f"{quote_ident(k, self)} {_A(str(v))}"
command += ")"
self.start_replication_expert(
@@ -643,7 +627,7 @@ class ReplicationCursor(_replicationCursor):
# a dbtype and adapter for Python UUID type
-class UUID_adapter(object):
+class UUID_adapter:
"""Adapt Python's uuid.UUID__ type to PostgreSQL's uuid__.
.. __: https://docs.python.org/library/uuid.html
@@ -658,10 +642,10 @@ class UUID_adapter(object):
return self
def getquoted(self):
- return ("'%s'::uuid" % self._uuid).encode('utf8')
+ return (f"'{self._uuid}'::uuid").encode('utf8')
def __str__(self):
- return "'%s'::uuid" % self._uuid
+ return f"'{self._uuid}'::uuid"
def register_uuid(oids=None, conn_or_curs=None):
@@ -698,7 +682,7 @@ def register_uuid(oids=None, conn_or_curs=None):
# a type, dbtype and adapter for PostgreSQL inet type
-class Inet(object):
+class Inet:
"""Wrap a string to allow for correct SQL-quoting of inet values.
Note that this adapter does NOT check the passed value to make
@@ -710,7 +694,7 @@ class Inet(object):
self.addr = addr
def __repr__(self):
- return "%s(%r)" % (self.__class__.__name__, self.addr)
+ return f"{self.__class__.__name__}({self.addr!r})"
def prepare(self, conn):
self._conn = conn
@@ -783,7 +767,7 @@ def wait_select(conn):
elif state == POLL_WRITE:
select.select([], [conn.fileno()], [])
else:
- raise conn.OperationalError("bad state from poll: %s" % state)
+ raise conn.OperationalError(f"bad state from poll: {state}")
except KeyboardInterrupt:
conn.cancel()
# the loop will be broken by a server error
@@ -805,7 +789,7 @@ def _solve_conn_curs(conn_or_curs):
return conn, curs
-class HstoreAdapter(object):
+class HstoreAdapter:
"""Adapt a Python dict to the hstore syntax."""
def __init__(self, wrapped):
self.wrapped = wrapped
@@ -885,7 +869,7 @@ class HstoreAdapter(object):
for m in self._re_hstore.finditer(s):
if m is None or m.start() != start:
raise psycopg2.InterfaceError(
- "error parsing hstore pair at char %d" % start)
+ f"error parsing hstore pair at char {start}")
k = _bsdec.sub(r'\1', m.group(1))
v = m.group(2)
if v is not None:
@@ -896,7 +880,7 @@ class HstoreAdapter(object):
if start < len(s):
raise psycopg2.InterfaceError(
- "error parsing hstore: unparsed data after char %d" % start)
+ f"error parsing hstore: unparsed data after char {start}")
return rv
@@ -924,12 +908,11 @@ class HstoreAdapter(object):
rv0, rv1 = [], []
# get the oid for the hstore
- curs.execute("""\
-SELECT t.oid, %s
+ curs.execute(f"""SELECT t.oid, {typarray}
FROM pg_type t JOIN pg_namespace ns
ON typnamespace = ns.oid
WHERE typname = 'hstore';
-""" % typarray)
+""")
for oids in curs:
rv0.append(oids[0])
rv1.append(oids[1])
@@ -993,12 +976,7 @@ def register_hstore(conn_or_curs, globally=False, unicode=False,
array_oid = tuple([x for x in array_oid if x])
# create and register the typecaster
- if PY2 and unicode:
- cast = HstoreAdapter.parse_unicode
- else:
- cast = HstoreAdapter.parse
-
- HSTORE = _ext.new_type(oid, "HSTORE", cast)
+ HSTORE = _ext.new_type(oid, "HSTORE", HstoreAdapter.parse)
_ext.register_type(HSTORE, not globally and conn_or_curs or None)
_ext.register_adapter(dict, HstoreAdapter)
@@ -1007,7 +985,7 @@ def register_hstore(conn_or_curs, globally=False, unicode=False,
_ext.register_type(HSTOREARRAY, not globally and conn_or_curs or None)
-class CompositeCaster(object):
+class CompositeCaster:
"""Helps conversion of a PostgreSQL composite type into a Python object.
The class is usually created by the `register_composite()` function.
@@ -1028,7 +1006,7 @@ class CompositeCaster(object):
self.typecaster = _ext.new_type((oid,), name, self.parse)
if array_oid:
self.array_typecaster = _ext.new_array_type(
- (array_oid,), "%sARRAY" % name, self.typecaster)
+ (array_oid,), f"{name}ARRAY", self.typecaster)
else:
self.array_typecaster = None
@@ -1072,7 +1050,7 @@ class CompositeCaster(object):
rv = []
for m in self._re_tokenize.finditer(s):
if m is None:
- raise psycopg2.InterfaceError("can't parse type: %r" % s)
+ raise psycopg2.InterfaceError(f"can't parse type: {s!r}")
if m.group(1) is not None:
rv.append(None)
elif m.group(2) is not None:
@@ -1127,7 +1105,7 @@ ORDER BY attnum;
if not recs:
raise psycopg2.ProgrammingError(
- "PostgreSQL type '%s' not found" % name)
+ f"PostgreSQL type '{name}' not found")
type_oid = recs[0][0]
array_oid = recs[0][1]
diff --git a/lib/pool.py b/lib/pool.py
index 30a29c3..5b14a3a 100644
--- a/lib/pool.py
+++ b/lib/pool.py
@@ -33,7 +33,7 @@ class PoolError(psycopg2.Error):
pass
-class AbstractConnectionPool(object):
+class AbstractConnectionPool:
"""Generic key-based pooling code."""
def __init__(self, minconn, maxconn, *args, **kwargs):
diff --git a/lib/sql.py b/lib/sql.py
index 6883452..aa3b148 100644
--- a/lib/sql.py
+++ b/lib/sql.py
@@ -27,13 +27,12 @@
import string
from psycopg2 import extensions as ext
-from psycopg2.compat import PY3, string_types
_formatter = string.Formatter()
-class Composable(object):
+class Composable:
"""
Abstract base class for objects that can be used to compose an SQL string.
@@ -51,7 +50,7 @@ class Composable(object):
self._wrapped = wrapped
def __repr__(self):
- return "%s(%r)" % (self.__class__.__name__, self._wrapped)
+ return f"{self.__class__.__name__}({self._wrapped!r})"
def as_string(self, context):
"""
@@ -107,10 +106,10 @@ class Composed(Composable):
for i in seq:
if not isinstance(i, Composable):
raise TypeError(
- "Composed elements must be Composable, got %r instead" % i)
+ f"Composed elements must be Composable, got {i!r} instead")
wrapped.append(i)
- super(Composed, self).__init__(wrapped)
+ super().__init__(wrapped)
@property
def seq(self):
@@ -148,7 +147,7 @@ class Composed(Composable):
"foo", "bar"
"""
- if isinstance(joiner, string_types):
+ if isinstance(joiner, str):
joiner = SQL(joiner)
elif not isinstance(joiner, SQL):
raise TypeError(
@@ -180,9 +179,9 @@ class SQL(Composable):
select "foo", "bar" from "table"
"""
def __init__(self, string):
- if not isinstance(string, string_types):
+ if not isinstance(string, str):
raise TypeError("SQL values must be strings")
- super(SQL, self).__init__(string)
+ super().__init__(string)
@property
def string(self):
@@ -324,10 +323,10 @@ class Identifier(Composable):
raise TypeError("Identifier cannot be empty")
for s in strings:
- if not isinstance(s, string_types):
+ if not isinstance(s, str):
raise TypeError("SQL identifier parts must be strings")
- super(Identifier, self).__init__(strings)
+ super().__init__(strings)
@property
def strings(self):
@@ -345,9 +344,7 @@ class Identifier(Composable):
"the Identifier wraps more than one than one string")
def __repr__(self):
- return "%s(%s)" % (
- self.__class__.__name__,
- ', '.join(map(repr, self._wrapped)))
+ return f"{self.__class__.__name__}({', '.join(map(repr, self._wrapped))})"
def as_string(self, context):
return '.'.join(ext.quote_ident(s, context) for s in self._wrapped)
@@ -392,7 +389,7 @@ class Literal(Composable):
a.prepare(conn)
rv = a.getquoted()
- if PY3 and isinstance(rv, bytes):
+ if isinstance(rv, bytes):
rv = rv.decode(ext.encodings[conn.encoding])
return rv
@@ -426,14 +423,14 @@ class Placeholder(Composable):
"""
def __init__(self, name=None):
- if isinstance(name, string_types):
+ if isinstance(name, str):
if ')' in name:
- raise ValueError("invalid name: %r" % name)
+ raise ValueError(f"invalid name: {name!r}")
elif name is not None:
- raise TypeError("expected string or None as name, got %r" % name)
+ raise TypeError(f"expected string or None as name, got {name!r}")
- super(Placeholder, self).__init__(name)
+ super().__init__(name)
@property
def name(self):
@@ -441,12 +438,11 @@ class Placeholder(Composable):
return self._wrapped
def __repr__(self):
- return "Placeholder(%r)" % (
- self._wrapped if self._wrapped is not None else '',)
+ return f"Placeholder({self._wrapped if self._wrapped is not None else ''!r})"
def as_string(self, context):
if self._wrapped is not None:
- return "%%(%s)s" % self._wrapped
+            return f"%({self._wrapped})s"
else:
return "%s"
diff --git a/lib/tz.py b/lib/tz.py
index ccbe374..81cd8f8 100644
--- a/lib/tz.py
+++ b/lib/tz.py
@@ -65,7 +65,7 @@ class FixedOffsetTimezone(datetime.tzinfo):
try:
return cls._cache[key]
except KeyError:
- tz = super(FixedOffsetTimezone, cls).__new__(cls, offset, name)
+ tz = super().__new__(cls, offset, name)
cls._cache[key] = tz
return tz
diff --git a/psycopg/adapter_asis.c b/psycopg/adapter_asis.c
index d0cb505..49240bd 100644
--- a/psycopg/adapter_asis.c
+++ b/psycopg/adapter_asis.c
@@ -45,14 +45,12 @@ asis_getquoted(asisObject *self, PyObject *args)
}
else {
rv = PyObject_Str(self->wrapped);
-#if PY_3
- /* unicode to bytes in Py3 */
+ /* unicode to bytes */
if (rv) {
PyObject *tmp = PyUnicode_AsUTF8String(rv);
Py_DECREF(rv);
rv = tmp;
}
-#endif
}
return rv;
diff --git a/psycopg/adapter_binary.c b/psycopg/adapter_binary.c
index 693ce31..f6f7cdd 100644
--- a/psycopg/adapter_binary.c
+++ b/psycopg/adapter_binary.c
@@ -76,15 +76,6 @@ binary_quote(binaryObject *self)
buffer_len = view.len;
}
-#if PY_2
- if (!buffer && (Bytes_Check(self->wrapped) || PyBuffer_Check(self->wrapped))) {
- if (PyObject_AsReadBuffer(self->wrapped, (const void **)&buffer,
- &buffer_len) < 0) {
- goto exit;
- }
- }
-#endif
-
if (!buffer) {
goto exit;
}
diff --git a/psycopg/adapter_pdecimal.c b/psycopg/adapter_pdecimal.c
index f42fdef..a178f88 100644
--- a/psycopg/adapter_pdecimal.c
+++ b/psycopg/adapter_pdecimal.c
@@ -81,8 +81,7 @@ pdecimal_getquoted(pdecimalObject *self, PyObject *args)
/* res may be unicode and may suffer for issue #57 */
output:
-#if PY_3
- /* unicode to bytes in Py3 */
+ /* unicode to bytes */
{
PyObject *tmp = PyUnicode_AsUTF8String(res);
Py_DECREF(res);
@@ -90,7 +89,6 @@ output:
goto end;
}
}
-#endif
if ('-' == Bytes_AS_STRING(res)[0]) {
/* Prepend a space in front of negative numbers (ticket #57) */
diff --git a/psycopg/adapter_pfloat.c b/psycopg/adapter_pfloat.c
index 2243633..e72e7f1 100644
--- a/psycopg/adapter_pfloat.c
+++ b/psycopg/adapter_pfloat.c
@@ -54,8 +54,7 @@ pfloat_getquoted(pfloatObject *self, PyObject *args)
goto exit;
}
-#if PY_3
- /* unicode to bytes in Py3 */
+ /* unicode to bytes */
{
PyObject *tmp = PyUnicode_AsUTF8String(rv);
Py_DECREF(rv);
@@ -63,7 +62,6 @@ pfloat_getquoted(pfloatObject *self, PyObject *args)
goto exit;
}
}
-#endif
if ('-' == Bytes_AS_STRING(rv)[0]) {
/* Prepend a space in front of negative numbers (ticket #57) */
diff --git a/psycopg/adapter_pint.c b/psycopg/adapter_pint.c
index 2784759..63bfae5 100644
--- a/psycopg/adapter_pint.c
+++ b/psycopg/adapter_pint.c
@@ -40,11 +40,7 @@ pint_getquoted(pintObject *self, PyObject *args)
/* Convert subclass to int to handle IntEnum and other subclasses
* whose str() is not the number. */
- if (PyLong_CheckExact(self->wrapped)
-#if PY_2
- || PyInt_CheckExact(self->wrapped)
-#endif
- ) {
+ if (PyLong_CheckExact(self->wrapped)) {
res = PyObject_Str(self->wrapped);
} else {
PyObject *tmp;
@@ -60,8 +56,7 @@ pint_getquoted(pintObject *self, PyObject *args)
goto exit;
}
-#if PY_3
- /* unicode to bytes in Py3 */
+ /* unicode to bytes */
{
PyObject *tmp = PyUnicode_AsUTF8String(res);
Py_DECREF(res);
@@ -69,7 +64,6 @@ pint_getquoted(pintObject *self, PyObject *args)
goto exit;
}
}
-#endif
if ('-' == Bytes_AS_STRING(res)[0]) {
/* Prepend a space in front of negative numbers (ticket #57) */
diff --git a/psycopg/lobject_int.c b/psycopg/lobject_int.c
index 71a8bfb..cf17721 100644
--- a/psycopg/lobject_int.c
+++ b/psycopg/lobject_int.c
@@ -85,11 +85,7 @@ _lobject_parse_mode(const char *mode)
pos += 1;
break;
default:
-#if PY_2
- rv |= LOBJECT_BINARY;
-#else
rv |= LOBJECT_TEXT;
-#endif
break;
}
diff --git a/psycopg/microprotocols.c b/psycopg/microprotocols.c
index 19408fe..4600bd4 100644
--- a/psycopg/microprotocols.c
+++ b/psycopg/microprotocols.c
@@ -92,11 +92,7 @@ _get_superclass_adapter(PyObject *obj, PyObject *proto)
Py_ssize_t i, ii;
type = Py_TYPE(obj);
- if (!(
-#if PY_2
- (Py_TPFLAGS_HAVE_CLASS & type->tp_flags) &&
-#endif
- type->tp_mro)) {
+ if (!(type->tp_mro)) {
/* has no mro */
return Py_None;
}
diff --git a/psycopg/psycopgmodule.c b/psycopg/psycopgmodule.c
index d448d01..acb6781 100644
--- a/psycopg/psycopgmodule.c
+++ b/psycopg/psycopgmodule.c
@@ -309,11 +309,6 @@ adapters_init(PyObject *module)
if (0 > microprotocols_add(&PyFloat_Type, NULL, (PyObject*)&pfloatType)) {
goto exit;
}
-#if PY_2
- if (0 > microprotocols_add(&PyInt_Type, NULL, (PyObject*)&pintType)) {
- goto exit;
- }
-#endif
if (0 > microprotocols_add(&PyLong_Type, NULL, (PyObject*)&pintType)) {
goto exit;
}
@@ -322,25 +317,14 @@ adapters_init(PyObject *module)
}
/* strings */
-#if PY_2
- if (0 > microprotocols_add(&PyString_Type, NULL, (PyObject*)&qstringType)) {
- goto exit;
- }
-#endif
if (0 > microprotocols_add(&PyUnicode_Type, NULL, (PyObject*)&qstringType)) {
goto exit;
}
/* binary */
-#if PY_2
- if (0 > microprotocols_add(&PyBuffer_Type, NULL, (PyObject*)&binaryType)) {
- goto exit;
- }
-#else
if (0 > microprotocols_add(&PyBytes_Type, NULL, (PyObject*)&binaryType)) {
goto exit;
}
-#endif
if (0 > microprotocols_add(&PyByteArray_Type, NULL, (PyObject*)&binaryType)) {
goto exit;
@@ -1052,7 +1036,6 @@ static PyMethodDef psycopgMethods[] = {
{NULL, NULL, 0, NULL} /* Sentinel */
};
-#if PY_3
static struct PyModuleDef psycopgmodule = {
PyModuleDef_HEAD_INIT,
"_psycopg",
@@ -1064,7 +1047,6 @@ static struct PyModuleDef psycopgmodule = {
NULL,
NULL
};
-#endif
#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */
#define PyMODINIT_FUNC void
@@ -1098,11 +1080,7 @@ INIT_MODULE(_psycopg)(void)
if (!(psyco_null = Bytes_FromString("NULL"))) { goto exit; }
/* initialize the module */
-#if PY_2
- module = Py_InitModule("_psycopg", psycopgMethods);
-#else
module = PyModule_Create(&psycopgmodule);
-#endif
if (!module) { goto exit; }
if (0 > add_module_constants(module)) { goto exit; }
@@ -1118,9 +1096,5 @@ INIT_MODULE(_psycopg)(void)
Dprintf("psycopgmodule: module initialization complete");
exit:
-#if PY_3
return module;
-#else
- return;
-#endif
}
diff --git a/psycopg/python.h b/psycopg/python.h
index c142de4..7be45bd 100644
--- a/psycopg/python.h
+++ b/psycopg/python.h
@@ -27,30 +27,11 @@
#ifndef PSYCOPG_PYTHON_H
#define PSYCOPG_PYTHON_H 1
-#define PY_2 (PY_MAJOR_VERSION == 2)
-#define PY_3 (PY_MAJOR_VERSION == 3)
-
-#if PY_2 && PY_VERSION_HEX < 0x02070000
-#error "psycopg requires Python 2.7"
-#endif
-
-#if PY_3 && PY_VERSION_HEX < 0x03060000
+#if PY_VERSION_HEX < 0x03060000
#error "psycopg requires Python 3.6"
#endif
#include <structmember.h>
-#if PY_2
-#include <stringobject.h>
-#endif
-
-/* hash() return size changed around version 3.2a4 on 64bit platforms. Before
- * this, the return size was always a long, regardless of arch. ~3.2
- * introduced the Py_hash_t & Py_uhash_t typedefs with the resulting sizes
- * based upon arch. */
-#if PY_VERSION_HEX < 0x030200A4
-typedef long Py_hash_t;
-typedef unsigned long Py_uhash_t;
-#endif
/* Since Py_TYPE() is changed to the inline static function,
* Py_TYPE(obj) = new_type must be replaced with Py_SET_TYPE(obj, new_type)
@@ -72,43 +53,6 @@ typedef unsigned long Py_uhash_t;
#define FORMAT_CODE_SIZE_T "%zu"
#endif
-#if PY_2
-
-#define Text_Type PyString_Type
-#define Text_Check(s) PyString_Check(s)
-#define Text_Format(f,a) PyString_Format(f,a)
-#define Text_FromUTF8(s) PyString_FromString(s)
-#define Text_FromUTF8AndSize(s,n) PyString_FromStringAndSize(s,n)
-
-#define Bytes_Type PyString_Type
-#define Bytes_Check PyString_Check
-#define Bytes_CheckExact PyString_CheckExact
-#define Bytes_AS_STRING PyString_AS_STRING
-#define Bytes_GET_SIZE PyString_GET_SIZE
-#define Bytes_Size PyString_Size
-#define Bytes_AsString PyString_AsString
-#define Bytes_AsStringAndSize PyString_AsStringAndSize
-#define Bytes_FromString PyString_FromString
-#define Bytes_FromStringAndSize PyString_FromStringAndSize
-#define Bytes_FromFormat PyString_FromFormat
-#define Bytes_ConcatAndDel PyString_ConcatAndDel
-#define _Bytes_Resize _PyString_Resize
-
-#define PyDateTime_DELTA_GET_DAYS(o) (((PyDateTime_Delta*)o)->days)
-#define PyDateTime_DELTA_GET_SECONDS(o) (((PyDateTime_Delta*)o)->seconds)
-#define PyDateTime_DELTA_GET_MICROSECONDS(o) (((PyDateTime_Delta*)o)->microseconds)
-
-#define INIT_MODULE(m) init ## m
-
-/* fix #961, but don't change all types to longs. Sure someone will complain. */
-#define PyLong_FromOid(x) (((x) & 0x80000000) ? \
- PyLong_FromUnsignedLong((unsigned long)(x)) : \
- PyInt_FromLong((x)))
-
-#endif /* PY_2 */
-
-#if PY_3
-
#define Text_Type PyUnicode_Type
#define Text_Check(s) PyUnicode_Check(s)
#define Text_Format(f,a) PyUnicode_Format(f,a)
@@ -149,8 +93,6 @@ typedef unsigned long Py_uhash_t;
#define PyLong_FromOid(x) (PyLong_FromUnsignedLong((unsigned long)(x)))
-#endif /* PY_3 */
-
/* expose Oid attributes in Python C objects */
#define T_OID T_UINT
diff --git a/psycopg/typecast.c b/psycopg/typecast.c
index 021fb0c..4f713b1 100644
--- a/psycopg/typecast.c
+++ b/psycopg/typecast.c
@@ -475,11 +475,7 @@ PyTypeObject typecastType = {
0, /*tp_print*/
0, /*tp_getattr*/
0, /*tp_setattr*/
-#if PY_VERSION_HEX < 0x03000000
- typecast_cmp, /*tp_compare*/
-#else
0, /*tp_reserved*/
-#endif
typecast_repr, /*tp_repr*/
0, /*tp_as_number*/
0, /*tp_as_sequence*/
@@ -651,11 +647,7 @@ typecast_cast(PyObject *obj, const char *str, Py_ssize_t len, PyObject *curs)
* Notice that this way it is about impossible to create a python
* typecaster on a binary type. */
if (str) {
-#if PY_2
- s = PyString_FromStringAndSize(str, len);
-#else
s = conn_decode(((cursorObject *)curs)->conn, str, len);
-#endif
}
else {
Py_INCREF(Py_None);
diff --git a/psycopg/typecast_basic.c b/psycopg/typecast_basic.c
index f3fac98..9363cf1 100644
--- a/psycopg/typecast_basic.c
+++ b/psycopg/typecast_basic.c
@@ -26,22 +26,7 @@
/** INTEGER - cast normal integers (4 bytes) to python int **/
-#if PY_2
-static PyObject *
-typecast_INTEGER_cast(const char *s, Py_ssize_t len, PyObject *curs)
-{
- char buffer[12];
-
- if (s == NULL) { Py_RETURN_NONE; }
- if (s[len] != '\0') {
- strncpy(buffer, s, (size_t) len); buffer[len] = '\0';
- s = buffer;
- }
- return PyInt_FromString((char *)s, NULL, 0);
-}
-#else
#define typecast_INTEGER_cast typecast_LONGINTEGER_cast
-#endif
/** LONGINTEGER - cast long integers (8 bytes) to python long **/
@@ -67,11 +52,7 @@ typecast_FLOAT_cast(const char *s, Py_ssize_t len, PyObject *curs)
if (s == NULL) { Py_RETURN_NONE; }
if (!(str = Text_FromUTF8AndSize(s, len))) { return NULL; }
-#if PY_2
- flo = PyFloat_FromString(str, NULL);
-#else
flo = PyFloat_FromString(str);
-#endif
Py_DECREF(str);
return flo;
}
@@ -103,11 +84,7 @@ typecast_UNICODE_cast(const char *s, Py_ssize_t len, PyObject *curs)
/** STRING - cast strings of any type to python string **/
-#if PY_2
-#define typecast_STRING_cast typecast_BYTES_cast
-#else
#define typecast_STRING_cast typecast_UNICODE_cast
-#endif
/** BOOLEAN - cast boolean value into right python object **/
diff --git a/psycopg/typecast_binary.c b/psycopg/typecast_binary.c
index 032d0f1..9dd2194 100644
--- a/psycopg/typecast_binary.c
+++ b/psycopg/typecast_binary.c
@@ -54,39 +54,6 @@ chunk_repr(chunkObject *self)
);
}
-#if PY_2
-
-static Py_ssize_t
-chunk_getreadbuffer(chunkObject *self, Py_ssize_t segment, void **ptr)
-{
- if (segment != 0)
- {
- PyErr_SetString(PyExc_SystemError,
- "accessing non-existant buffer segment");
- return -1;
- }
- *ptr = self->base;
- return self->len;
-}
-
-static Py_ssize_t
-chunk_getsegcount(chunkObject *self, Py_ssize_t *lenp)
-{
- if (lenp != NULL)
- *lenp = self->len;
- return 1;
-}
-
-static PyBufferProcs chunk_as_buffer =
-{
- (readbufferproc) chunk_getreadbuffer,
- (writebufferproc) NULL,
- (segcountproc) chunk_getsegcount,
- (charbufferproc) NULL
-};
-
-#else
-
/* 3.0 buffer interface */
int chunk_getbuffer(PyObject *_self, Py_buffer *view, int flags)
{
@@ -105,8 +72,6 @@ static PyBufferProcs chunk_as_buffer =
NULL,
};
-#endif
-
#define chunk_doc "memory chunk"
PyTypeObject chunkType = {
@@ -183,13 +148,8 @@ typecast_BINARY_cast(const char *s, Py_ssize_t l, PyObject *curs)
buffer = NULL;
chunk->len = (Py_ssize_t)len;
-#if PY_2
- if ((res = PyBuffer_FromObject((PyObject *)chunk, 0, chunk->len)) == NULL)
- goto exit;
-#else
if ((res = PyMemoryView_FromObject((PyObject*)chunk)) == NULL)
goto exit;
-#endif
exit:
Py_XDECREF((PyObject *)chunk);
diff --git a/psycopg/utils.c b/psycopg/utils.c
index 33b3aa8..d585b2c 100644
--- a/psycopg/utils.c
+++ b/psycopg/utils.c
@@ -190,7 +190,7 @@ psyco_ensure_bytes(PyObject *obj)
/* Take a Python object and return text from it.
*
- * On Py3 this means converting bytes to unicode. On Py2 bytes are fine.
+ * This means converting bytes to unicode.
*
* The function is ref neutral: steals a ref from obj and adds one to the
* return value. It is safe to call it on NULL.
@@ -198,9 +198,6 @@ psyco_ensure_bytes(PyObject *obj)
STEALS(1) PyObject *
psyco_ensure_text(PyObject *obj)
{
-#if PY_2
- return obj;
-#else
if (obj) {
/* bytes to unicode in Py3 */
PyObject *rv = PyUnicode_FromEncodedObject(obj, "utf8", "replace");
@@ -210,7 +207,6 @@ psyco_ensure_text(PyObject *obj)
else {
return NULL;
}
-#endif
}
/* Check if a file derives from TextIOBase.
@@ -309,24 +305,13 @@ exit:
/* Convert a C string into Python Text using a specified codec.
*
- * The codec is the python function codec.getdecoder(enc). It is only used on
- * Python 3 to return unicode: in Py2 the function returns a string.
+ * The codec is the python function codec.getdecoder(enc).
*
* len is optional: use -1 to have it calculated by the function.
*/
PyObject *
psyco_text_from_chars_safe(const char *str, Py_ssize_t len, PyObject *decoder)
{
-#if PY_2
-
- if (!str) { Py_RETURN_NONE; }
-
- if (len < 0) { len = strlen(str); }
-
- return PyString_FromStringAndSize(str, len);
-
-#else
-
static PyObject *replace = NULL;
PyObject *rv = NULL;
PyObject *b = NULL;
@@ -356,8 +341,6 @@ exit:
Py_XDECREF(t);
Py_XDECREF(b);
return rv;
-
-#endif
}
diff --git a/scripts/appveyor.py b/scripts/appveyor.py
index fcdd03f..a554183 100755
--- a/scripts/appveyor.py
+++ b/scripts/appveyor.py
@@ -426,10 +426,7 @@ def check_libpq_version():
.decode('ascii')
.rstrip()
)
- assert want_ver == got_ver, "libpq version mismatch: %r != %r" % (
- want_ver,
- got_ver,
- )
+ assert want_ver == got_ver, f"libpq version mismatch: {want_ver!r} != {got_ver!r}"
def run_test_suite():
@@ -671,7 +668,7 @@ def which(name):
if os.path.isfile(fn):
return fn
- raise Exception("couldn't find program on path: %s" % name)
+ raise Exception(f"couldn't find program on path: {name}")
class Options:
@@ -683,7 +680,7 @@ class Options:
def py_ver(self):
"""The Python version to build as 2 digits string."""
rv = os.environ['PY_VER']
- assert rv in ('27', '36', '37', '38', '39'), rv
+ assert rv in ('36', '37', '38', '39'), rv
return rv
@property
@@ -766,11 +763,9 @@ class Options:
def vs_ver(self):
# https://wiki.python.org/moin/WindowsCompilers
# https://www.appveyor.com/docs/windows-images-software/#python
- # Py 2.7 = VS Ver. 9.0 (VS 2008)
# Py 3.6--3.8 = VS Ver. 14.0 (VS 2015)
# Py 3.9 = VS Ver. 16.0 (VS 2019)
vsvers = {
- '27': '9.0',
'36': '14.0',
'37': '14.0',
'38': '14.0',
@@ -835,7 +830,7 @@ class Options:
def dist_dir(self):
"""The directory where to build packages to distribute."""
return (
- self.package_dir / 'dist' / ('psycopg2-%s' % self.package_version)
+ self.package_dir / 'dist' / (f'psycopg2-{self.package_version}')
)
diff --git a/scripts/make_errorcodes.py b/scripts/make_errorcodes.py
index 91c9833..6152db7 100755
--- a/scripts/make_errorcodes.py
+++ b/scripts/make_errorcodes.py
@@ -16,7 +16,6 @@ The script can be run at a new PostgreSQL release to refresh the module.
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
-from __future__ import print_function
import re
import sys
@@ -26,7 +25,7 @@ from collections import defaultdict
def main():
if len(sys.argv) != 2:
- print("usage: %s /path/to/errorcodes.py" % sys.argv[0], file=sys.stderr)
+ print(f"usage: {sys.argv[0]} /path/to/errorcodes.py", file=sys.stderr)
return 2
filename = sys.argv[1]
@@ -85,7 +84,7 @@ def parse_errors_txt(url):
continue
# We don't expect anything else
- raise ValueError("unexpected line:\n%s" % line)
+ raise ValueError(f"unexpected line:\n{line}")
return classes, errors
@@ -102,9 +101,7 @@ def fetch_errors(versions):
for version in versions:
print(version, file=sys.stderr)
tver = tuple(map(int, version.split()[0].split('.')))
- tag = '%s%s_STABLE' % (
- (tver[0] >= 10 and 'REL_' or 'REL'),
- version.replace('.', '_'))
+ tag = f"{tver[0] >= 10 and 'REL_' or 'REL'}{version.replace('.', '_')}_STABLE"
c1, e1 = parse_errors_txt(errors_txt_url % tag)
classes.update(c1)
@@ -136,19 +133,19 @@ def generate_module_data(classes, errors):
for clscode, clslabel in sorted(classes.items()):
err = clslabel.split(" - ")[1].split("(")[0] \
.strip().replace(" ", "_").replace('/', "_").upper()
- yield "CLASS_%s = %r" % (err, clscode)
+ yield f"CLASS_{err} = {clscode!r}"
seen = set()
for clscode, clslabel in sorted(classes.items()):
yield ""
- yield "# %s" % clslabel
+ yield f"# {clslabel}"
for errcode, errlabel in sorted(errors[clscode].items()):
if errlabel in seen:
- raise Exception("error label already seen: %s" % errlabel)
+ raise Exception(f"error label already seen: {errlabel}")
seen.add(errlabel)
- yield "%s = %r" % (errlabel, errcode)
+ yield f"{errlabel} = {errcode!r}"
if __name__ == '__main__':
diff --git a/scripts/make_errors.py b/scripts/make_errors.py
index 897f80b..0b4f0b1 100755
--- a/scripts/make_errors.py
+++ b/scripts/make_errors.py
@@ -16,7 +16,6 @@ The script can be run at a new PostgreSQL release to refresh the module.
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
-from __future__ import print_function
import os
import re
@@ -69,7 +68,7 @@ def parse_errors_txt(url):
continue
# We don't expect anything else
- raise ValueError("unexpected line:\n%s" % line)
+ raise ValueError(f"unexpected line:\n{line}")
return classes, errors
@@ -86,9 +85,7 @@ def fetch_errors(versions):
for version in versions:
print(version, file=sys.stderr)
tver = tuple(map(int, version.split()[0].split('.')))
- tag = '%s%s_STABLE' % (
- (tver[0] >= 10 and 'REL_' or 'REL'),
- version.replace('.', '_'))
+ tag = f"{tver[0] >= 10 and 'REL_' or 'REL'}{version.replace('.', '_')}_STABLE"
c1, e1 = parse_errors_txt(errors_txt_url % tag)
classes.update(c1)
@@ -119,7 +116,7 @@ def generate_module_data(classes, errors):
# success and warning - never raised
continue
- yield "\n/* %s */" % clslabel
+ yield f"\n/* {clslabel} */"
for errcode, errlabel in sorted(errors[clscode].items()):
if errcode in specific:
@@ -127,7 +124,7 @@ def generate_module_data(classes, errors):
else:
clsname = errlabel.title().replace('_', '')
if clsname in seen:
- raise Exception("class already existing: %s" % clsname)
+ raise Exception(f"class already existing: {clsname}")
seen.add(clsname)
yield tmpl % {
diff --git a/scripts/refcounter.py b/scripts/refcounter.py
index 5477c05..6de2f6d 100755
--- a/scripts/refcounter.py
+++ b/scripts/refcounter.py
@@ -18,7 +18,6 @@ script exits with error 1.
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
-from __future__ import print_function
import argparse
import gc
@@ -37,27 +36,27 @@ def main():
if opt.suite:
test = getattr(test, opt.suite)
- sys.stdout.write("test suite %s\n" % test.__name__)
+ sys.stdout.write(f"test suite {test.__name__}\n")
for i in range(1, opt.nruns + 1):
- sys.stdout.write("test suite run %d of %d\n" % (i, opt.nruns))
+ sys.stdout.write(f"test suite run {i} of {opt.nruns}\n")
runner = unittest.TextTestRunner()
runner.run(test.test_suite())
dump(i, opt)
- f1 = open('debug-%02d.txt' % (opt.nruns - 1)).readlines()
- f2 = open('debug-%02d.txt' % opt.nruns).readlines()
+ f1 = open(f'debug-{(opt.nruns - 1):02}.txt').readlines()
+ f2 = open(f'debug-{opt.nruns:02}.txt').readlines()
for line in difflib.unified_diff(f1, f2,
- "run %d" % (opt.nruns - 1), "run %d" % opt.nruns):
+ f"run {opt.nruns - 1}", f"run {opt.nruns}"):
sys.stdout.write(line)
rv = f1 != f2 and 1 or 0
if opt.objs:
- f1 = open('objs-%02d.txt' % (opt.nruns - 1)).readlines()
- f2 = open('objs-%02d.txt' % opt.nruns).readlines()
+ f1 = open(f'objs-{(opt.nruns - 1):02}.txt').readlines()
+ f2 = open(f'objs-{opt.nruns:02}.txt').readlines()
for line in difflib.unified_diff(f1, f2,
- "run %d" % (opt.nruns - 1), "run %d" % opt.nruns):
+ f"run {opt.nruns - 1}", f"run {opt.nruns}"):
sys.stdout.write(line)
return rv
@@ -86,7 +85,7 @@ def dump(i, opt):
pprint(
sorted(((v, str(k)) for k, v in c.items()), reverse=True),
- stream=open("debug-%02d.txt" % i, "w"))
+ stream=open(f"debug-{i:02}.txt", "w"))
if opt.objs:
co = []
@@ -101,7 +100,7 @@ def dump(i, opt):
else:
co.sort()
- pprint(co, stream=open("objs-%02d.txt" % i, "w"))
+ pprint(co, stream=open(f"objs-{i:02}.txt", "w"))
if __name__ == '__main__':
diff --git a/setup.py b/setup.py
index a413f56..a563fd1 100644
--- a/setup.py
+++ b/setup.py
@@ -26,8 +26,6 @@ UPDATEs. psycopg2 also provide full asynchronous operations and support
for coroutine libraries.
"""
-# Note: The setup.py must be compatible with both Python 2 and 3
-
import os
import sys
@@ -38,7 +36,6 @@ from distutils.command.build_ext import build_ext
from distutils.sysconfig import get_python_inc
from distutils.ccompiler import get_default_compiler
from distutils.errors import CompileError
-from distutils.util import get_platform
try:
import configparser
@@ -58,13 +55,12 @@ Development Status :: 5 - Production/Stable
Intended Audience :: Developers
License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)
Programming Language :: Python
-Programming Language :: Python :: 2
-Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
+Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: Implementation :: CPython
Programming Language :: C
Programming Language :: SQL
@@ -118,8 +114,7 @@ For further information please check the 'doc/src/install.rst' file (also at
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
except OSError:
- raise Warning("Unable to find 'pg_config' file in '%s'" %
- self.pg_config_exe)
+ raise Warning(f"Unable to find 'pg_config' file in '{self.pg_config_exe}'")
pg_config_process.stdin.close()
result = pg_config_process.stdout.readline().strip()
if not result:
@@ -174,7 +169,7 @@ For further information please check the 'doc/src/install.rst' file (also at
try:
pg_inst_list_key = winreg.OpenKey(reg,
'SOFTWARE\\PostgreSQL\\Installations')
- except EnvironmentError:
+ except OSError:
# No PostgreSQL installation, as best as we can tell.
return None
@@ -182,7 +177,7 @@ For further information please check the 'doc/src/install.rst' file (also at
# Determine the name of the first subkey, if any:
try:
first_sub_key_name = winreg.EnumKey(pg_inst_list_key, 0)
- except EnvironmentError:
+ except OSError:
return None
pg_first_inst_key = winreg.OpenKey(reg,
@@ -201,12 +196,6 @@ For further information please check the 'doc/src/install.rst' file (also at
if not os.path.exists(pg_config_path):
return None
- # Support unicode paths, if this version of Python provides the
- # necessary infrastructure:
- if sys.version_info[0] < 3:
- pg_config_path = pg_config_path.encode(
- sys.getfilesystemencoding())
-
return pg_config_path
@@ -307,30 +296,6 @@ For further information please check the 'doc/src/install.rst' file (also at
""")
raise
- sysVer = sys.version_info[:2]
-
- # For Python versions that use MSVC compiler 2008, re-insert the
- # manifest into the resulting .pyd file.
- if self.compiler_is_msvc() and sysVer == (2, 7):
- platform = get_platform()
- # Default to the x86 manifest
- manifest = '_psycopg.vc9.x86.manifest'
- if platform == 'win-amd64':
- manifest = '_psycopg.vc9.amd64.manifest'
- try:
- ext_path = self.get_ext_fullpath(extension.name)
- except AttributeError:
- ext_path = os.path.join(self.build_lib,
- 'psycopg2', '_psycopg.pyd')
- # Make sure spawn() will work if compile() was never
- # called. https://github.com/psycopg/psycopg2/issues/380
- if not self.compiler.initialized:
- self.compiler.initialize()
- self.compiler.spawn(
- ['mt.exe', '-nologo', '-manifest',
- os.path.join('psycopg', manifest),
- '-outputresource:%s;2' % ext_path])
-
def finalize_win32(self):
"""Finalize build system configuration on win32 platform."""
@@ -430,8 +395,7 @@ For further information please check the 'doc/src/install.rst' file (also at
pgpatch = int(pgpatch)
else:
sys.stderr.write(
- "Error: could not determine PostgreSQL version from '%s'"
- % pgversion)
+ f"Error: could not determine PostgreSQL version from '{pgversion}'")
sys.exit(1)
define_macros.append(("PG_VERSION_NUM", "%d%02d%02d" %
@@ -453,7 +417,7 @@ For further information please check the 'doc/src/install.rst' file (also at
except Warning:
w = sys.exc_info()[1] # work around py 2/3 different syntax
- sys.stderr.write("Error: %s\n" % w)
+ sys.stderr.write(f"Error: {w}\n")
sys.exit(1)
if hasattr(self, "finalize_" + sys.platform):
@@ -546,7 +510,7 @@ version_flags.append('pq3') # no more a choice
version_flags.append('ext') # no more a choice
if version_flags:
- PSYCOPG_VERSION_EX = PSYCOPG_VERSION + " (%s)" % ' '.join(version_flags)
+ PSYCOPG_VERSION_EX = PSYCOPG_VERSION + f" ({' '.join(version_flags)})"
else:
PSYCOPG_VERSION_EX = PSYCOPG_VERSION
@@ -598,7 +562,7 @@ setup(name="psycopg2",
url="https://psycopg.org/",
license="LGPL with exceptions",
platforms=["any"],
- python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*',
+ python_requires='>=3.6',
description=readme.split("\n")[0],
long_description="\n".join(readme.split("\n")[2:]).lstrip(),
classifiers=[x for x in classifiers.split("\n") if x],
diff --git a/tests/__init__.py b/tests/__init__.py
index f5c422f..76a01ee 100755
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -58,9 +58,6 @@ from . import test_types_basic
from . import test_types_extras
from . import test_with
-if sys.version_info[:2] < (3, 6):
- from . import test_async_keyword
-
def test_suite():
# If connection to test db fails, bail out early.
@@ -76,8 +73,6 @@ def test_suite():
suite = unittest.TestSuite()
suite.addTest(test_async.test_suite())
- if sys.version_info[:2] < (3, 6):
- suite.addTest(test_async_keyword.test_suite())
suite.addTest(test_bugX000.test_suite())
suite.addTest(test_bug_gc.test_suite())
suite.addTest(test_cancel.test_suite())
diff --git a/tests/dbapi20.py b/tests/dbapi20.py
index 9a8a9b2..b3c6405 100644
--- a/tests/dbapi20.py
+++ b/tests/dbapi20.py
@@ -185,13 +185,8 @@ class DatabaseAPI20Test(unittest.TestCase):
def test_Exceptions(self):
# Make sure required exceptions exist, and are in the
# defined hierarchy.
- if sys.version[0] == '3': #under Python 3 StardardError no longer exists
- self.failUnless(issubclass(self.driver.Warning,Exception))
- self.failUnless(issubclass(self.driver.Error,Exception))
- else:
- self.failUnless(issubclass(self.driver.Warning,StandardError))
- self.failUnless(issubclass(self.driver.Error,StandardError))
-
+ self.failUnless(issubclass(self.driver.Warning,Exception))
+ self.failUnless(issubclass(self.driver.Error,Exception))
self.failUnless(
issubclass(self.driver.InterfaceError,self.driver.Error)
)
@@ -547,7 +542,7 @@ class DatabaseAPI20Test(unittest.TestCase):
tests.
'''
populate = [
- "insert into %sbooze values ('%s')" % (self.table_prefix,s)
+ f"insert into {self.table_prefix}booze values ('{s}')"
for s in self.samples
]
return populate
diff --git a/tests/dbapi20_tpc.py b/tests/dbapi20_tpc.py
index d4790f7..fccc775 100644
--- a/tests/dbapi20_tpc.py
+++ b/tests/dbapi20_tpc.py
@@ -19,7 +19,7 @@ class TwoPhaseCommitTests(unittest.TestCase):
def make_xid(self, con):
id = TwoPhaseCommitTests._last_id
TwoPhaseCommitTests._last_id += 1
- return con.xid(42, "%s%d" % (self._global_id_prefix, id), "qualifier")
+ return con.xid(42, f"{self._global_id_prefix}{id}", "qualifier")
def test_xid(self):
con = self.connect()
diff --git a/tests/test_async.py b/tests/test_async.py
index eb97bc9..fdc4224 100755
--- a/tests/test_async.py
+++ b/tests/test_async.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# test_async.py - unit test for asynchronous API
#
@@ -37,7 +36,7 @@ from .testutils import (ConnectingTestCase, StringIO, skip_before_postgres,
skip_if_crdb, crdb_version, slow)
-class PollableStub(object):
+class PollableStub:
"""A 'pollable' wrapper allowing analysis of the `poll()` calls."""
def __init__(self, pollable):
self.pollable = pollable
diff --git a/tests/test_async_keyword.py b/tests/test_async_keyword.py
deleted file mode 100755
index e112692..0000000
--- a/tests/test_async_keyword.py
+++ /dev/null
@@ -1,225 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-# test_async_keyword.py - test for objects using 'async' as attribute/param
-#
-# Copyright (C) 2017-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
-# Copyright (C) 2020 The Psycopg Team
-#
-# psycopg2 is free software: you can redistribute it and/or modify it
-# under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# In addition, as a special exception, the copyright holders give
-# permission to link this program with the OpenSSL library (or with
-# modified versions of OpenSSL that use the same license as OpenSSL),
-# and distribute linked combinations including the two.
-#
-# You must obey the GNU Lesser General Public License in all respects for
-# all of the code used other than OpenSSL.
-#
-# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
-# License for more details.
-
-import time
-from select import select
-
-import psycopg2
-from psycopg2 import extras
-
-from .testconfig import dsn
-import unittest
-from .testutils import ConnectingTestCase, skip_before_postgres, slow
-
-from .test_replication import ReplicationTestCase, skip_repl_if_green
-from psycopg2.extras import LogicalReplicationConnection, StopReplication
-
-
-class AsyncTests(ConnectingTestCase):
- def setUp(self):
- ConnectingTestCase.setUp(self)
-
- self.sync_conn = self.conn
- self.conn = self.connect(async=True)
-
- self.wait(self.conn)
-
- curs = self.conn.cursor()
- curs.execute('''
- CREATE TEMPORARY TABLE table1 (
- id int PRIMARY KEY
- )''')
- self.wait(curs)
-
- def test_connection_setup(self):
- cur = self.conn.cursor()
- sync_cur = self.sync_conn.cursor()
- del cur, sync_cur
-
- self.assert_(self.conn.async)
- self.assert_(not self.sync_conn.async)
-
- # the async connection should be autocommit
- self.assert_(self.conn.autocommit)
-
- # check other properties to be found on the connection
- self.assert_(self.conn.server_version)
- self.assert_(self.conn.protocol_version in (2, 3))
- self.assert_(self.conn.encoding in psycopg2.extensions.encodings)
-
- def test_async_subclass(self):
- class MyConn(psycopg2.extensions.connection):
- def __init__(self, dsn, async=0):
- psycopg2.extensions.connection.__init__(self, dsn, async=async)
-
- conn = self.connect(connection_factory=MyConn, async=True)
- self.assert_(isinstance(conn, MyConn))
- self.assert_(conn.async)
- conn.close()
-
- def test_async_connection_error_message(self):
- try:
- cnn = psycopg2.connect('dbname=thisdatabasedoesntexist', async=True)
- self.wait(cnn)
- except psycopg2.Error as e:
- self.assertNotEqual(str(e), "asynchronous connection failed",
- "connection error reason lost")
- else:
- self.fail("no exception raised")
-
-
-class CancelTests(ConnectingTestCase):
- def setUp(self):
- ConnectingTestCase.setUp(self)
-
- cur = self.conn.cursor()
- cur.execute('''
- CREATE TEMPORARY TABLE table1 (
- id int PRIMARY KEY
- )''')
- self.conn.commit()
-
- @slow
- @skip_before_postgres(8, 2)
- def test_async_cancel(self):
- async_conn = psycopg2.connect(dsn, async=True)
- self.assertRaises(psycopg2.OperationalError, async_conn.cancel)
- extras.wait_select(async_conn)
- cur = async_conn.cursor()
- cur.execute("select pg_sleep(10)")
- time.sleep(1)
- self.assertTrue(async_conn.isexecuting())
- async_conn.cancel()
- self.assertRaises(psycopg2.extensions.QueryCanceledError,
- extras.wait_select, async_conn)
- cur.execute("select 1")
- extras.wait_select(async_conn)
- self.assertEqual(cur.fetchall(), [(1, )])
-
- def test_async_connection_cancel(self):
- async_conn = psycopg2.connect(dsn, async=True)
- async_conn.close()
- self.assertTrue(async_conn.closed)
-
-
-class ConnectTestCase(unittest.TestCase):
- def setUp(self):
- self.args = None
-
- def connect_stub(dsn, connection_factory=None, async=False):
- self.args = (dsn, connection_factory, async)
-
- self._connect_orig = psycopg2._connect
- psycopg2._connect = connect_stub
-
- def tearDown(self):
- psycopg2._connect = self._connect_orig
-
- def test_there_has_to_be_something(self):
- self.assertRaises(TypeError, psycopg2.connect)
- self.assertRaises(TypeError, psycopg2.connect,
- connection_factory=lambda dsn, async=False: None)
- self.assertRaises(TypeError, psycopg2.connect,
- async=True)
-
- def test_factory(self):
- def f(dsn, async=False):
- pass
-
- psycopg2.connect(database='foo', host='baz', connection_factory=f)
- self.assertDsnEqual(self.args[0], 'dbname=foo host=baz')
- self.assertEqual(self.args[1], f)
- self.assertEqual(self.args[2], False)
-
- psycopg2.connect("dbname=foo host=baz", connection_factory=f)
- self.assertDsnEqual(self.args[0], 'dbname=foo host=baz')
- self.assertEqual(self.args[1], f)
- self.assertEqual(self.args[2], False)
-
- def test_async(self):
- psycopg2.connect(database='foo', host='baz', async=1)
- self.assertDsnEqual(self.args[0], 'dbname=foo host=baz')
- self.assertEqual(self.args[1], None)
- self.assert_(self.args[2])
-
- psycopg2.connect("dbname=foo host=baz", async=True)
- self.assertDsnEqual(self.args[0], 'dbname=foo host=baz')
- self.assertEqual(self.args[1], None)
- self.assert_(self.args[2])
-
-
-class AsyncReplicationTest(ReplicationTestCase):
- @skip_before_postgres(9, 4) # slots require 9.4
- @skip_repl_if_green
- def test_async_replication(self):
- conn = self.repl_connect(
- connection_factory=LogicalReplicationConnection, async=1)
- if conn is None:
- return
-
- cur = conn.cursor()
-
- self.create_replication_slot(cur, output_plugin='test_decoding')
- self.wait(cur)
-
- cur.start_replication(self.slot)
- self.wait(cur)
-
- self.make_replication_events()
-
- self.msg_count = 0
-
- def consume(msg):
- # just check the methods
- "%s: %s" % (cur.io_timestamp, repr(msg))
- "%s: %s" % (cur.feedback_timestamp, repr(msg))
-
- self.msg_count += 1
- if self.msg_count > 3:
- cur.send_feedback(reply=True)
- raise StopReplication()
-
- cur.send_feedback(flush_lsn=msg.data_start)
-
- # cannot be used in asynchronous mode
- self.assertRaises(psycopg2.ProgrammingError, cur.consume_stream, consume)
-
- def process_stream():
- while True:
- msg = cur.read_message()
- if msg:
- consume(msg)
- else:
- select([cur], [], [])
- self.assertRaises(StopReplication, process_stream)
-
-
-def test_suite():
- return unittest.TestLoader().loadTestsFromName(__name__)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/tests/test_cancel.py b/tests/test_cancel.py
index de8af90..1f7d586 100755
--- a/tests/test_cancel.py
+++ b/tests/test_cancel.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# test_cancel.py - unit test for query cancellation
#
diff --git a/tests/test_connection.py b/tests/test_connection.py
index f4c156b..96a8301 100755
--- a/tests/test_connection.py
+++ b/tests/test_connection.py
@@ -42,7 +42,7 @@ import psycopg2.extras
from psycopg2 import extensions as ext
from .testutils import (
- PY2, unittest, skip_if_no_superuser, skip_before_postgres,
+ unittest, skip_if_no_superuser, skip_before_postgres,
skip_after_postgres, skip_before_libpq, skip_after_libpq,
ConnectingTestCase, skip_if_tpc_disabled, skip_if_windows, slow,
skip_if_crdb, crdb_version)
@@ -245,7 +245,7 @@ class ConnectionTests(ConnectingTestCase):
cur = self.conn.cursor()
ext.register_type(ext.UNICODE, cur)
cur.execute("select 'foo'::text;")
- self.assertEqual(cur.fetchone()[0], u'foo')
+ self.assertEqual(cur.fetchone()[0], 'foo')
def test_connect_nonnormal_envvar(self):
# We must perform encoding normalization at connection time
@@ -302,7 +302,7 @@ class ConnectionTests(ConnectingTestCase):
# Stop the committer thread
stop.append(True)
- self.assert_(not notices, "%d notices raised" % len(notices))
+ self.assert_(not notices, f"{len(notices)} notices raised")
def test_connect_cursor_factory(self):
conn = self.connect(cursor_factory=psycopg2.extras.DictCursor)
@@ -343,7 +343,7 @@ class ConnectionTests(ConnectingTestCase):
class SubConnection(ext.connection):
def __init__(self, dsn):
try:
- super(SubConnection, self).__init__(dsn)
+ super().__init__(dsn)
except Exception:
pass
@@ -383,12 +383,11 @@ class ConnectionTests(ConnectingTestCase):
dir = tempfile.mkdtemp()
try:
with open(os.path.join(dir, "mptest.py"), 'w') as f:
- f.write("""\
-import time
+ f.write(f"""import time
import psycopg2
def thread():
- conn = psycopg2.connect(%(dsn)r)
+ conn = psycopg2.connect({dsn!r})
curs = conn.cursor()
for i in range(10):
curs.execute("select 1")
@@ -396,11 +395,11 @@ def thread():
def process():
time.sleep(0.2)
-""" % {'dsn': dsn})
+""")
script = ("""\
import sys
-sys.path.insert(0, %(dir)r)
+sys.path.insert(0, {dir!r})
import time
import threading
import multiprocessing
@@ -411,7 +410,7 @@ t.start()
time.sleep(0.2)
multiprocessing.Process(target=mptest.process, name='myprocess').start()
t.join()
-""" % {'dir': dir})
+""".format(dir=dir))
out = sp.check_output(
[sys.executable, '-c', script], stderr=sp.STDOUT)
@@ -464,15 +463,12 @@ class ParseDsnTestCase(ConnectingTestCase):
self.assertTrue(raised, "ProgrammingError raised due to invalid URI")
def test_unicode_value(self):
- snowman = u"\u2603"
+ snowman = "\u2603"
d = ext.parse_dsn('dbname=' + snowman)
- if PY2:
- self.assertEqual(d['dbname'], snowman.encode('utf8'))
- else:
- self.assertEqual(d['dbname'], snowman)
+ self.assertEqual(d['dbname'], snowman)
def test_unicode_key(self):
- snowman = u"\u2603"
+ snowman = "\u2603"
self.assertRaises(psycopg2.ProgrammingError, ext.parse_dsn,
snowman + '=' + snowman)
@@ -1230,7 +1226,7 @@ class ConnectionTwoPhaseTests(ConnectingTestCase):
def test_xid_unicode(self):
cnn = self.connect()
- x1 = cnn.xid(10, u'uni', u'code')
+ x1 = cnn.xid(10, 'uni', 'code')
cnn.tpc_begin(x1)
cnn.tpc_prepare()
cnn.reset()
@@ -1245,7 +1241,7 @@ class ConnectionTwoPhaseTests(ConnectingTestCase):
# Let's just check unicode is accepted as type.
cnn = self.connect()
cnn.set_client_encoding('utf8')
- cnn.tpc_begin(u"transaction-id")
+ cnn.tpc_begin("transaction-id")
cnn.tpc_prepare()
cnn.reset()
@@ -1683,7 +1679,7 @@ class AutocommitTests(ConnectingTestCase):
class PasswordLeakTestCase(ConnectingTestCase):
def setUp(self):
- super(PasswordLeakTestCase, self).setUp()
+ super().setUp()
PasswordLeakTestCase.dsn = None
class GrassingConnection(ext.connection):
@@ -1736,8 +1732,7 @@ class SignalTestCase(ConnectingTestCase):
""")
def _test_bug_551(self, query):
- script = ("""\
-import os
+ script = f"""import os
import sys
import time
import signal
@@ -1758,7 +1753,7 @@ def killer():
signal.signal(signal.SIGABRT, handle_sigabort)
-conn = psycopg2.connect(%(dsn)r)
+conn = psycopg2.connect({dsn!r})
cur = conn.cursor()
@@ -1769,8 +1764,8 @@ t.daemon = True
t.start()
while True:
- cur.execute(%(query)r, ("Hello, world!",))
-""" % {'dsn': dsn, 'query': query})
+ cur.execute({query!r}, ("Hello, world!",))
+"""
proc = sp.Popen([sys.executable, '-c', script],
stdout=sp.PIPE, stderr=sp.PIPE)
diff --git a/tests/test_copy.py b/tests/test_copy.py
index 9274f1d..7002bf7 100755
--- a/tests/test_copy.py
+++ b/tests/test_copy.py
@@ -34,7 +34,7 @@ from subprocess import Popen, PIPE
import psycopg2
import psycopg2.extensions
-from .testutils import skip_copy_if_green, PY2, TextIOBase
+from .testutils import skip_copy_if_green, TextIOBase
from .testconfig import dsn
@@ -97,7 +97,7 @@ class CopyTests(ConnectingTestCase):
curs = self.conn.cursor()
f = StringIO()
for i in range(10):
- f.write("%s\n" % (i,))
+ f.write(f"{i}\n")
f.seek(0)
curs.copy_from(MinimalRead(f), "tcopy", columns=['id'])
@@ -109,7 +109,7 @@ class CopyTests(ConnectingTestCase):
curs = self.conn.cursor()
f = StringIO()
for i in range(10):
- f.write("%s\n" % (i,))
+ f.write(f"{i}\n")
f.seek(0)
@@ -133,14 +133,9 @@ class CopyTests(ConnectingTestCase):
self.conn.set_client_encoding('latin1')
self._create_temp_table() # the above call closed the xn
- if PY2:
- abin = ''.join(map(chr, range(32, 127) + range(160, 256)))
- about = abin.decode('latin1').replace('\\', '\\\\')
-
- else:
- abin = bytes(list(range(32, 127))
- + list(range(160, 256))).decode('latin1')
- about = abin.replace('\\', '\\\\')
+ abin = bytes(list(range(32, 127))
+ + list(range(160, 256))).decode('latin1')
+ about = abin.replace('\\', '\\\\')
curs = self.conn.cursor()
curs.execute('insert into tcopy values (%s, %s)',
@@ -155,13 +150,9 @@ class CopyTests(ConnectingTestCase):
self.conn.set_client_encoding('latin1')
self._create_temp_table() # the above call closed the xn
- if PY2:
- abin = ''.join(map(chr, range(32, 127) + range(160, 255)))
- about = abin.replace('\\', '\\\\')
- else:
- abin = bytes(list(range(32, 127))
- + list(range(160, 255))).decode('latin1')
- about = abin.replace('\\', '\\\\').encode('latin1')
+ abin = bytes(list(range(32, 127))
+ + list(range(160, 255))).decode('latin1')
+ about = abin.replace('\\', '\\\\').encode('latin1')
curs = self.conn.cursor()
curs.execute('insert into tcopy values (%s, %s)',
@@ -176,15 +167,9 @@ class CopyTests(ConnectingTestCase):
self.conn.set_client_encoding('latin1')
self._create_temp_table() # the above call closed the xn
- if PY2:
- abin = ''.join(map(chr, range(32, 127) + range(160, 256)))
- abin = abin.decode('latin1')
- about = abin.replace('\\', '\\\\')
-
- else:
- abin = bytes(list(range(32, 127))
- + list(range(160, 256))).decode('latin1')
- about = abin.replace('\\', '\\\\')
+ abin = bytes(list(range(32, 127))
+ + list(range(160, 256))).decode('latin1')
+ about = abin.replace('\\', '\\\\')
f = io.StringIO()
f.write(about)
@@ -224,7 +209,7 @@ class CopyTests(ConnectingTestCase):
f = StringIO()
for i, c in zip(range(nrecs), cycle(string.ascii_letters)):
l = c * srec
- f.write("%s\t%s\n" % (i, l))
+ f.write(f"{i}\t{l}\n")
f.seek(0)
curs.copy_from(MinimalRead(f), "tcopy", **copykw)
@@ -252,7 +237,7 @@ class CopyTests(ConnectingTestCase):
self.assertEqual(ntests, len(string.ascii_letters))
def test_copy_expert_file_refcount(self):
- class Whatever(object):
+ class Whatever:
pass
f = Whatever()
@@ -261,7 +246,7 @@ class CopyTests(ConnectingTestCase):
curs.copy_expert, 'COPY tcopy (data) FROM STDIN', f)
def test_copy_no_column_limit(self):
- cols = ["c%050d" % i for i in range(200)]
+ cols = [f"c{i:050}" for i in range(200)]
curs = self.conn.cursor()
curs.execute('CREATE TEMPORARY TABLE manycols (%s)' % ',\n'.join(
@@ -332,9 +317,8 @@ class CopyTests(ConnectingTestCase):
@slow
def test_copy_from_segfault(self):
# issue #219
- script = ("""\
-import psycopg2
-conn = psycopg2.connect(%(dsn)r)
+ script = f"""import psycopg2
+conn = psycopg2.connect({dsn!r})
curs = conn.cursor()
curs.execute("create table copy_segf (id int)")
try:
@@ -342,7 +326,7 @@ try:
except psycopg2.ProgrammingError:
pass
conn.close()
-""" % {'dsn': dsn})
+"""
proc = Popen([sys.executable, '-c', script])
proc.communicate()
@@ -351,9 +335,8 @@ conn.close()
@slow
def test_copy_to_segfault(self):
# issue #219
- script = ("""\
-import psycopg2
-conn = psycopg2.connect(%(dsn)r)
+ script = f"""import psycopg2
+conn = psycopg2.connect({dsn!r})
curs = conn.cursor()
curs.execute("create table copy_segf (id int)")
try:
@@ -361,7 +344,7 @@ try:
except psycopg2.ProgrammingError:
pass
conn.close()
-""" % {'dsn': dsn})
+"""
proc = Popen([sys.executable, '-c', script], stdout=PIPE)
proc.communicate()
diff --git a/tests/test_cursor.py b/tests/test_cursor.py
index e59c974..0549e3c 100755
--- a/tests/test_cursor.py
+++ b/tests/test_cursor.py
@@ -39,7 +39,6 @@ from .testutils import (ConnectingTestCase, skip_before_postgres,
skip_if_windows, skip_if_crdb, crdb_version)
import psycopg2.extras
-from psycopg2.compat import text_type
class CursorTests(ConnectingTestCase):
@@ -75,36 +74,36 @@ class CursorTests(ConnectingTestCase):
# test consistency between execute and mogrify.
# unicode query containing only ascii data
- cur.execute(u"SELECT 'foo';")
+ cur.execute("SELECT 'foo';")
self.assertEqual('foo', cur.fetchone()[0])
- self.assertEqual(b"SELECT 'foo';", cur.mogrify(u"SELECT 'foo';"))
+ self.assertEqual(b"SELECT 'foo';", cur.mogrify("SELECT 'foo';"))
conn.set_client_encoding('UTF8')
- snowman = u"\u2603"
+ snowman = "\u2603"
def b(s):
- if isinstance(s, text_type):
+ if isinstance(s, str):
return s.encode('utf8')
else:
return s
# unicode query with non-ascii data
- cur.execute(u"SELECT '%s';" % snowman)
+ cur.execute(f"SELECT '{snowman}';")
self.assertEqual(snowman.encode('utf8'), b(cur.fetchone()[0]))
- self.assertQuotedEqual(("SELECT '%s';" % snowman).encode('utf8'),
- cur.mogrify(u"SELECT '%s';" % snowman))
+ self.assertQuotedEqual(f"SELECT '{snowman}';".encode('utf8'),
+ cur.mogrify(f"SELECT '{snowman}';"))
# unicode args
cur.execute("SELECT %s;", (snowman,))
self.assertEqual(snowman.encode("utf-8"), b(cur.fetchone()[0]))
- self.assertQuotedEqual(("SELECT '%s';" % snowman).encode('utf8'),
+ self.assertQuotedEqual(f"SELECT '{snowman}';".encode('utf8'),
cur.mogrify("SELECT %s;", (snowman,)))
# unicode query and args
- cur.execute(u"SELECT %s;", (snowman,))
+ cur.execute("SELECT %s;", (snowman,))
self.assertEqual(snowman.encode("utf-8"), b(cur.fetchone()[0]))
- self.assertQuotedEqual(("SELECT '%s';" % snowman).encode('utf8'),
- cur.mogrify(u"SELECT %s;", (snowman,)))
+ self.assertQuotedEqual(f"SELECT '{snowman}';".encode('utf8'),
+ cur.mogrify("SELECT %s;", (snowman,)))
def test_mogrify_decimal_explodes(self):
conn = self.conn
@@ -293,12 +292,12 @@ class CursorTests(ConnectingTestCase):
cur = self.conn.cursor()
# Set up the temporary function
- cur.execute('''
- CREATE FUNCTION %s(%s INT)
+ cur.execute(f'''
+ CREATE FUNCTION {procname}({escaped_paramname} INT)
RETURNS INT AS
'SELECT $1 * $1'
LANGUAGE SQL
- ''' % (procname, escaped_paramname))
+ ''')
# Make sure callproc works right
cur.callproc(procname, {paramname: 2})
@@ -309,7 +308,7 @@ class CursorTests(ConnectingTestCase):
({paramname: 2, 'foo': 'bar'}, psycopg2.ProgrammingError),
({paramname: '2'}, psycopg2.ProgrammingError),
({paramname: 'two'}, psycopg2.ProgrammingError),
- ({u'bj\xc3rn': 2}, psycopg2.ProgrammingError),
+ ({'bj\xc3rn': 2}, psycopg2.ProgrammingError),
({3: 2}, TypeError),
({self: 2}, TypeError),
]
@@ -584,8 +583,7 @@ class NamedCursorTests(ConnectingTestCase):
time.sleep(0.2)
t2 = next(i)[0]
self.assert_((t2 - t1).microseconds * 1e-6 < 0.1,
- "named cursor records fetched in 2 roundtrips (delta: %s)"
- % (t2 - t1))
+ f"named cursor records fetched in 2 roundtrips (delta: {t2 - t1})")
@skip_before_postgres(8, 0)
def test_iter_named_cursor_default_itersize(self):
diff --git a/tests/test_dates.py b/tests/test_dates.py
index 48c6f15..29c37b0 100755
--- a/tests/test_dates.py
+++ b/tests/test_dates.py
@@ -379,7 +379,7 @@ class DatetimeTests(ConnectingTestCase, CommonDatetimeTestsMixin):
cur)
def f(val):
- cur.execute("select '%s'::text" % val)
+ cur.execute(f"select '{val}'::text")
return cur.fetchone()[0]
self.assertRaises(OverflowError, f, '100000000000000000:00:00')
@@ -423,7 +423,7 @@ class DatetimeTests(ConnectingTestCase, CommonDatetimeTestsMixin):
]:
cur.execute("select %s::text", (s,))
r = cur.fetchone()[0]
- self.assertEqual(r, v, "%s -> %s != %s" % (s, r, v))
+ self.assertEqual(r, v, f"{s} -> {r} != {v}")
@skip_if_crdb("interval style")
@skip_before_postgres(8, 4)
diff --git a/tests/test_errcodes.py b/tests/test_errcodes.py
index 3ce3282..88afd9d 100755
--- a/tests/test_errcodes.py
+++ b/tests/test_errcodes.py
@@ -53,7 +53,7 @@ class ErrocodeTests(ConnectingTestCase):
if errs:
self.fail(
- "raised %s errors in %s cycles (first is %s %s)" % (
+ "raised {} errors in {} cycles (first is {} {})".format(
len(errs), MAX_CYCLES,
errs[0].__class__.__name__, errs[0]))
diff --git a/tests/test_extras_dictcursor.py b/tests/test_extras_dictcursor.py
index a3c553e..4c63db7 100755
--- a/tests/test_extras_dictcursor.py
+++ b/tests/test_extras_dictcursor.py
@@ -20,14 +20,14 @@ import time
import pickle
import unittest
from datetime import timedelta
+from functools import lru_cache
import psycopg2
-from psycopg2.compat import lru_cache
import psycopg2.extras
from psycopg2.extras import NamedTupleConnection, NamedTupleCursor
from .testutils import ConnectingTestCase, skip_before_postgres, \
- skip_before_python, skip_from_python, crdb_version, skip_if_crdb
+ crdb_version, skip_if_crdb
class _DictCursorBase(ConnectingTestCase):
@@ -179,27 +179,7 @@ class ExtrasDictCursorTests(_DictCursorBase):
self.assertEqual(len(rv3), 2)
self.assertEqual(len(rv), 2)
- @skip_from_python(3)
- def test_iter_methods_2(self):
- curs = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
- curs.execute("select 10 as a, 20 as b")
- r = curs.fetchone()
- self.assert_(isinstance(r.keys(), list))
- self.assertEqual(len(r.keys()), 2)
- self.assert_(isinstance(r.values(), tuple)) # sic?
- self.assertEqual(len(r.values()), 2)
- self.assert_(isinstance(r.items(), list))
- self.assertEqual(len(r.items()), 2)
-
- self.assert_(not isinstance(r.iterkeys(), list))
- self.assertEqual(len(list(r.iterkeys())), 2)
- self.assert_(not isinstance(r.itervalues(), list))
- self.assertEqual(len(list(r.itervalues())), 2)
- self.assert_(not isinstance(r.iteritems(), list))
- self.assertEqual(len(list(r.iteritems())), 2)
-
- @skip_before_python(3)
- def test_iter_methods_3(self):
+ def test_iter_methods(self):
curs = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
curs.execute("select 10 as a, 20 as b")
r = curs.fetchone()
@@ -226,21 +206,6 @@ class ExtrasDictCursorTests(_DictCursorBase):
self.assertEqual(list(r1.values()), list(r.values()))
self.assertEqual(list(r1.items()), list(r.items()))
- @skip_from_python(3)
- def test_order_iter(self):
- curs = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
- curs.execute("select 5 as foo, 4 as bar, 33 as baz, 2 as qux")
- r = curs.fetchone()
- self.assertEqual(list(r.iterkeys()), ['foo', 'bar', 'baz', 'qux'])
- self.assertEqual(list(r.itervalues()), [5, 4, 33, 2])
- self.assertEqual(list(r.iteritems()),
- [('foo', 5), ('bar', 4), ('baz', 33), ('qux', 2)])
-
- r1 = pickle.loads(pickle.dumps(r))
- self.assertEqual(list(r1.iterkeys()), list(r.iterkeys()))
- self.assertEqual(list(r1.itervalues()), list(r.itervalues()))
- self.assertEqual(list(r1.iteritems()), list(r.iteritems()))
-
class ExtrasDictCursorRealTests(_DictCursorBase):
def testRealMeansReal(self):
@@ -340,27 +305,7 @@ class ExtrasDictCursorRealTests(_DictCursorBase):
row = getter(curs)
self.failUnless(row['foo'] == 'bar')
- @skip_from_python(3)
- def test_iter_methods_2(self):
- curs = self.conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
- curs.execute("select 10 as a, 20 as b")
- r = curs.fetchone()
- self.assert_(isinstance(r.keys(), list))
- self.assertEqual(len(r.keys()), 2)
- self.assert_(isinstance(r.values(), list))
- self.assertEqual(len(r.values()), 2)
- self.assert_(isinstance(r.items(), list))
- self.assertEqual(len(r.items()), 2)
-
- self.assert_(not isinstance(r.iterkeys(), list))
- self.assertEqual(len(list(r.iterkeys())), 2)
- self.assert_(not isinstance(r.itervalues(), list))
- self.assertEqual(len(list(r.itervalues())), 2)
- self.assert_(not isinstance(r.iteritems(), list))
- self.assertEqual(len(list(r.iteritems())), 2)
-
- @skip_before_python(3)
- def test_iter_methods_3(self):
+ def test_iter_methods(self):
curs = self.conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
curs.execute("select 10 as a, 20 as b")
r = curs.fetchone()
@@ -387,21 +332,6 @@ class ExtrasDictCursorRealTests(_DictCursorBase):
self.assertEqual(list(r1.values()), list(r.values()))
self.assertEqual(list(r1.items()), list(r.items()))
- @skip_from_python(3)
- def test_order_iter(self):
- curs = self.conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
- curs.execute("select 5 as foo, 4 as bar, 33 as baz, 2 as qux")
- r = curs.fetchone()
- self.assertEqual(list(r.iterkeys()), ['foo', 'bar', 'baz', 'qux'])
- self.assertEqual(list(r.itervalues()), [5, 4, 33, 2])
- self.assertEqual(list(r.iteritems()),
- [('foo', 5), ('bar', 4), ('baz', 33), ('qux', 2)])
-
- r1 = pickle.loads(pickle.dumps(r))
- self.assertEqual(list(r1.iterkeys()), list(r.iterkeys()))
- self.assertEqual(list(r1.itervalues()), list(r.itervalues()))
- self.assertEqual(list(r1.iteritems()), list(r.iteritems()))
-
def test_pop(self):
curs = self.conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
curs.execute("select 1 as a, 2 as b, 3 as c")
@@ -564,7 +494,6 @@ class NamedTupleCursorTest(ConnectingTestCase):
self.assertEqual(rv.f_column_, 2)
self.assertEqual(rv.f3, 3)
- @skip_before_python(3)
@skip_before_postgres(8)
def test_nonascii_name(self):
curs = self.conn.cursor()
@@ -692,7 +621,7 @@ class NamedTupleCursorTest(ConnectingTestCase):
recs = []
curs = self.conn.cursor()
for i in range(10):
- curs.execute("select 1 as f%s" % i)
+ curs.execute(f"select 1 as f{i}")
recs.append(curs.fetchone())
# Still in cache
diff --git a/tests/test_fast_executemany.py b/tests/test_fast_executemany.py
index eaba029..a153ef0 100755
--- a/tests/test_fast_executemany.py
+++ b/tests/test_fast_executemany.py
@@ -43,9 +43,9 @@ class TestPaginate(unittest.TestCase):
[list(range(i * 100, (i + 1) * 100)) for i in range(10)])
-class FastExecuteTestMixin(object):
+class FastExecuteTestMixin:
def setUp(self):
- super(FastExecuteTestMixin, self).setUp()
+ super().setUp()
cur = self.conn.cursor()
cur.execute("""create table testfast (
id serial primary key, date date, val int, data text)""")
@@ -102,7 +102,7 @@ class TestExecuteBatch(FastExecuteTestMixin, testutils.ConnectingTestCase):
page_size=10)
# last command was 5 statements
- self.assertEqual(sum(c == u';' for c in cur.query.decode('ascii')), 4)
+ self.assertEqual(sum(c == ';' for c in cur.query.decode('ascii')), 4)
cur.execute("select id, val from testfast order by id")
self.assertEqual(cur.fetchall(), [(i, i * 10) for i in range(25)])
@@ -111,7 +111,7 @@ class TestExecuteBatch(FastExecuteTestMixin, testutils.ConnectingTestCase):
def test_unicode(self):
cur = self.conn.cursor()
ext.register_type(ext.UNICODE, cur)
- snowman = u"\u2603"
+ snowman = "\u2603"
# unicode in statement
psycopg2.extras.execute_batch(cur,
@@ -206,7 +206,7 @@ class TestExecuteValues(FastExecuteTestMixin, testutils.ConnectingTestCase):
def test_unicode(self):
cur = self.conn.cursor()
ext.register_type(ext.UNICODE, cur)
- snowman = u"\u2603"
+ snowman = "\u2603"
# unicode in statement
psycopg2.extras.execute_values(cur,
diff --git a/tests/test_green.py b/tests/test_green.py
index f511f3e..fa9f298 100755
--- a/tests/test_green.py
+++ b/tests/test_green.py
@@ -36,7 +36,7 @@ from .testutils import ConnectingTestCase, skip_before_postgres, slow
from .testutils import skip_if_crdb
-class ConnectionStub(object):
+class ConnectionStub:
"""A `connection` wrapper allowing analysis of the `poll()` calls."""
def __init__(self, conn):
self.conn = conn
@@ -137,7 +137,7 @@ class GreenTestCase(ConnectingTestCase):
elif state == POLL_WRITE:
select.select([], [conn.fileno()], [], 0.1)
else:
- raise conn.OperationalError("bad state from poll: %s" % state)
+ raise conn.OperationalError(f"bad state from poll: {state}")
stub = self.set_stub_wait_callback(self.conn, wait)
cur = self.conn.cursor()
@@ -182,7 +182,7 @@ class CallbackErrorTestCase(ConnectingTestCase):
elif state == POLL_WRITE:
select.select([], [conn.fileno()], [])
else:
- raise conn.OperationalError("bad state from poll: %s" % state)
+ raise conn.OperationalError(f"bad state from poll: {state}")
except KeyboardInterrupt:
conn.cancel()
# the loop will be broken by a server error
diff --git a/tests/test_ipaddress.py b/tests/test_ipaddress.py
index 5d2ef3a..451ec0b 100755
--- a/tests/test_ipaddress.py
+++ b/tests/test_ipaddress.py
@@ -15,7 +15,6 @@
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
-from __future__ import unicode_literals
from . import testutils
import unittest
diff --git a/tests/test_lobject.py b/tests/test_lobject.py
index e13ca36..8d94656 100755
--- a/tests/test_lobject.py
+++ b/tests/test_lobject.py
@@ -185,15 +185,15 @@ class LargeObjectTests(LargeObjectTestCase):
def test_read_text(self):
lo = self.conn.lobject()
- snowman = u"\u2603"
- lo.write(u"some data " + snowman)
+ snowman = "\u2603"
+ lo.write("some data " + snowman)
lo.close()
lo = self.conn.lobject(lo.oid, "rt")
x = lo.read(4)
- self.assertEqual(type(x), type(u''))
- self.assertEqual(x, u"some")
- self.assertEqual(lo.read(), u" data " + snowman)
+ self.assertEqual(type(x), type(''))
+ self.assertEqual(x, "some")
+ self.assertEqual(lo.read(), " data " + snowman)
@slow
def test_read_large(self):
@@ -207,7 +207,7 @@ class LargeObjectTests(LargeObjectTestCase):
data1 = lo.read()
# avoid dumping megacraps in the console in case of error
self.assert_(data == data1,
- "%r... != %r..." % (data[:100], data1[:100]))
+ f"{data[:100]!r}... != {data1[:100]!r}...")
def test_seek_tell(self):
lo = self.conn.lobject()
diff --git a/tests/test_module.py b/tests/test_module.py
index c492bbd..5b47ba2 100755
--- a/tests/test_module.py
+++ b/tests/test_module.py
@@ -130,7 +130,7 @@ class ConnectTestCase(unittest.TestCase):
def test_int_port_param(self):
psycopg2.connect(database='sony', port=6543)
- dsn = " %s " % self.args[0]
+ dsn = f" {self.args[0]} "
self.assert_(" dbname=sony " in dsn, dsn)
self.assert_(" port=6543 " in dsn, dsn)
@@ -338,12 +338,12 @@ class TestExtensionModule(unittest.TestCase):
pkgdir = os.path.dirname(psycopg2.__file__)
pardir = os.path.dirname(pkgdir)
self.assert_(pardir in sys.path)
- script = ("""
+ script = f"""
import sys
-sys.path.remove(%r)
-sys.path.insert(0, %r)
+sys.path.remove({pardir!r})
+sys.path.insert(0, {pkgdir!r})
import _psycopg
-""" % (pardir, pkgdir))
+"""
proc = Popen([sys.executable, '-c', script])
proc.communicate()
diff --git a/tests/test_notify.py b/tests/test_notify.py
index 89a6060..a3586f8 100755
--- a/tests/test_notify.py
+++ b/tests/test_notify.py
@@ -56,23 +56,23 @@ class NotifiesTests(ConnectingTestCase):
if payload is None:
payload = ''
else:
- payload = ", %r" % payload
+ payload = f", {payload!r}"
script = ("""\
import time
-time.sleep(%(sec)s)
-import %(module)s as psycopg2
-import %(module)s.extensions as ext
-conn = psycopg2.connect(%(dsn)r)
+time.sleep({sec})
+import {module} as psycopg2
+import {module}.extensions as ext
+conn = psycopg2.connect({dsn!r})
conn.set_isolation_level(ext.ISOLATION_LEVEL_AUTOCOMMIT)
print(conn.info.backend_pid)
curs = conn.cursor()
-curs.execute("NOTIFY " %(name)r %(payload)r)
+curs.execute("NOTIFY " {name!r} {payload!r})
curs.close()
conn.close()
-""" % {
- 'module': psycopg2.__name__,
- 'dsn': dsn, 'sec': sec, 'name': name, 'payload': payload})
+""".format(
+ module=psycopg2.__name__,
+ dsn=dsn, sec=sec, name=name, payload=payload))
return Popen([sys.executable, '-c', script], stdout=PIPE)
diff --git a/tests/test_quote.py b/tests/test_quote.py
index dfe3219..98ec494 100755
--- a/tests/test_quote.py
+++ b/tests/test_quote.py
@@ -25,7 +25,7 @@
from . import testutils
import unittest
-from .testutils import ConnectingTestCase, skip_if_crdb, unichr, PY2
+from .testutils import ConnectingTestCase, skip_if_crdb
import psycopg2
import psycopg2.extensions
@@ -79,17 +79,11 @@ class QuotingTestCase(ConnectingTestCase):
data = b"""some data with \000\013 binary
stuff into, 'quotes' and \\ a backslash too.
"""
- if PY2:
- data += "".join(map(chr, range(256)))
- else:
- data += bytes(list(range(256)))
+ data += bytes(list(range(256)))
curs = self.conn.cursor()
curs.execute("SELECT %s::bytea;", (psycopg2.Binary(data),))
- if PY2:
- res = str(curs.fetchone()[0])
- else:
- res = curs.fetchone()[0].tobytes()
+ res = curs.fetchone()[0].tobytes()
if res[0] in (b'x', ord(b'x')) and self.conn.info.server_version >= 90000:
return self.skipTest(
@@ -104,13 +98,12 @@ class QuotingTestCase(ConnectingTestCase):
server_encoding = curs.fetchone()[0]
if server_encoding != "UTF8":
return self.skipTest(
- "Unicode test skipped since server encoding is %s"
- % server_encoding)
+ f"Unicode test skipped since server encoding is {server_encoding}")
- data = u"""some data with \t chars
+ data = """some data with \t chars
to escape into, 'quotes', \u20ac euro sign and \\ a backslash too.
"""
- data += u"".join(map(unichr, [u for u in range(1, 65536)
+ data += "".join(map(chr, [u for u in range(1, 65536)
if not 0xD800 <= u <= 0xDFFF])) # surrogate area
self.conn.set_client_encoding('UNICODE')
@@ -125,11 +118,8 @@ class QuotingTestCase(ConnectingTestCase):
def test_latin1(self):
self.conn.set_client_encoding('LATIN1')
curs = self.conn.cursor()
- if PY2:
- data = ''.join(map(chr, range(32, 127) + range(160, 256)))
- else:
- data = bytes(list(range(32, 127))
- + list(range(160, 256))).decode('latin1')
+ data = bytes(list(range(32, 127))
+ + list(range(160, 256))).decode('latin1')
# as string
curs.execute("SELECT %s::text;", (data,))
@@ -137,25 +127,13 @@ class QuotingTestCase(ConnectingTestCase):
self.assertEqual(res, data)
self.assert_(not self.conn.notices)
- # as unicode
- if PY2:
- psycopg2.extensions.register_type(psycopg2.extensions.UNICODE, self.conn)
- data = data.decode('latin1')
-
- curs.execute("SELECT %s::text;", (data,))
- res = curs.fetchone()[0]
- self.assertEqual(res, data)
- self.assert_(not self.conn.notices)
@skip_if_crdb("encoding")
def test_koi8(self):
self.conn.set_client_encoding('KOI8')
curs = self.conn.cursor()
- if PY2:
- data = ''.join(map(chr, range(32, 127) + range(128, 256)))
- else:
- data = bytes(list(range(32, 127))
- + list(range(128, 256))).decode('koi8_r')
+ data = bytes(list(range(32, 127))
+ + list(range(128, 256))).decode('koi8_r')
# as string
curs.execute("SELECT %s::text;", (data,))
@@ -163,18 +141,8 @@ class QuotingTestCase(ConnectingTestCase):
self.assertEqual(res, data)
self.assert_(not self.conn.notices)
- # as unicode
- if PY2:
- psycopg2.extensions.register_type(psycopg2.extensions.UNICODE, self.conn)
- data = data.decode('koi8_r')
-
- curs.execute("SELECT %s::text;", (data,))
- res = curs.fetchone()[0]
- self.assertEqual(res, data)
- self.assert_(not self.conn.notices)
-
def test_bytes(self):
- snowman = u"\u2603"
+ snowman = "\u2603"
conn = self.connect()
conn.set_client_encoding('UNICODE')
psycopg2.extensions.register_type(psycopg2.extensions.BYTES, conn)
@@ -202,12 +170,9 @@ class TestQuotedIdentifier(ConnectingTestCase):
@testutils.skip_before_postgres(8, 0)
def test_unicode_ident(self):
- snowman = u"\u2603"
+ snowman = "\u2603"
quoted = '"' + snowman + '"'
- if PY2:
- self.assertEqual(quote_ident(snowman, self.conn), quoted.encode('utf8'))
- else:
- self.assertEqual(quote_ident(snowman, self.conn), quoted)
+ self.assertEqual(quote_ident(snowman, self.conn), quoted)
class TestStringAdapter(ConnectingTestCase):
@@ -223,7 +188,7 @@ class TestStringAdapter(ConnectingTestCase):
# self.assertEqual(adapt(egrave).getquoted(), "'\xe8'")
def test_encoding_error(self):
- snowman = u"\u2603"
+ snowman = "\u2603"
a = adapt(snowman)
self.assertRaises(UnicodeEncodeError, a.getquoted)
@@ -231,14 +196,14 @@ class TestStringAdapter(ConnectingTestCase):
# Note: this works-ish mostly in case when the standard db connection
# we test with is utf8, otherwise the encoding chosen by PQescapeString
# may give bad results.
- snowman = u"\u2603"
+ snowman = "\u2603"
a = adapt(snowman)
a.encoding = 'utf8'
self.assertEqual(a.encoding, 'utf8')
self.assertEqual(a.getquoted(), b"'\xe2\x98\x83'")
def test_connection_wins_anyway(self):
- snowman = u"\u2603"
+ snowman = "\u2603"
a = adapt(snowman)
a.encoding = 'latin9'
@@ -248,9 +213,8 @@ class TestStringAdapter(ConnectingTestCase):
self.assertEqual(a.encoding, 'utf_8')
self.assertQuotedEqual(a.getquoted(), b"'\xe2\x98\x83'")
- @testutils.skip_before_python(3)
def test_adapt_bytes(self):
- snowman = u"\u2603"
+ snowman = "\u2603"
self.conn.set_client_encoding('utf8')
a = psycopg2.extensions.QuotedString(snowman.encode('utf8'))
a.prepare(self.conn)
diff --git a/tests/test_replication.py b/tests/test_replication.py
index 3ed68a5..569ed39 100755
--- a/tests/test_replication.py
+++ b/tests/test_replication.py
@@ -41,13 +41,13 @@ skip_repl_if_green = skip_if_green("replication not supported in green mode")
class ReplicationTestCase(ConnectingTestCase):
def setUp(self):
- super(ReplicationTestCase, self).setUp()
+ super().setUp()
self.slot = testconfig.repl_slot
self._slots = []
def tearDown(self):
# first close all connections, as they might keep the slot(s) active
- super(ReplicationTestCase, self).tearDown()
+ super().tearDown()
time.sleep(0.025) # sometimes the slot is still active, wait a little
@@ -244,9 +244,9 @@ class AsyncReplicationTest(ReplicationTestCase):
def consume(msg):
# just check the methods
- "%s: %s" % (cur.io_timestamp, repr(msg))
- "%s: %s" % (cur.feedback_timestamp, repr(msg))
- "%s: %s" % (cur.wal_end, repr(msg))
+ f"{cur.io_timestamp}: {repr(msg)}"
+ f"{cur.feedback_timestamp}: {repr(msg)}"
+ f"{cur.wal_end}: {repr(msg)}"
self.msg_count += 1
if self.msg_count > 3:
diff --git a/tests/test_sql.py b/tests/test_sql.py
index 7818ee8..2d96208 100755
--- a/tests/test_sql.py
+++ b/tests/test_sql.py
@@ -31,7 +31,6 @@ from .testutils import (
import psycopg2
from psycopg2 import sql
-from psycopg2.compat import text_type
class SqlFormatTests(ConnectingTestCase):
@@ -62,13 +61,6 @@ class SqlFormatTests(ConnectingTestCase):
self.assert_(isinstance(s1, str))
self.assertEqual(s1, 'select "field" from "table"')
- def test_unicode(self):
- s = sql.SQL(u"select {0} from {1}").format(
- sql.Identifier(u'field'), sql.Identifier('table'))
- s1 = s.as_string(self.conn)
- self.assert_(isinstance(s1, text_type))
- self.assertEqual(s1, u'select "field" from "table"')
-
def test_compose_literal(self):
s = sql.SQL("select {0};").format(sql.Literal(dt.date(2016, 12, 31)))
s1 = s.as_string(self.conn)
@@ -111,7 +103,7 @@ class SqlFormatTests(ConnectingTestCase):
self.assertRaises(ValueError, sql.SQL("select {a:<};").format, a=10)
def test_must_be_adaptable(self):
- class Foo(object):
+ class Foo:
pass
self.assertRaises(psycopg2.ProgrammingError,
@@ -182,7 +174,7 @@ class IdentifierTests(ConnectingTestCase):
def test_init(self):
self.assert_(isinstance(sql.Identifier('foo'), sql.Identifier))
- self.assert_(isinstance(sql.Identifier(u'foo'), sql.Identifier))
+ self.assert_(isinstance(sql.Identifier('foo'), sql.Identifier))
self.assert_(isinstance(sql.Identifier('foo', 'bar', 'baz'), sql.Identifier))
self.assertRaises(TypeError, sql.Identifier)
self.assertRaises(TypeError, sql.Identifier, 10)
@@ -231,7 +223,7 @@ class LiteralTests(ConnectingTestCase):
def test_init(self):
self.assert_(isinstance(sql.Literal('foo'), sql.Literal))
- self.assert_(isinstance(sql.Literal(u'foo'), sql.Literal))
+ self.assert_(isinstance(sql.Literal('foo'), sql.Literal))
self.assert_(isinstance(sql.Literal(b'foo'), sql.Literal))
self.assert_(isinstance(sql.Literal(42), sql.Literal))
self.assert_(isinstance(
@@ -256,7 +248,7 @@ class LiteralTests(ConnectingTestCase):
self.assert_(sql.Literal('foo') != sql.SQL('foo'))
def test_must_be_adaptable(self):
- class Foo(object):
+ class Foo:
pass
self.assertRaises(psycopg2.ProgrammingError,
@@ -269,7 +261,7 @@ class SQLTests(ConnectingTestCase):
def test_init(self):
self.assert_(isinstance(sql.SQL('foo'), sql.SQL))
- self.assert_(isinstance(sql.SQL(u'foo'), sql.SQL))
+ self.assert_(isinstance(sql.SQL('foo'), sql.SQL))
self.assertRaises(TypeError, sql.SQL, 10)
self.assertRaises(TypeError, sql.SQL, dt.date(2016, 12, 31))
diff --git a/tests/test_types_basic.py b/tests/test_types_basic.py
index efdff73..e5ee552 100755
--- a/tests/test_types_basic.py
+++ b/tests/test_types_basic.py
@@ -31,7 +31,7 @@ import platform
from . import testutils
import unittest
-from .testutils import PY2, long, text_type, ConnectingTestCase, restore_types
+from .testutils import ConnectingTestCase, restore_types
from .testutils import skip_if_crdb
import psycopg2
@@ -52,15 +52,13 @@ class TypesBasicTests(ConnectingTestCase):
"wrong quoting: " + s)
def testUnicode(self):
- s = u"Quote'this\\! ''ok?''"
+ s = "Quote'this\\! ''ok?''"
self.failUnless(self.execute("SELECT %s AS foo", (s,)) == s,
"wrong unicode quoting: " + s)
def testNumber(self):
s = self.execute("SELECT %s AS foo", (1971,))
self.failUnless(s == 1971, "wrong integer quoting: " + str(s))
- s = self.execute("SELECT %s AS foo", (long(1971),))
- self.failUnless(s == long(1971), "wrong integer quoting: " + str(s))
def testBoolean(self):
x = self.execute("SELECT %s as foo", (False,))
@@ -110,16 +108,10 @@ class TypesBasicTests(ConnectingTestCase):
self.failUnless(str(s) == "-inf", "wrong float quoting: " + str(s))
def testBinary(self):
- if PY2:
- s = ''.join([chr(x) for x in range(256)])
- b = psycopg2.Binary(s)
- buf = self.execute("SELECT %s::bytea AS foo", (b,))
- self.assertEqual(s, str(buf))
- else:
- s = bytes(range(256))
- b = psycopg2.Binary(s)
- buf = self.execute("SELECT %s::bytea AS foo", (b,))
- self.assertEqual(s, buf.tobytes())
+ s = bytes(range(256))
+ b = psycopg2.Binary(s)
+ buf = self.execute("SELECT %s::bytea AS foo", (b,))
+ self.assertEqual(s, buf.tobytes())
def testBinaryNone(self):
b = psycopg2.Binary(None)
@@ -128,26 +120,16 @@ class TypesBasicTests(ConnectingTestCase):
def testBinaryEmptyString(self):
# test to make sure an empty Binary is converted to an empty string
- if PY2:
- b = psycopg2.Binary('')
- self.assertEqual(str(b), "''::bytea")
- else:
- b = psycopg2.Binary(bytes([]))
- self.assertEqual(str(b), "''::bytea")
+ b = psycopg2.Binary(bytes([]))
+ self.assertEqual(str(b), "''::bytea")
def testBinaryRoundTrip(self):
# test to make sure buffers returned by psycopg2 are
# understood by execute:
- if PY2:
- s = ''.join([chr(x) for x in range(256)])
- buf = self.execute("SELECT %s::bytea AS foo", (psycopg2.Binary(s),))
- buf2 = self.execute("SELECT %s::bytea AS foo", (buf,))
- self.assertEqual(s, str(buf2))
- else:
- s = bytes(range(256))
- buf = self.execute("SELECT %s::bytea AS foo", (psycopg2.Binary(s),))
- buf2 = self.execute("SELECT %s::bytea AS foo", (buf,))
- self.assertEqual(s, buf2.tobytes())
+ s = bytes(range(256))
+ buf = self.execute("SELECT %s::bytea AS foo", (psycopg2.Binary(s),))
+ buf2 = self.execute("SELECT %s::bytea AS foo", (buf,))
+ self.assertEqual(s, buf2.tobytes())
@skip_if_crdb("nested array")
def testArray(self):
@@ -227,8 +209,8 @@ class TypesBasicTests(ConnectingTestCase):
curs = self.conn.cursor()
curs.execute("select '{a,b,c}'::text[]")
x = curs.fetchone()[0]
- self.assert_(isinstance(x[0], text_type))
- self.assertEqual(x, [u'a', u'b', u'c'])
+ self.assert_(isinstance(x[0], str))
+ self.assertEqual(x, ['a', 'b', 'c'])
def testBytesArray(self):
psycopg2.extensions.register_type(
@@ -291,27 +273,6 @@ class TypesBasicTests(ConnectingTestCase):
curs.execute("select %s::int[]", (a,))
self.assertEqual(curs.fetchone()[0], a)
- @testutils.skip_from_python(3)
- def testTypeRoundtripBuffer(self):
- o1 = buffer("".join(map(chr, range(256))))
- o2 = self.execute("select %s;", (o1,))
- self.assertEqual(type(o1), type(o2))
-
- # Test with an empty buffer
- o1 = buffer("")
- o2 = self.execute("select %s;", (o1,))
- self.assertEqual(type(o1), type(o2))
- self.assertEqual(str(o1), str(o2))
-
- @testutils.skip_from_python(3)
- def testTypeRoundtripBufferArray(self):
- o1 = buffer("".join(map(chr, range(256))))
- o1 = [o1]
- o2 = self.execute("select %s;", (o1,))
- self.assertEqual(type(o1[0]), type(o2[0]))
- self.assertEqual(str(o1[0]), str(o2[0]))
-
- @testutils.skip_before_python(3)
def testTypeRoundtripBytes(self):
o1 = bytes(range(256))
o2 = self.execute("select %s;", (o1,))
@@ -322,7 +283,6 @@ class TypesBasicTests(ConnectingTestCase):
o2 = self.execute("select %s;", (o1,))
self.assertEqual(memoryview, type(o2))
- @testutils.skip_before_python(3)
def testTypeRoundtripBytesArray(self):
o1 = bytes(range(256))
o1 = [o1]
@@ -332,12 +292,7 @@ class TypesBasicTests(ConnectingTestCase):
def testAdaptBytearray(self):
o1 = bytearray(range(256))
o2 = self.execute("select %s;", (o1,))
-
- if PY2:
- self.assertEqual(buffer, type(o2))
- else:
- self.assertEqual(memoryview, type(o2))
-
+ self.assertEqual(memoryview, type(o2))
self.assertEqual(len(o1), len(o2))
for c1, c2 in zip(o1, o2):
self.assertEqual(c1, ord(c2))
@@ -345,28 +300,18 @@ class TypesBasicTests(ConnectingTestCase):
# Test with an empty buffer
o1 = bytearray([])
o2 = self.execute("select %s;", (o1,))
-
self.assertEqual(len(o2), 0)
- if PY2:
- self.assertEqual(buffer, type(o2))
- else:
- self.assertEqual(memoryview, type(o2))
+ self.assertEqual(memoryview, type(o2))
def testAdaptMemoryview(self):
o1 = memoryview(bytearray(range(256)))
o2 = self.execute("select %s;", (o1,))
- if PY2:
- self.assertEqual(buffer, type(o2))
- else:
- self.assertEqual(memoryview, type(o2))
+ self.assertEqual(memoryview, type(o2))
# Test with an empty buffer
o1 = memoryview(bytearray([]))
o2 = self.execute("select %s;", (o1,))
- if PY2:
- self.assertEqual(buffer, type(o2))
- else:
- self.assertEqual(memoryview, type(o2))
+ self.assertEqual(memoryview, type(o2))
def testByteaHexCheckFalsePositive(self):
# the check \x -> x to detect bad bytea decode
@@ -382,8 +327,6 @@ class TypesBasicTests(ConnectingTestCase):
self.assertEqual(1, f1)
i1 = self.execute("select -%s;", (-1,))
self.assertEqual(1, i1)
- l1 = self.execute("select -%s;", (long(-1),))
- self.assertEqual(1, l1)
def testGenericArray(self):
a = self.execute("select '{1, 2, 3}'::int4[]")
@@ -417,7 +360,6 @@ class TypesBasicTests(ConnectingTestCase):
a = self.execute("select '{10:20:30:40:50:60}'::macaddr[]")
self.assertEqual(a, ['10:20:30:40:50:60'])
- @testutils.skip_before_python(3, 4)
def testIntEnum(self):
from enum import IntEnum
@@ -440,7 +382,7 @@ class AdaptSubclassTest(unittest.TestCase):
@restore_types
def test_adapt_most_specific(self):
- class A(object):
+ class A:
pass
class B(A):
@@ -453,19 +395,6 @@ class AdaptSubclassTest(unittest.TestCase):
register_adapter(B, lambda b: AsIs("b"))
self.assertEqual(b'b', adapt(C()).getquoted())
- @testutils.skip_from_python(3)
- @restore_types
- def test_no_mro_no_joy(self):
- class A:
- pass
-
- class B(A):
- pass
-
- register_adapter(A, lambda a: AsIs("a"))
- self.assertRaises(psycopg2.ProgrammingError, adapt, B())
-
- @testutils.skip_before_python(3)
@restore_types
def test_adapt_subtype_3(self):
class A:
@@ -512,10 +441,7 @@ class ByteaParserTest(unittest.TestCase):
if rv is None:
return None
- if PY2:
- return str(rv)
- else:
- return rv.tobytes()
+ return rv.tobytes()
def test_null(self):
rv = self.cast(None)
@@ -536,10 +462,7 @@ class ByteaParserTest(unittest.TestCase):
buf = buf.upper()
buf = '\\x' + buf
rv = self.cast(buf.encode('utf8'))
- if PY2:
- self.assertEqual(rv, ''.join(map(chr, range(256))))
- else:
- self.assertEqual(rv, bytes(range(256)))
+ self.assertEqual(rv, bytes(range(256)))
def test_full_hex_upper(self):
return self.test_full_hex(upper=True)
@@ -547,10 +470,7 @@ class ByteaParserTest(unittest.TestCase):
def test_full_escaped_octal(self):
buf = ''.join(("\\%03o" % i) for i in range(256))
rv = self.cast(buf.encode('utf8'))
- if PY2:
- self.assertEqual(rv, ''.join(map(chr, range(256))))
- else:
- self.assertEqual(rv, bytes(range(256)))
+ self.assertEqual(rv, bytes(range(256)))
def test_escaped_mixed(self):
buf = ''.join(("\\%03o" % i) for i in range(32))
@@ -558,12 +478,8 @@ class ByteaParserTest(unittest.TestCase):
buf += ''.join('\\' + c for c in string.ascii_letters)
buf += '\\\\'
rv = self.cast(buf.encode('utf8'))
- if PY2:
- tgt = ''.join(map(chr, range(32))) \
- + string.ascii_letters * 2 + '\\'
- else:
- tgt = bytes(range(32)) + \
- (string.ascii_letters * 2 + '\\').encode('ascii')
+ tgt = bytes(range(32)) + \
+ (string.ascii_letters * 2 + '\\').encode('ascii')
self.assertEqual(rv, tgt)
diff --git a/tests/test_types_extras.py b/tests/test_types_extras.py
index b3819c3..c653020 100755
--- a/tests/test_types_extras.py
+++ b/tests/test_types_extras.py
@@ -25,8 +25,8 @@ from functools import wraps
from pickle import dumps, loads
import unittest
-from .testutils import (PY2, text_type, skip_if_no_uuid, skip_before_postgres,
- ConnectingTestCase, py3_raises_typeerror, slow, skip_from_python,
+from .testutils import (skip_if_no_uuid, skip_before_postgres,
+ ConnectingTestCase, raises_typeerror, slow,
restore_types, skip_if_crdb, crdb_version)
import psycopg2
@@ -110,13 +110,13 @@ class TypesExtrasTests(ConnectingTestCase):
self.assertQuotedEqual(a.getquoted(), b"'192.168.1.0/24'::inet")
# adapts ok with unicode too
- i = Inet(u"192.168.1.0/24")
+ i = Inet("192.168.1.0/24")
a = psycopg2.extensions.adapt(i)
a.prepare(self.conn)
self.assertQuotedEqual(a.getquoted(), b"'192.168.1.0/24'::inet")
def test_adapt_fail(self):
- class Foo(object):
+ class Foo:
pass
self.assertRaises(psycopg2.ProgrammingError,
psycopg2.extensions.adapt, Foo(), ext.ISQLQuote, None)
@@ -151,7 +151,7 @@ class HstoreTestCase(ConnectingTestCase):
o = {'a': '1', 'b': "'", 'c': None}
if self.conn.encoding == 'UTF8':
- o['d'] = u'\xe0'
+ o['d'] = '\xe0'
a = HstoreAdapter(o)
a.prepare(self.conn)
@@ -166,7 +166,7 @@ class HstoreTestCase(ConnectingTestCase):
self.assertQuotedEqual(ii[1], b"('b' => '''')")
self.assertQuotedEqual(ii[2], b"('c' => NULL)")
if 'd' in o:
- encc = u'\xe0'.encode(psycopg2.extensions.encodings[self.conn.encoding])
+ encc = '\xe0'.encode(psycopg2.extensions.encodings[self.conn.encoding])
self.assertQuotedEqual(ii[3], b"('d' => '" + encc + b"')")
def test_adapt_9(self):
@@ -175,7 +175,7 @@ class HstoreTestCase(ConnectingTestCase):
o = {'a': '1', 'b': "'", 'c': None}
if self.conn.encoding == 'UTF8':
- o['d'] = u'\xe0'
+ o['d'] = '\xe0'
a = HstoreAdapter(o)
a.prepare(self.conn)
@@ -197,7 +197,7 @@ class HstoreTestCase(ConnectingTestCase):
self.assertQuotedEqual(ii[2][0], b"'c'")
self.assertQuotedEqual(ii[2][1], b"NULL")
if 'd' in o:
- encc = u'\xe0'.encode(psycopg2.extensions.encodings[self.conn.encoding])
+ encc = '\xe0'.encode(psycopg2.extensions.encodings[self.conn.encoding])
self.assertQuotedEqual(ii[3][0], b"'d'")
self.assertQuotedEqual(ii[3][1], b"'" + encc + b"'")
@@ -250,19 +250,6 @@ class HstoreTestCase(ConnectingTestCase):
self.assertEqual(t[2], {'a': 'b'})
@skip_if_no_hstore
- @skip_from_python(3)
- def test_register_unicode(self):
- register_hstore(self.conn, unicode=True)
- cur = self.conn.cursor()
- cur.execute("select null::hstore, ''::hstore, 'a => b'::hstore")
- t = cur.fetchone()
- self.assert_(t[0] is None)
- self.assertEqual(t[1], {})
- self.assertEqual(t[2], {u'a': u'b'})
- self.assert_(isinstance(t[2].keys()[0], unicode))
- self.assert_(isinstance(t[2].values()[0], unicode))
-
- @skip_if_no_hstore
@restore_types
def test_register_globally(self):
HstoreAdapter.get_oids(self.conn)
@@ -297,38 +284,12 @@ class HstoreTestCase(ConnectingTestCase):
ok({''.join(ab): ''.join(ab)})
self.conn.set_client_encoding('latin1')
- if PY2:
- ab = map(chr, range(32, 127) + range(160, 255))
- else:
- ab = bytes(list(range(32, 127)) + list(range(160, 255))).decode('latin1')
+ ab = bytes(list(range(32, 127)) + list(range(160, 255))).decode('latin1')
ok({''.join(ab): ''.join(ab)})
ok(dict(zip(ab, ab)))
@skip_if_no_hstore
- @skip_from_python(3)
- def test_roundtrip_unicode(self):
- register_hstore(self.conn, unicode=True)
- cur = self.conn.cursor()
-
- def ok(d):
- cur.execute("select %s", (d,))
- d1 = cur.fetchone()[0]
- self.assertEqual(len(d), len(d1))
- for k, v in d1.iteritems():
- self.assert_(k in d, k)
- self.assertEqual(d[k], v)
- self.assert_(isinstance(k, unicode))
- self.assert_(v is None or isinstance(v, unicode))
-
- ok({})
- ok({'a': 'b', 'c': None, 'd': u'\u20ac', u'\u2603': 'e'})
-
- ab = map(unichr, range(1, 1024))
- ok({u''.join(ab): u''.join(ab)})
- ok(dict(zip(ab, ab)))
-
- @skip_if_no_hstore
@restore_types
def test_oid(self):
cur = self.conn.cursor()
@@ -356,10 +317,7 @@ class HstoreTestCase(ConnectingTestCase):
ds.append({''.join(ab): ''.join(ab)})
self.conn.set_client_encoding('latin1')
- if PY2:
- ab = map(chr, range(32, 127) + range(160, 255))
- else:
- ab = bytes(list(range(32, 127)) + list(range(160, 255))).decode('latin1')
+ ab = bytes(list(range(32, 127)) + list(range(160, 255))).decode('latin1')
ds.append({''.join(ab): ''.join(ab)})
ds.append(dict(zip(ab, ab)))
@@ -443,7 +401,7 @@ class AdaptTypeTestCase(ConnectingTestCase):
def test_none_fast_path(self):
# the None adapter is not actually invoked in regular adaptation
- class WonkyAdapter(object):
+ class WonkyAdapter:
def __init__(self, obj):
pass
@@ -753,11 +711,11 @@ class AdaptTypeTestCase(ConnectingTestCase):
def _create_type(self, name, fields):
curs = self.conn.cursor()
try:
- curs.execute("drop type %s cascade;" % name)
+ curs.execute(f"drop type {name} cascade;")
except psycopg2.ProgrammingError:
self.conn.rollback()
- curs.execute("create type %s as (%s);" % (name,
+ curs.execute("create type {} as ({});".format(name,
", ".join(["%s %s" % p for p in fields])))
if '.' in name:
schema, name = name.split('.')
@@ -792,7 +750,7 @@ def skip_if_no_json_type(f):
class JsonTestCase(ConnectingTestCase):
def test_adapt(self):
objs = [None, "te'xt", 123, 123.45,
- u'\xe0\u20ac', ['a', 100], {'a': 100}]
+ '\xe0\u20ac', ['a', 100], {'a': 100}]
curs = self.conn.cursor()
for obj in enumerate(objs):
@@ -947,7 +905,7 @@ class JsonTestCase(ConnectingTestCase):
self.assertEqual(data['b'], None)
def test_str(self):
- snowman = u"\u2603"
+ snowman = "\u2603"
obj = {'a': [1, 2, snowman]}
j = psycopg2.extensions.adapt(psycopg2.extras.Json(obj))
s = str(j)
@@ -1238,9 +1196,9 @@ class RangeTestCase(unittest.TestCase):
self.assert_(not Range() < Range())
self.assert_(not Range(empty=True) < Range(empty=True))
self.assert_(not Range(1, 2) < Range(1, 2))
- with py3_raises_typeerror():
+ with raises_typeerror():
self.assert_(1 < Range(1, 2))
- with py3_raises_typeerror():
+ with raises_typeerror():
self.assert_(not Range(1, 2) < 1)
def test_gt_ordering(self):
@@ -1253,9 +1211,9 @@ class RangeTestCase(unittest.TestCase):
self.assert_(not Range() > Range())
self.assert_(not Range(empty=True) > Range(empty=True))
self.assert_(not Range(1, 2) > Range(1, 2))
- with py3_raises_typeerror():
+ with raises_typeerror():
self.assert_(not 1 > Range(1, 2))
- with py3_raises_typeerror():
+ with raises_typeerror():
self.assert_(Range(1, 2) > 1)
def test_le_ordering(self):
@@ -1268,9 +1226,9 @@ class RangeTestCase(unittest.TestCase):
self.assert_(Range() <= Range())
self.assert_(Range(empty=True) <= Range(empty=True))
self.assert_(Range(1, 2) <= Range(1, 2))
- with py3_raises_typeerror():
+ with raises_typeerror():
self.assert_(1 <= Range(1, 2))
- with py3_raises_typeerror():
+ with raises_typeerror():
self.assert_(not Range(1, 2) <= 1)
def test_ge_ordering(self):
@@ -1283,9 +1241,9 @@ class RangeTestCase(unittest.TestCase):
self.assert_(Range() >= Range())
self.assert_(Range(empty=True) >= Range(empty=True))
self.assert_(Range(1, 2) >= Range(1, 2))
- with py3_raises_typeerror():
+ with raises_typeerror():
self.assert_(not 1 >= Range(1, 2))
- with py3_raises_typeerror():
+ with raises_typeerror():
self.assert_(Range(1, 2) >= 1)
def test_pickling(self):
@@ -1303,20 +1261,20 @@ class RangeTestCase(unittest.TestCase):
# Using the "u" prefix to make sure we have the proper return types in
# Python2
expected = [
- u'(0, 4)',
- u'[0, 4]',
- u'(0, 4]',
- u'[0, 4)',
- u'empty',
+ '(0, 4)',
+ '[0, 4]',
+ '(0, 4]',
+ '[0, 4)',
+ 'empty',
]
results = []
for bounds in ('()', '[]', '(]', '[)'):
r = Range(0, 4, bounds=bounds)
- results.append(text_type(r))
+ results.append(str(r))
r = Range(empty=True)
- results.append(text_type(r))
+ results.append(str(r))
self.assertEqual(results, expected)
def test_str_datetime(self):
@@ -1327,8 +1285,8 @@ class RangeTestCase(unittest.TestCase):
tz = FixedOffsetTimezone(-5 * 60, "EST")
r = DateTimeTZRange(datetime(2010, 1, 1, tzinfo=tz),
datetime(2011, 1, 1, tzinfo=tz))
- expected = u'[2010-01-01 00:00:00-05:00, 2011-01-01 00:00:00-05:00)'
- result = text_type(r)
+ expected = '[2010-01-01 00:00:00-05:00, 2011-01-01 00:00:00-05:00)'
+ result = str(r)
self.assertEqual(result, expected)
@@ -1342,14 +1300,14 @@ class RangeCasterTestCase(ConnectingTestCase):
def test_cast_null(self):
cur = self.conn.cursor()
for type in self.builtin_ranges:
- cur.execute("select NULL::%s" % type)
+ cur.execute(f"select NULL::{type}")
r = cur.fetchone()[0]
self.assertEqual(r, None)
def test_cast_empty(self):
cur = self.conn.cursor()
for type in self.builtin_ranges:
- cur.execute("select 'empty'::%s" % type)
+ cur.execute(f"select 'empty'::{type}")
r = cur.fetchone()[0]
self.assert_(isinstance(r, Range), type)
self.assert_(r.isempty)
@@ -1357,7 +1315,7 @@ class RangeCasterTestCase(ConnectingTestCase):
def test_cast_inf(self):
cur = self.conn.cursor()
for type in self.builtin_ranges:
- cur.execute("select '(,)'::%s" % type)
+ cur.execute(f"select '(,)'::{type}")
r = cur.fetchone()[0]
self.assert_(isinstance(r, Range), type)
self.assert_(not r.isempty)
@@ -1367,7 +1325,7 @@ class RangeCasterTestCase(ConnectingTestCase):
def test_cast_numbers(self):
cur = self.conn.cursor()
for type in ('int4range', 'int8range'):
- cur.execute("select '(10,20)'::%s" % type)
+ cur.execute(f"select '(10,20)'::{type}")
r = cur.fetchone()[0]
self.assert_(isinstance(r, NumericRange))
self.assert_(not r.isempty)
diff --git a/tests/test_with.py b/tests/test_with.py
index 9e501f2..984602b 100755
--- a/tests/test_with.py
+++ b/tests/test_with.py
@@ -117,7 +117,7 @@ class WithConnectionTestCase(WithTestCase):
class MyConn(ext.connection):
def commit(self):
commits.append(None)
- super(MyConn, self).commit()
+ super().commit()
with self.connect(connection_factory=MyConn) as conn:
curs = conn.cursor()
@@ -136,7 +136,7 @@ class WithConnectionTestCase(WithTestCase):
class MyConn(ext.connection):
def rollback(self):
rollbacks.append(None)
- super(MyConn, self).rollback()
+ super().rollback()
try:
with self.connect(connection_factory=MyConn) as conn:
@@ -195,7 +195,7 @@ class WithCursorTestCase(WithTestCase):
class MyCurs(ext.cursor):
def close(self):
closes.append(None)
- super(MyCurs, self).close()
+ super().close()
with self.conn.cursor(cursor_factory=MyCurs) as curs:
self.assert_(isinstance(curs, MyCurs))
diff --git a/tests/testconfig.py b/tests/testconfig.py
index 511f1fb..ed6132a 100644
--- a/tests/testconfig.py
+++ b/tests/testconfig.py
@@ -23,15 +23,15 @@ if green:
psycopg2.extensions.set_wait_callback(wait_callback)
# Construct a DSN to connect to the test database:
-dsn = 'dbname=%s' % dbname
+dsn = f'dbname={dbname}'
if dbhost is not None:
- dsn += ' host=%s' % dbhost
+ dsn += f' host={dbhost}'
if dbport is not None:
- dsn += ' port=%s' % dbport
+ dsn += f' port={dbport}'
if dbuser is not None:
- dsn += ' user=%s' % dbuser
+ dsn += f' user={dbuser}'
if dbpass is not None:
- dsn += ' password=%s' % dbpass
+ dsn += f' password={dbpass}'
# Don't run replication tests if REPL_DSN is not set, default to normal DSN if
# set to empty string.
diff --git a/tests/testutils.py b/tests/testutils.py
index b53b2dc..f0c3264 100644
--- a/tests/testutils.py
+++ b/tests/testutils.py
@@ -34,32 +34,16 @@ import platform
import unittest
from functools import wraps
from ctypes.util import find_library
+from io import StringIO # noqa
+from io import TextIOBase # noqa
+from importlib import reload # noqa
import psycopg2
import psycopg2.errors
import psycopg2.extensions
-from psycopg2.compat import PY2, PY3, string_types, text_type
from .testconfig import green, dsn, repl_dsn
-# Python 2/3 compatibility
-
-if PY2:
- # Python 2
- from StringIO import StringIO
- TextIOBase = object
- long = long
- reload = reload
- unichr = unichr
-
-else:
- # Python 3
- from io import StringIO # noqa
- from io import TextIOBase # noqa
- from importlib import reload # noqa
- long = int
- unichr = chr
-
# Silence warnings caused by the stubbornness of the Python unittest
# maintainers
@@ -102,7 +86,7 @@ class ConnectingTestCase(unittest.TestCase):
def assertQuotedEqual(self, first, second, msg=None):
"""Compare two quoted strings disregarding eventual E'' quotes"""
def f(s):
- if isinstance(s, text_type):
+ if isinstance(s, str):
return re.sub(r"\bE'", "'", s)
elif isinstance(first, bytes):
return re.sub(br"\bE'", b"'", s)
@@ -116,8 +100,7 @@ class ConnectingTestCase(unittest.TestCase):
self._conns
except AttributeError as e:
raise AttributeError(
- "%s (did you forget to call ConnectingTestCase.setUp()?)"
- % e)
+ f"{e} (did you forget to call ConnectingTestCase.setUp()?)")
if 'dsn' in kwargs:
conninfo = kwargs.pop('dsn')
@@ -150,7 +133,7 @@ class ConnectingTestCase(unittest.TestCase):
# Otherwise we tried to run some bad operation in the connection
# (e.g. bug #482) and we'd rather know that.
if e.pgcode is None:
- return self.skipTest("replication db not configured: %s" % e)
+ return self.skipTest(f"replication db not configured: {e}")
else:
raise
@@ -326,7 +309,7 @@ def skip_before_libpq(*ver):
v = libpq_version()
decorator = unittest.skipIf(
v < int("%d%02d%02d" % ver),
- "skipped because libpq %d" % v,
+ f"skipped because libpq {v}",
)
return decorator(cls)
return skip_before_libpq_
@@ -340,7 +323,7 @@ def skip_after_libpq(*ver):
v = libpq_version()
decorator = unittest.skipIf(
v >= int("%d%02d%02d" % ver),
- "skipped because libpq %s" % v,
+ f"skipped because libpq {v}",
)
return decorator(cls)
return skip_after_libpq_
@@ -351,8 +334,7 @@ def skip_before_python(*ver):
def skip_before_python_(cls):
decorator = unittest.skipIf(
sys.version_info[:len(ver)] < ver,
- "skipped because Python %s"
- % ".".join(map(str, sys.version_info[:len(ver)])),
+ f"skipped because Python {'.'.join(map(str, sys.version_info[:len(ver)]))}",
)
return decorator(cls)
return skip_before_python_
@@ -363,8 +345,7 @@ def skip_from_python(*ver):
def skip_from_python_(cls):
decorator = unittest.skipIf(
sys.version_info[:len(ver)] >= ver,
- "skipped because Python %s"
- % ".".join(map(str, sys.version_info[:len(ver)])),
+ f"skipped because Python {'.'.join(map(str, sys.version_info[:len(ver)]))}",
)
return decorator(cls)
return skip_from_python_
@@ -431,7 +412,7 @@ def crdb_version(conn, __crdb_version=[]):
m = re.search(r"\bv(\d+)\.(\d+)\.(\d+)", sver)
if not m:
raise ValueError(
- "can't parse CockroachDB version from %s" % sver)
+ f"can't parse CockroachDB version from {sver}")
ver = int(m.group(1)) * 10000 + int(m.group(2)) * 100 + int(m.group(3))
__crdb_version.append(ver)
@@ -454,8 +435,8 @@ def skip_if_crdb(reason, conn=None, version=None):
"== 20.1.3": the test will be skipped only if the version matches.
"""
- if not isinstance(reason, string_types):
- raise TypeError("reason should be a string, got %r instead" % reason)
+ if not isinstance(reason, str):
+ raise TypeError(f"reason should be a string, got {reason!r} instead")
if conn is not None:
ver = crdb_version(conn)
@@ -465,7 +446,7 @@ def skip_if_crdb(reason, conn=None, version=None):
"%s (https://github.com/cockroachdb/cockroach/issues/%s)"
% (reason, crdb_reasons[reason]))
raise unittest.SkipTest(
- "not supported on CockroachDB %s: %s" % (ver, reason))
+ f"not supported on CockroachDB {ver}: {reason}")
@decorate_all_tests
def skip_if_crdb_(f):
@@ -519,14 +500,13 @@ def _crdb_match_version(version, pattern):
return op(version, ref)
-class py3_raises_typeerror(object):
+class raises_typeerror:
def __enter__(self):
pass
def __exit__(self, type, exc, tb):
- if PY3:
- assert type is TypeError
- return True
+ assert type is TypeError
+ return True
def slow(f):
diff --git a/tox.ini b/tox.ini
index b8d7d6d..a2ab53c 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
[tox]
-envlist = py{27,36,37,38,39}
+envlist = py{36,37,38,39}
[testenv]
commands = make check