author     Hugo van Kemenade <hugovk@users.noreply.github.com>  2020-11-17 21:37:42 +0200
committer  Hugo van Kemenade <hugovk@users.noreply.github.com>  2020-11-17 22:22:11 +0200
commit     7babeccbececd9dd02642dfd193a3c3a0fc0dbe7 (patch)
tree       fe83142e2763e9d922edbb97b16db6f46bb84a52
parent     6c48b63ae4a70da6dbbc5b6eef20806d7f505950 (diff)
download   psycopg2-7babeccbececd9dd02642dfd193a3c3a0fc0dbe7.tar.gz
Upgrade Python syntax with pyupgrade --py36-plus
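
For context, a minimal before/after sketch (not part of the patch) of the rewrites pyupgrade applies under --py36-plus; the hunks below repeat these patterns throughout the code base. The invocation and the class/variable names here are illustrative assumptions, not taken from the commit.

# Illustrative only: one possible invocation over the tracked Python files
# (assumed; the commit does not record the exact command line):
#   pyupgrade --py36-plus $(git ls-files -- '*.py')

class Base:                        # was: class Base(object):
    def __init__(self, name):
        self.name = name

    def __repr__(self):
        # was: return "%s(%r)" % (self.__class__.__name__, self.name)
        return f"{self.__class__.__name__}({self.name!r})"

class Child(Base):
    def __init__(self, name):
        # was: super(Child, self).__init__(name)
        super().__init__(name)

if __name__ == "__main__":
    snowman = "\u2603"             # was: u"\u2603" (redundant u-prefix dropped)
    print(Child(snowman))
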
-rw-r--r--  doc/src/conf.py                        |  9
-rwxr-xr-x  doc/src/tools/lib/dbapi_extension.py   |  1
-rw-r--r--  doc/src/tools/lib/sql_role.py          |  1
-rw-r--r--  doc/src/tools/lib/ticket_role.py       |  1
-rw-r--r--  doc/src/tools/make_sqlstate_docs.py    |  3
-rw-r--r--  lib/_json.py                           |  2
-rw-r--r--  lib/_range.py                          | 10
-rw-r--r--  lib/extensions.py                      |  6
-rw-r--r--  lib/extras.py                          | 92
-rw-r--r--  lib/pool.py                            |  2
-rw-r--r--  lib/sql.py                             | 18
-rw-r--r--  lib/tz.py                              |  2
-rwxr-xr-x  scripts/appveyor.py                    |  2
-rwxr-xr-x  scripts/make_errorcodes.py             |  7
-rwxr-xr-x  scripts/make_errors.py                 |  3
-rwxr-xr-x  scripts/refcounter.py                  |  1
-rw-r--r--  setup.py                               |  4
-rw-r--r--  tests/dbapi20.py                       |  2
-rwxr-xr-x  tests/test_async.py                    |  3
-rwxr-xr-x  tests/test_async_keyword.py            |  1
-rwxr-xr-x  tests/test_cancel.py                   |  1
-rwxr-xr-x  tests/test_connection.py               | 28
-rwxr-xr-x  tests/test_copy.py                     | 16
-rwxr-xr-x  tests/test_cursor.py                   | 20
-rwxr-xr-x  tests/test_dates.py                    |  2
-rwxr-xr-x  tests/test_errcodes.py                 |  2
-rwxr-xr-x  tests/test_fast_executemany.py         | 10
-rwxr-xr-x  tests/test_green.py                    |  2
-rwxr-xr-x  tests/test_ipaddress.py                |  1
-rwxr-xr-x  tests/test_lobject.py                  | 12
-rwxr-xr-x  tests/test_module.py                   |  6
-rwxr-xr-x  tests/test_notify.py                   | 16
-rwxr-xr-x  tests/test_quote.py                    | 16
-rwxr-xr-x  tests/test_replication.py              | 10
-rwxr-xr-x  tests/test_sql.py                      | 10
-rwxr-xr-x  tests/test_types_basic.py              |  6
-rwxr-xr-x  tests/test_types_extras.py             | 32
-rwxr-xr-x  tests/test_with.py                     |  6
-rw-r--r--  tests/testutils.py                     |  4
39 files changed, 179 insertions(+), 191 deletions(-)
diff --git a/doc/src/conf.py b/doc/src/conf.py
index 1b65c34..94f154e 100644
--- a/doc/src/conf.py
+++ b/doc/src/conf.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
#
# Psycopg documentation build configuration file, created by
# sphinx-quickstart on Sun Feb 7 13:48:41 2010.
@@ -48,9 +47,9 @@ source_suffix = '.rst'
master_doc = 'index'
# General information about the project.
-project = u'Psycopg'
+project = 'Psycopg'
copyright = (
- u'2001-2020, Federico Di Gregorio, Daniele Varrazzo, The Psycopg Team'
+ '2001-2020, Federico Di Gregorio, Daniele Varrazzo, The Psycopg Team'
)
# The version info for the project you're documenting, acts as replacement for
@@ -235,8 +234,8 @@ latex_documents = [
(
'index',
'psycopg.tex',
- u'Psycopg Documentation',
- u'Federico Di Gregorio',
+ 'Psycopg Documentation',
+ 'Federico Di Gregorio',
'manual',
)
]
diff --git a/doc/src/tools/lib/dbapi_extension.py b/doc/src/tools/lib/dbapi_extension.py
index 23d9165..7fc776a 100755
--- a/doc/src/tools/lib/dbapi_extension.py
+++ b/doc/src/tools/lib/dbapi_extension.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
"""
extension
~~~~~~~~~
diff --git a/doc/src/tools/lib/sql_role.py b/doc/src/tools/lib/sql_role.py
index 43347b4..1731546 100644
--- a/doc/src/tools/lib/sql_role.py
+++ b/doc/src/tools/lib/sql_role.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
"""
sql role
~~~~~~~~
diff --git a/doc/src/tools/lib/ticket_role.py b/doc/src/tools/lib/ticket_role.py
index 0ee3d63..8ba87cb 100644
--- a/doc/src/tools/lib/ticket_role.py
+++ b/doc/src/tools/lib/ticket_role.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
"""
ticket role
~~~~~~~~~~~
diff --git a/doc/src/tools/make_sqlstate_docs.py b/doc/src/tools/make_sqlstate_docs.py
index 3e1a026..ce6d2e7 100644
--- a/doc/src/tools/make_sqlstate_docs.py
+++ b/doc/src/tools/make_sqlstate_docs.py
@@ -2,7 +2,6 @@
"""Create the docs table of the sqlstate errors.
"""
-from __future__ import print_function
import re
import sys
@@ -40,7 +39,7 @@ def main():
for l in lines:
cls = l.sqlstate[:2] if l.sqlstate else None
if cls and cls != sqlclass:
- print("**Class %s**: %s" % (cls, sqlclasses[cls]))
+ print("**Class {}**: {}".format(cls, sqlclasses[cls]))
print(h1)
sqlclass = cls
diff --git a/lib/_json.py b/lib/_json.py
index e5958b9..1664d06 100644
--- a/lib/_json.py
+++ b/lib/_json.py
@@ -43,7 +43,7 @@ JSONB_OID = 3802
JSONBARRAY_OID = 3807
-class Json(object):
+class Json:
"""
An `~psycopg2.extensions.ISQLQuote` wrapper to adapt a Python object to
:sql:`json` data type.
diff --git a/lib/_range.py b/lib/_range.py
index 499f501..1db11f8 100644
--- a/lib/_range.py
+++ b/lib/_range.py
@@ -32,7 +32,7 @@ from psycopg2.extensions import ISQLQuote, adapt, register_adapter
from psycopg2.extensions import new_type, new_array_type, register_type
-class Range(object):
+class Range:
"""Python representation for a PostgreSQL |range|_ type.
:param lower: lower bound for the range. `!None` means unbound
@@ -59,7 +59,7 @@ class Range(object):
if self._bounds is None:
return "%s(empty=True)" % self.__class__.__name__
else:
- return "%s(%r, %r, %r)" % (self.__class__.__name__,
+ return "{}({!r}, {!r}, {!r})".format(self.__class__.__name__,
self._lower, self._upper, self._bounds)
def __str__(self):
@@ -238,7 +238,7 @@ def register_range(pgrange, pyrange, conn_or_curs, globally=False):
return caster
-class RangeAdapter(object):
+class RangeAdapter:
"""`ISQLQuote` adapter for `Range` subclasses.
This is an abstract class: concrete classes must set a `name` class
@@ -286,7 +286,7 @@ class RangeAdapter(object):
+ b", '" + r._bounds.encode('utf8') + b"')"
-class RangeCaster(object):
+class RangeCaster:
"""Helper class to convert between `Range` and PostgreSQL range types.
Objects of this class are usually created by `register_range()`. Manual
@@ -503,7 +503,7 @@ class NumberRangeAdapter(RangeAdapter):
else:
upper = ''
- return ("'%s%s,%s%s'" % (
+ return ("'{}{},{}{}'".format(
r._bounds[0], lower, upper, r._bounds[1])).encode('ascii')
diff --git a/lib/extensions.py b/lib/extensions.py
index c4a6618..1de6607 100644
--- a/lib/extensions.py
+++ b/lib/extensions.py
@@ -106,7 +106,7 @@ def register_adapter(typ, callable):
# The SQL_IN class is the official adapter for tuples starting from 2.0.6.
-class SQL_IN(object):
+class SQL_IN:
"""Adapt any iterable to an SQL quotable object."""
def __init__(self, seq):
self._seq = seq
@@ -130,7 +130,7 @@ class SQL_IN(object):
return str(self.getquoted())
-class NoneAdapter(object):
+class NoneAdapter:
"""Adapt None to NULL.
This adapter is not used normally as a fast path in mogrify uses NULL,
@@ -168,7 +168,7 @@ def make_dsn(dsn=None, **kwargs):
tmp.update(kwargs)
kwargs = tmp
- dsn = " ".join(["%s=%s" % (k, _param_escape(str(v)))
+ dsn = " ".join(["{}={}".format(k, _param_escape(str(v)))
for (k, v) in kwargs.items()])
# verify that the returned dsn is valid
diff --git a/lib/extras.py b/lib/extras.py
index 3b42be7..3f1da84 100644
--- a/lib/extras.py
+++ b/lib/extras.py
@@ -72,47 +72,47 @@ class DictCursorBase(_cursor):
else:
raise NotImplementedError(
"DictCursorBase can't be instantiated without a row factory.")
- super(DictCursorBase, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self._query_executed = False
self._prefetch = False
self.row_factory = row_factory
def fetchone(self):
if self._prefetch:
- res = super(DictCursorBase, self).fetchone()
+ res = super().fetchone()
if self._query_executed:
self._build_index()
if not self._prefetch:
- res = super(DictCursorBase, self).fetchone()
+ res = super().fetchone()
return res
def fetchmany(self, size=None):
if self._prefetch:
- res = super(DictCursorBase, self).fetchmany(size)
+ res = super().fetchmany(size)
if self._query_executed:
self._build_index()
if not self._prefetch:
- res = super(DictCursorBase, self).fetchmany(size)
+ res = super().fetchmany(size)
return res
def fetchall(self):
if self._prefetch:
- res = super(DictCursorBase, self).fetchall()
+ res = super().fetchall()
if self._query_executed:
self._build_index()
if not self._prefetch:
- res = super(DictCursorBase, self).fetchall()
+ res = super().fetchall()
return res
def __iter__(self):
try:
if self._prefetch:
- res = super(DictCursorBase, self).__iter__()
+ res = super().__iter__()
first = next(res)
if self._query_executed:
self._build_index()
if not self._prefetch:
- res = super(DictCursorBase, self).__iter__()
+ res = super().__iter__()
first = next(res)
yield first
@@ -126,7 +126,7 @@ class DictConnection(_connection):
"""A connection that uses `DictCursor` automatically."""
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', self.cursor_factory or DictCursor)
- return super(DictConnection, self).cursor(*args, **kwargs)
+ return super().cursor(*args, **kwargs)
class DictCursor(DictCursorBase):
@@ -137,18 +137,18 @@ class DictCursor(DictCursorBase):
def __init__(self, *args, **kwargs):
kwargs['row_factory'] = DictRow
- super(DictCursor, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self._prefetch = True
def execute(self, query, vars=None):
self.index = OrderedDict()
self._query_executed = True
- return super(DictCursor, self).execute(query, vars)
+ return super().execute(query, vars)
def callproc(self, procname, vars=None):
self.index = OrderedDict()
self._query_executed = True
- return super(DictCursor, self).callproc(procname, vars)
+ return super().callproc(procname, vars)
def _build_index(self):
if self._query_executed and self.description:
@@ -169,22 +169,22 @@ class DictRow(list):
def __getitem__(self, x):
if not isinstance(x, (int, slice)):
x = self._index[x]
- return super(DictRow, self).__getitem__(x)
+ return super().__getitem__(x)
def __setitem__(self, x, v):
if not isinstance(x, (int, slice)):
x = self._index[x]
- super(DictRow, self).__setitem__(x, v)
+ super().__setitem__(x, v)
def items(self):
- g = super(DictRow, self).__getitem__
+ g = super().__getitem__
return ((n, g(self._index[n])) for n in self._index)
def keys(self):
return iter(self._index)
def values(self):
- g = super(DictRow, self).__getitem__
+ g = super().__getitem__
return (g(self._index[n]) for n in self._index)
def get(self, x, default=None):
@@ -201,7 +201,7 @@ class DictRow(list):
def __reduce__(self):
# this is apparently useless, but it fixes #1073
- return super(DictRow, self).__reduce__()
+ return super().__reduce__()
def __getstate__(self):
return self[:], self._index.copy()
@@ -215,7 +215,7 @@ class RealDictConnection(_connection):
"""A connection that uses `RealDictCursor` automatically."""
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', self.cursor_factory or RealDictCursor)
- return super(RealDictConnection, self).cursor(*args, **kwargs)
+ return super().cursor(*args, **kwargs)
class RealDictCursor(DictCursorBase):
@@ -228,17 +228,17 @@ class RealDictCursor(DictCursorBase):
"""
def __init__(self, *args, **kwargs):
kwargs['row_factory'] = RealDictRow
- super(RealDictCursor, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def execute(self, query, vars=None):
self.column_mapping = []
self._query_executed = True
- return super(RealDictCursor, self).execute(query, vars)
+ return super().execute(query, vars)
def callproc(self, procname, vars=None):
self.column_mapping = []
self._query_executed = True
- return super(RealDictCursor, self).callproc(procname, vars)
+ return super().callproc(procname, vars)
def _build_index(self):
if self._query_executed and self.description:
@@ -256,7 +256,7 @@ class RealDictRow(OrderedDict):
else:
cursor = None
- super(RealDictRow, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
if cursor is not None:
# Required for named cursors
@@ -272,20 +272,20 @@ class RealDictRow(OrderedDict):
if RealDictRow in self:
# We are in the row building phase
mapping = self[RealDictRow]
- super(RealDictRow, self).__setitem__(mapping[key], value)
+ super().__setitem__(mapping[key], value)
if key == len(mapping) - 1:
# Row building finished
del self[RealDictRow]
return
- super(RealDictRow, self).__setitem__(key, value)
+ super().__setitem__(key, value)
class NamedTupleConnection(_connection):
"""A connection that uses `NamedTupleCursor` automatically."""
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', self.cursor_factory or NamedTupleCursor)
- return super(NamedTupleConnection, self).cursor(*args, **kwargs)
+ return super().cursor(*args, **kwargs)
class NamedTupleCursor(_cursor):
@@ -309,18 +309,18 @@ class NamedTupleCursor(_cursor):
def execute(self, query, vars=None):
self.Record = None
- return super(NamedTupleCursor, self).execute(query, vars)
+ return super().execute(query, vars)
def executemany(self, query, vars):
self.Record = None
- return super(NamedTupleCursor, self).executemany(query, vars)
+ return super().executemany(query, vars)
def callproc(self, procname, vars=None):
self.Record = None
- return super(NamedTupleCursor, self).callproc(procname, vars)
+ return super().callproc(procname, vars)
def fetchone(self):
- t = super(NamedTupleCursor, self).fetchone()
+ t = super().fetchone()
if t is not None:
nt = self.Record
if nt is None:
@@ -328,14 +328,14 @@ class NamedTupleCursor(_cursor):
return nt._make(t)
def fetchmany(self, size=None):
- ts = super(NamedTupleCursor, self).fetchmany(size)
+ ts = super().fetchmany(size)
nt = self.Record
if nt is None:
nt = self.Record = self._make_nt()
return list(map(nt._make, ts))
def fetchall(self):
- ts = super(NamedTupleCursor, self).fetchall()
+ ts = super().fetchall()
nt = self.Record
if nt is None:
nt = self.Record = self._make_nt()
@@ -343,7 +343,7 @@ class NamedTupleCursor(_cursor):
def __iter__(self):
try:
- it = super(NamedTupleCursor, self).__iter__()
+ it = super().__iter__()
t = next(it)
nt = self.Record
@@ -438,7 +438,7 @@ class LoggingConnection(_connection):
def cursor(self, *args, **kwargs):
self._check()
kwargs.setdefault('cursor_factory', self.cursor_factory or LoggingCursor)
- return super(LoggingConnection, self).cursor(*args, **kwargs)
+ return super().cursor(*args, **kwargs)
class LoggingCursor(_cursor):
@@ -446,13 +446,13 @@ class LoggingCursor(_cursor):
def execute(self, query, vars=None):
try:
- return super(LoggingCursor, self).execute(query, vars)
+ return super().execute(query, vars)
finally:
self.connection.log(self.query, self)
def callproc(self, procname, vars=None):
try:
- return super(LoggingCursor, self).callproc(procname, vars)
+ return super().callproc(procname, vars)
finally:
self.connection.log(self.query, self)
@@ -501,14 +501,14 @@ class LogicalReplicationConnection(_replicationConnection):
def __init__(self, *args, **kwargs):
kwargs['replication_type'] = REPLICATION_LOGICAL
- super(LogicalReplicationConnection, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
class PhysicalReplicationConnection(_replicationConnection):
def __init__(self, *args, **kwargs):
kwargs['replication_type'] = REPLICATION_PHYSICAL
- super(PhysicalReplicationConnection, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
class StopReplication(Exception):
@@ -592,9 +592,9 @@ class ReplicationCursor(_replicationCursor):
if type(start_lsn) is str:
lsn = start_lsn.split('/')
- lsn = "%X/%08X" % (int(lsn[0], 16), int(lsn[1], 16))
+ lsn = "{:X}/{:08X}".format(int(lsn[0], 16), int(lsn[1], 16))
else:
- lsn = "%X/%08X" % ((start_lsn >> 32) & 0xFFFFFFFF,
+ lsn = "{:X}/{:08X}".format((start_lsn >> 32) & 0xFFFFFFFF,
start_lsn & 0xFFFFFFFF)
command += lsn
@@ -615,7 +615,7 @@ class ReplicationCursor(_replicationCursor):
for k, v in options.items():
if not command.endswith('('):
command += ", "
- command += "%s %s" % (quote_ident(k, self), _A(str(v)))
+ command += "{} {}".format(quote_ident(k, self), _A(str(v)))
command += ")"
self.start_replication_expert(
@@ -628,7 +628,7 @@ class ReplicationCursor(_replicationCursor):
# a dbtype and adapter for Python UUID type
-class UUID_adapter(object):
+class UUID_adapter:
"""Adapt Python's uuid.UUID__ type to PostgreSQL's uuid__.
.. __: https://docs.python.org/library/uuid.html
@@ -683,7 +683,7 @@ def register_uuid(oids=None, conn_or_curs=None):
# a type, dbtype and adapter for PostgreSQL inet type
-class Inet(object):
+class Inet:
"""Wrap a string to allow for correct SQL-quoting of inet values.
Note that this adapter does NOT check the passed value to make
@@ -695,7 +695,7 @@ class Inet(object):
self.addr = addr
def __repr__(self):
- return "%s(%r)" % (self.__class__.__name__, self.addr)
+ return f"{self.__class__.__name__}({self.addr!r})"
def prepare(self, conn):
self._conn = conn
@@ -790,7 +790,7 @@ def _solve_conn_curs(conn_or_curs):
return conn, curs
-class HstoreAdapter(object):
+class HstoreAdapter:
"""Adapt a Python dict to the hstore syntax."""
def __init__(self, wrapped):
self.wrapped = wrapped
@@ -987,7 +987,7 @@ def register_hstore(conn_or_curs, globally=False, unicode=False,
_ext.register_type(HSTOREARRAY, not globally and conn_or_curs or None)
-class CompositeCaster(object):
+class CompositeCaster:
"""Helps conversion of a PostgreSQL composite type into a Python object.
The class is usually created by the `register_composite()` function.
diff --git a/lib/pool.py b/lib/pool.py
index 30a29c3..5b14a3a 100644
--- a/lib/pool.py
+++ b/lib/pool.py
@@ -33,7 +33,7 @@ class PoolError(psycopg2.Error):
pass
-class AbstractConnectionPool(object):
+class AbstractConnectionPool:
"""Generic key-based pooling code."""
def __init__(self, minconn, maxconn, *args, **kwargs):
diff --git a/lib/sql.py b/lib/sql.py
index 2077267..aeff748 100644
--- a/lib/sql.py
+++ b/lib/sql.py
@@ -32,7 +32,7 @@ from psycopg2 import extensions as ext
_formatter = string.Formatter()
-class Composable(object):
+class Composable:
"""
Abstract base class for objects that can be used to compose an SQL string.
@@ -50,7 +50,7 @@ class Composable(object):
self._wrapped = wrapped
def __repr__(self):
- return "%s(%r)" % (self.__class__.__name__, self._wrapped)
+ return f"{self.__class__.__name__}({self._wrapped!r})"
def as_string(self, context):
"""
@@ -109,7 +109,7 @@ class Composed(Composable):
"Composed elements must be Composable, got %r instead" % i)
wrapped.append(i)
- super(Composed, self).__init__(wrapped)
+ super().__init__(wrapped)
@property
def seq(self):
@@ -181,7 +181,7 @@ class SQL(Composable):
def __init__(self, string):
if not isinstance(string, str):
raise TypeError("SQL values must be strings")
- super(SQL, self).__init__(string)
+ super().__init__(string)
@property
def string(self):
@@ -326,7 +326,7 @@ class Identifier(Composable):
if not isinstance(s, str):
raise TypeError("SQL identifier parts must be strings")
- super(Identifier, self).__init__(strings)
+ super().__init__(strings)
@property
def strings(self):
@@ -344,7 +344,7 @@ class Identifier(Composable):
"the Identifier wraps more than one than one string")
def __repr__(self):
- return "%s(%s)" % (
+ return "{}({})".format(
self.__class__.__name__,
', '.join(map(repr, self._wrapped)))
@@ -432,7 +432,7 @@ class Placeholder(Composable):
elif name is not None:
raise TypeError("expected string or None as name, got %r" % name)
- super(Placeholder, self).__init__(name)
+ super().__init__(name)
@property
def name(self):
@@ -440,8 +440,8 @@ class Placeholder(Composable):
return self._wrapped
def __repr__(self):
- return "Placeholder(%r)" % (
- self._wrapped if self._wrapped is not None else '',)
+ return "Placeholder({!r})".format(
+ self._wrapped if self._wrapped is not None else '')
def as_string(self, context):
if self._wrapped is not None:
diff --git a/lib/tz.py b/lib/tz.py
index ccbe374..81cd8f8 100644
--- a/lib/tz.py
+++ b/lib/tz.py
@@ -65,7 +65,7 @@ class FixedOffsetTimezone(datetime.tzinfo):
try:
return cls._cache[key]
except KeyError:
- tz = super(FixedOffsetTimezone, cls).__new__(cls, offset, name)
+ tz = super().__new__(cls, offset, name)
cls._cache[key] = tz
return tz
diff --git a/scripts/appveyor.py b/scripts/appveyor.py
index 0ebccad..fd939cf 100755
--- a/scripts/appveyor.py
+++ b/scripts/appveyor.py
@@ -439,7 +439,7 @@ def check_libpq_version():
.decode('ascii')
.rstrip()
)
- assert want_ver == got_ver, "libpq version mismatch: %r != %r" % (
+ assert want_ver == got_ver, "libpq version mismatch: {!r} != {!r}".format(
want_ver,
got_ver,
)
diff --git a/scripts/make_errorcodes.py b/scripts/make_errorcodes.py
index e080077..49ae8d5 100755
--- a/scripts/make_errorcodes.py
+++ b/scripts/make_errorcodes.py
@@ -16,7 +16,6 @@ The script can be run at a new PostgreSQL release to refresh the module.
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
-from __future__ import print_function
import re
import sys
@@ -102,7 +101,7 @@ def fetch_errors(versions):
for version in versions:
print(version, file=sys.stderr)
tver = tuple(map(int, version.split()[0].split('.')))
- tag = '%s%s_STABLE' % (
+ tag = '{}{}_STABLE'.format(
(tver[0] >= 10 and 'REL_' or 'REL'),
version.replace('.', '_'))
c1, e1 = parse_errors_txt(errors_txt_url % tag)
@@ -136,7 +135,7 @@ def generate_module_data(classes, errors):
for clscode, clslabel in sorted(classes.items()):
err = clslabel.split(" - ")[1].split("(")[0] \
.strip().replace(" ", "_").replace('/', "_").upper()
- yield "CLASS_%s = %r" % (err, clscode)
+ yield f"CLASS_{err} = {clscode!r}"
seen = set()
@@ -148,7 +147,7 @@ def generate_module_data(classes, errors):
if errlabel in seen:
raise Exception("error label already seen: %s" % errlabel)
seen.add(errlabel)
- yield "%s = %r" % (errlabel, errcode)
+ yield f"{errlabel} = {errcode!r}"
if __name__ == '__main__':
diff --git a/scripts/make_errors.py b/scripts/make_errors.py
index 0d0a3a8..e36d00a 100755
--- a/scripts/make_errors.py
+++ b/scripts/make_errors.py
@@ -16,7 +16,6 @@ The script can be run at a new PostgreSQL release to refresh the module.
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
-from __future__ import print_function
import os
import re
@@ -86,7 +85,7 @@ def fetch_errors(versions):
for version in versions:
print(version, file=sys.stderr)
tver = tuple(map(int, version.split()[0].split('.')))
- tag = '%s%s_STABLE' % (
+ tag = '{}{}_STABLE'.format(
(tver[0] >= 10 and 'REL_' or 'REL'),
version.replace('.', '_'))
c1, e1 = parse_errors_txt(errors_txt_url % tag)
diff --git a/scripts/refcounter.py b/scripts/refcounter.py
index 5477c05..683a0c0 100755
--- a/scripts/refcounter.py
+++ b/scripts/refcounter.py
@@ -18,7 +18,6 @@ script exits with error 1.
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
-from __future__ import print_function
import argparse
import gc
diff --git a/setup.py b/setup.py
index 0731cff..80d7391 100644
--- a/setup.py
+++ b/setup.py
@@ -170,7 +170,7 @@ For further information please check the 'doc/src/install.rst' file (also at
try:
pg_inst_list_key = winreg.OpenKey(reg,
'SOFTWARE\\PostgreSQL\\Installations')
- except EnvironmentError:
+ except OSError:
# No PostgreSQL installation, as best as we can tell.
return None
@@ -178,7 +178,7 @@ For further information please check the 'doc/src/install.rst' file (also at
# Determine the name of the first subkey, if any:
try:
first_sub_key_name = winreg.EnumKey(pg_inst_list_key, 0)
- except EnvironmentError:
+ except OSError:
return None
pg_first_inst_key = winreg.OpenKey(reg,
diff --git a/tests/dbapi20.py b/tests/dbapi20.py
index d2f87da..b96af09 100644
--- a/tests/dbapi20.py
+++ b/tests/dbapi20.py
@@ -542,7 +542,7 @@ class DatabaseAPI20Test(unittest.TestCase):
tests.
'''
populate = [
- "insert into %sbooze values ('%s')" % (self.table_prefix,s)
+ f"insert into {self.table_prefix}booze values ('{s}')"
for s in self.samples
]
return populate
diff --git a/tests/test_async.py b/tests/test_async.py
index eb97bc9..fdc4224 100755
--- a/tests/test_async.py
+++ b/tests/test_async.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# test_async.py - unit test for asynchronous API
#
@@ -37,7 +36,7 @@ from .testutils import (ConnectingTestCase, StringIO, skip_before_postgres,
skip_if_crdb, crdb_version, slow)
-class PollableStub(object):
+class PollableStub:
"""A 'pollable' wrapper allowing analysis of the `poll()` calls."""
def __init__(self, pollable):
self.pollable = pollable
diff --git a/tests/test_async_keyword.py b/tests/test_async_keyword.py
index e112692..162db1c 100755
--- a/tests/test_async_keyword.py
+++ b/tests/test_async_keyword.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# test_async_keyword.py - test for objects using 'async' as attribute/param
#
diff --git a/tests/test_cancel.py b/tests/test_cancel.py
index de8af90..1f7d586 100755
--- a/tests/test_cancel.py
+++ b/tests/test_cancel.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# test_cancel.py - unit test for query cancellation
#
diff --git a/tests/test_connection.py b/tests/test_connection.py
index f4ea436..5fb7670 100755
--- a/tests/test_connection.py
+++ b/tests/test_connection.py
@@ -245,7 +245,7 @@ class ConnectionTests(ConnectingTestCase):
cur = self.conn.cursor()
ext.register_type(ext.UNICODE, cur)
cur.execute("select 'foo'::text;")
- self.assertEqual(cur.fetchone()[0], u'foo')
+ self.assertEqual(cur.fetchone()[0], 'foo')
def test_connect_nonnormal_envvar(self):
# We must perform encoding normalization at connection time
@@ -343,7 +343,7 @@ class ConnectionTests(ConnectingTestCase):
class SubConnection(ext.connection):
def __init__(self, dsn):
try:
- super(SubConnection, self).__init__(dsn)
+ super().__init__(dsn)
except Exception:
pass
@@ -388,7 +388,7 @@ import time
import psycopg2
def thread():
- conn = psycopg2.connect(%(dsn)r)
+ conn = psycopg2.connect({dsn!r})
curs = conn.cursor()
for i in range(10):
curs.execute("select 1")
@@ -396,11 +396,11 @@ def thread():
def process():
time.sleep(0.2)
-""" % {'dsn': dsn})
+""".format(dsn=dsn))
script = ("""\
import sys
-sys.path.insert(0, %(dir)r)
+sys.path.insert(0, {dir!r})
import time
import threading
import multiprocessing
@@ -411,7 +411,7 @@ t.start()
time.sleep(0.2)
multiprocessing.Process(target=mptest.process, name='myprocess').start()
t.join()
-""" % {'dir': dir})
+""".format(dir=dir))
out = sp.check_output(
[sys.executable, '-c', script], stderr=sp.STDOUT)
@@ -464,12 +464,12 @@ class ParseDsnTestCase(ConnectingTestCase):
self.assertTrue(raised, "ProgrammingError raised due to invalid URI")
def test_unicode_value(self):
- snowman = u"\u2603"
+ snowman = "\u2603"
d = ext.parse_dsn('dbname=' + snowman)
self.assertEqual(d['dbname'], snowman)
def test_unicode_key(self):
- snowman = u"\u2603"
+ snowman = "\u2603"
self.assertRaises(psycopg2.ProgrammingError, ext.parse_dsn,
snowman + '=' + snowman)
@@ -1227,7 +1227,7 @@ class ConnectionTwoPhaseTests(ConnectingTestCase):
def test_xid_unicode(self):
cnn = self.connect()
- x1 = cnn.xid(10, u'uni', u'code')
+ x1 = cnn.xid(10, 'uni', 'code')
cnn.tpc_begin(x1)
cnn.tpc_prepare()
cnn.reset()
@@ -1242,7 +1242,7 @@ class ConnectionTwoPhaseTests(ConnectingTestCase):
# Let's just check uniconde is accepted as type.
cnn = self.connect()
cnn.set_client_encoding('utf8')
- cnn.tpc_begin(u"transaction-id")
+ cnn.tpc_begin("transaction-id")
cnn.tpc_prepare()
cnn.reset()
@@ -1679,7 +1679,7 @@ class AutocommitTests(ConnectingTestCase):
class PasswordLeakTestCase(ConnectingTestCase):
def setUp(self):
- super(PasswordLeakTestCase, self).setUp()
+ super().setUp()
PasswordLeakTestCase.dsn = None
class GrassingConnection(ext.connection):
@@ -1754,7 +1754,7 @@ def killer():
signal.signal(signal.SIGABRT, handle_sigabort)
-conn = psycopg2.connect(%(dsn)r)
+conn = psycopg2.connect({dsn!r})
cur = conn.cursor()
@@ -1765,8 +1765,8 @@ t.daemon = True
t.start()
while True:
- cur.execute(%(query)r, ("Hello, world!",))
-""" % {'dsn': dsn, 'query': query})
+ cur.execute({query!r}, ("Hello, world!",))
+""".format(dsn=dsn, query=query))
proc = sp.Popen([sys.executable, '-c', script],
stdout=sp.PIPE, stderr=sp.PIPE)
diff --git a/tests/test_copy.py b/tests/test_copy.py
index bbf68a6..e48e3bd 100755
--- a/tests/test_copy.py
+++ b/tests/test_copy.py
@@ -97,7 +97,7 @@ class CopyTests(ConnectingTestCase):
curs = self.conn.cursor()
f = StringIO()
for i in range(10):
- f.write("%s\n" % (i,))
+ f.write(f"{i}\n")
f.seek(0)
curs.copy_from(MinimalRead(f), "tcopy", columns=['id'])
@@ -109,7 +109,7 @@ class CopyTests(ConnectingTestCase):
curs = self.conn.cursor()
f = StringIO()
for i in range(10):
- f.write("%s\n" % (i,))
+ f.write(f"{i}\n")
f.seek(0)
@@ -209,7 +209,7 @@ class CopyTests(ConnectingTestCase):
f = StringIO()
for i, c in zip(range(nrecs), cycle(string.ascii_letters)):
l = c * srec
- f.write("%s\t%s\n" % (i, l))
+ f.write(f"{i}\t{l}\n")
f.seek(0)
curs.copy_from(MinimalRead(f), "tcopy", **copykw)
@@ -237,7 +237,7 @@ class CopyTests(ConnectingTestCase):
self.assertEqual(ntests, len(string.ascii_letters))
def test_copy_expert_file_refcount(self):
- class Whatever(object):
+ class Whatever:
pass
f = Whatever()
@@ -319,7 +319,7 @@ class CopyTests(ConnectingTestCase):
# issue #219
script = ("""\
import psycopg2
-conn = psycopg2.connect(%(dsn)r)
+conn = psycopg2.connect({dsn!r})
curs = conn.cursor()
curs.execute("create table copy_segf (id int)")
try:
@@ -327,7 +327,7 @@ try:
except psycopg2.ProgrammingError:
pass
conn.close()
-""" % {'dsn': dsn})
+""".format(dsn=dsn))
proc = Popen([sys.executable, '-c', script])
proc.communicate()
@@ -338,7 +338,7 @@ conn.close()
# issue #219
script = ("""\
import psycopg2
-conn = psycopg2.connect(%(dsn)r)
+conn = psycopg2.connect({dsn!r})
curs = conn.cursor()
curs.execute("create table copy_segf (id int)")
try:
@@ -346,7 +346,7 @@ try:
except psycopg2.ProgrammingError:
pass
conn.close()
-""" % {'dsn': dsn})
+""".format(dsn=dsn))
proc = Popen([sys.executable, '-c', script], stdout=PIPE)
proc.communicate()
diff --git a/tests/test_cursor.py b/tests/test_cursor.py
index 1e6b250..1b5b620 100755
--- a/tests/test_cursor.py
+++ b/tests/test_cursor.py
@@ -74,12 +74,12 @@ class CursorTests(ConnectingTestCase):
# test consistency between execute and mogrify.
# unicode query containing only ascii data
- cur.execute(u"SELECT 'foo';")
+ cur.execute("SELECT 'foo';")
self.assertEqual('foo', cur.fetchone()[0])
- self.assertEqual(b"SELECT 'foo';", cur.mogrify(u"SELECT 'foo';"))
+ self.assertEqual(b"SELECT 'foo';", cur.mogrify("SELECT 'foo';"))
conn.set_client_encoding('UTF8')
- snowman = u"\u2603"
+ snowman = "\u2603"
def b(s):
if isinstance(s, str):
@@ -88,10 +88,10 @@ class CursorTests(ConnectingTestCase):
return s
# unicode query with non-ascii data
- cur.execute(u"SELECT '%s';" % snowman)
+ cur.execute("SELECT '%s';" % snowman)
self.assertEqual(snowman.encode('utf8'), b(cur.fetchone()[0]))
self.assertQuotedEqual(("SELECT '%s';" % snowman).encode('utf8'),
- cur.mogrify(u"SELECT '%s';" % snowman))
+ cur.mogrify("SELECT '%s';" % snowman))
# unicode args
cur.execute("SELECT %s;", (snowman,))
@@ -100,10 +100,10 @@ class CursorTests(ConnectingTestCase):
cur.mogrify("SELECT %s;", (snowman,)))
# unicode query and args
- cur.execute(u"SELECT %s;", (snowman,))
+ cur.execute("SELECT %s;", (snowman,))
self.assertEqual(snowman.encode("utf-8"), b(cur.fetchone()[0]))
self.assertQuotedEqual(("SELECT '%s';" % snowman).encode('utf8'),
- cur.mogrify(u"SELECT %s;", (snowman,)))
+ cur.mogrify("SELECT %s;", (snowman,)))
def test_mogrify_decimal_explodes(self):
conn = self.conn
@@ -283,11 +283,11 @@ class CursorTests(ConnectingTestCase):
# Set up the temporary function
cur.execute('''
- CREATE FUNCTION %s(%s INT)
+ CREATE FUNCTION {}({} INT)
RETURNS INT AS
'SELECT $1 * $1'
LANGUAGE SQL
- ''' % (procname, escaped_paramname))
+ '''.format(procname, escaped_paramname))
# Make sure callproc works right
cur.callproc(procname, {paramname: 2})
@@ -298,7 +298,7 @@ class CursorTests(ConnectingTestCase):
({paramname: 2, 'foo': 'bar'}, psycopg2.ProgrammingError),
({paramname: '2'}, psycopg2.ProgrammingError),
({paramname: 'two'}, psycopg2.ProgrammingError),
- ({u'bj\xc3rn': 2}, psycopg2.ProgrammingError),
+ ({'bj\xc3rn': 2}, psycopg2.ProgrammingError),
({3: 2}, TypeError),
({self: 2}, TypeError),
]
diff --git a/tests/test_dates.py b/tests/test_dates.py
index 48c6f15..fb50abe 100755
--- a/tests/test_dates.py
+++ b/tests/test_dates.py
@@ -423,7 +423,7 @@ class DatetimeTests(ConnectingTestCase, CommonDatetimeTestsMixin):
]:
cur.execute("select %s::text", (s,))
r = cur.fetchone()[0]
- self.assertEqual(r, v, "%s -> %s != %s" % (s, r, v))
+ self.assertEqual(r, v, f"{s} -> {r} != {v}")
@skip_if_crdb("interval style")
@skip_before_postgres(8, 4)
diff --git a/tests/test_errcodes.py b/tests/test_errcodes.py
index 3ce3282..88afd9d 100755
--- a/tests/test_errcodes.py
+++ b/tests/test_errcodes.py
@@ -53,7 +53,7 @@ class ErrocodeTests(ConnectingTestCase):
if errs:
self.fail(
- "raised %s errors in %s cycles (first is %s %s)" % (
+ "raised {} errors in {} cycles (first is {} {})".format(
len(errs), MAX_CYCLES,
errs[0].__class__.__name__, errs[0]))
diff --git a/tests/test_fast_executemany.py b/tests/test_fast_executemany.py
index eaba029..a153ef0 100755
--- a/tests/test_fast_executemany.py
+++ b/tests/test_fast_executemany.py
@@ -43,9 +43,9 @@ class TestPaginate(unittest.TestCase):
[list(range(i * 100, (i + 1) * 100)) for i in range(10)])
-class FastExecuteTestMixin(object):
+class FastExecuteTestMixin:
def setUp(self):
- super(FastExecuteTestMixin, self).setUp()
+ super().setUp()
cur = self.conn.cursor()
cur.execute("""create table testfast (
id serial primary key, date date, val int, data text)""")
@@ -102,7 +102,7 @@ class TestExecuteBatch(FastExecuteTestMixin, testutils.ConnectingTestCase):
page_size=10)
# last command was 5 statements
- self.assertEqual(sum(c == u';' for c in cur.query.decode('ascii')), 4)
+ self.assertEqual(sum(c == ';' for c in cur.query.decode('ascii')), 4)
cur.execute("select id, val from testfast order by id")
self.assertEqual(cur.fetchall(), [(i, i * 10) for i in range(25)])
@@ -111,7 +111,7 @@ class TestExecuteBatch(FastExecuteTestMixin, testutils.ConnectingTestCase):
def test_unicode(self):
cur = self.conn.cursor()
ext.register_type(ext.UNICODE, cur)
- snowman = u"\u2603"
+ snowman = "\u2603"
# unicode in statement
psycopg2.extras.execute_batch(cur,
@@ -206,7 +206,7 @@ class TestExecuteValues(FastExecuteTestMixin, testutils.ConnectingTestCase):
def test_unicode(self):
cur = self.conn.cursor()
ext.register_type(ext.UNICODE, cur)
- snowman = u"\u2603"
+ snowman = "\u2603"
# unicode in statement
psycopg2.extras.execute_values(cur,
diff --git a/tests/test_green.py b/tests/test_green.py
index f511f3e..e4e93a6 100755
--- a/tests/test_green.py
+++ b/tests/test_green.py
@@ -36,7 +36,7 @@ from .testutils import ConnectingTestCase, skip_before_postgres, slow
from .testutils import skip_if_crdb
-class ConnectionStub(object):
+class ConnectionStub:
"""A `connection` wrapper allowing analysis of the `poll()` calls."""
def __init__(self, conn):
self.conn = conn
diff --git a/tests/test_ipaddress.py b/tests/test_ipaddress.py
index 5d2ef3a..451ec0b 100755
--- a/tests/test_ipaddress.py
+++ b/tests/test_ipaddress.py
@@ -15,7 +15,6 @@
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
-from __future__ import unicode_literals
from . import testutils
import unittest
diff --git a/tests/test_lobject.py b/tests/test_lobject.py
index e13ca36..6088b28 100755
--- a/tests/test_lobject.py
+++ b/tests/test_lobject.py
@@ -185,15 +185,15 @@ class LargeObjectTests(LargeObjectTestCase):
def test_read_text(self):
lo = self.conn.lobject()
- snowman = u"\u2603"
- lo.write(u"some data " + snowman)
+ snowman = "\u2603"
+ lo.write("some data " + snowman)
lo.close()
lo = self.conn.lobject(lo.oid, "rt")
x = lo.read(4)
- self.assertEqual(type(x), type(u''))
- self.assertEqual(x, u"some")
- self.assertEqual(lo.read(), u" data " + snowman)
+ self.assertEqual(type(x), type(''))
+ self.assertEqual(x, "some")
+ self.assertEqual(lo.read(), " data " + snowman)
@slow
def test_read_large(self):
@@ -207,7 +207,7 @@ class LargeObjectTests(LargeObjectTestCase):
data1 = lo.read()
# avoid dumping megacraps in the console in case of error
self.assert_(data == data1,
- "%r... != %r..." % (data[:100], data1[:100]))
+ "{!r}... != {!r}...".format(data[:100], data1[:100]))
def test_seek_tell(self):
lo = self.conn.lobject()
diff --git a/tests/test_module.py b/tests/test_module.py
index 78f4e43..dc96e23 100755
--- a/tests/test_module.py
+++ b/tests/test_module.py
@@ -329,10 +329,10 @@ class TestExtensionModule(unittest.TestCase):
self.assert_(pardir in sys.path)
script = ("""
import sys
-sys.path.remove(%r)
-sys.path.insert(0, %r)
+sys.path.remove({!r})
+sys.path.insert(0, {!r})
import _psycopg
-""" % (pardir, pkgdir))
+""".format(pardir, pkgdir))
proc = Popen([sys.executable, '-c', script])
proc.communicate()
diff --git a/tests/test_notify.py b/tests/test_notify.py
index 89a6060..f6c514b 100755
--- a/tests/test_notify.py
+++ b/tests/test_notify.py
@@ -60,19 +60,19 @@ class NotifiesTests(ConnectingTestCase):
script = ("""\
import time
-time.sleep(%(sec)s)
-import %(module)s as psycopg2
-import %(module)s.extensions as ext
-conn = psycopg2.connect(%(dsn)r)
+time.sleep({sec})
+import {module} as psycopg2
+import {module}.extensions as ext
+conn = psycopg2.connect({dsn!r})
conn.set_isolation_level(ext.ISOLATION_LEVEL_AUTOCOMMIT)
print(conn.info.backend_pid)
curs = conn.cursor()
-curs.execute("NOTIFY " %(name)r %(payload)r)
+curs.execute("NOTIFY " {name!r} {payload!r})
curs.close()
conn.close()
-""" % {
- 'module': psycopg2.__name__,
- 'dsn': dsn, 'sec': sec, 'name': name, 'payload': payload})
+""".format(
+ module=psycopg2.__name__,
+ dsn=dsn, sec=sec, name=name, payload=payload))
return Popen([sys.executable, '-c', script], stdout=PIPE)
diff --git a/tests/test_quote.py b/tests/test_quote.py
index 8a1f06a..7489b3f 100755
--- a/tests/test_quote.py
+++ b/tests/test_quote.py
@@ -101,10 +101,10 @@ class QuotingTestCase(ConnectingTestCase):
"Unicode test skipped since server encoding is %s"
% server_encoding)
- data = u"""some data with \t chars
+ data = """some data with \t chars
to escape into, 'quotes', \u20ac euro sign and \\ a backslash too.
"""
- data += u"".join(map(chr, [u for u in range(1, 65536)
+ data += "".join(map(chr, [u for u in range(1, 65536)
if not 0xD800 <= u <= 0xDFFF])) # surrogate area
self.conn.set_client_encoding('UNICODE')
@@ -143,7 +143,7 @@ class QuotingTestCase(ConnectingTestCase):
self.assert_(not self.conn.notices)
def test_bytes(self):
- snowman = u"\u2603"
+ snowman = "\u2603"
conn = self.connect()
conn.set_client_encoding('UNICODE')
psycopg2.extensions.register_type(psycopg2.extensions.BYTES, conn)
@@ -171,7 +171,7 @@ class TestQuotedIdentifier(ConnectingTestCase):
@testutils.skip_before_postgres(8, 0)
def test_unicode_ident(self):
- snowman = u"\u2603"
+ snowman = "\u2603"
quoted = '"' + snowman + '"'
self.assertEqual(quote_ident(snowman, self.conn), quoted)
@@ -189,7 +189,7 @@ class TestStringAdapter(ConnectingTestCase):
# self.assertEqual(adapt(egrave).getquoted(), "'\xe8'")
def test_encoding_error(self):
- snowman = u"\u2603"
+ snowman = "\u2603"
a = adapt(snowman)
self.assertRaises(UnicodeEncodeError, a.getquoted)
@@ -197,14 +197,14 @@ class TestStringAdapter(ConnectingTestCase):
# Note: this works-ish mostly in case when the standard db connection
# we test with is utf8, otherwise the encoding chosen by PQescapeString
# may give bad results.
- snowman = u"\u2603"
+ snowman = "\u2603"
a = adapt(snowman)
a.encoding = 'utf8'
self.assertEqual(a.encoding, 'utf8')
self.assertEqual(a.getquoted(), b"'\xe2\x98\x83'")
def test_connection_wins_anyway(self):
- snowman = u"\u2603"
+ snowman = "\u2603"
a = adapt(snowman)
a.encoding = 'latin9'
@@ -215,7 +215,7 @@ class TestStringAdapter(ConnectingTestCase):
self.assertQuotedEqual(a.getquoted(), b"'\xe2\x98\x83'")
def test_adapt_bytes(self):
- snowman = u"\u2603"
+ snowman = "\u2603"
self.conn.set_client_encoding('utf8')
a = psycopg2.extensions.QuotedString(snowman.encode('utf8'))
a.prepare(self.conn)
diff --git a/tests/test_replication.py b/tests/test_replication.py
index 3ed68a5..657fa60 100755
--- a/tests/test_replication.py
+++ b/tests/test_replication.py
@@ -41,13 +41,13 @@ skip_repl_if_green = skip_if_green("replication not supported in green mode")
class ReplicationTestCase(ConnectingTestCase):
def setUp(self):
- super(ReplicationTestCase, self).setUp()
+ super().setUp()
self.slot = testconfig.repl_slot
self._slots = []
def tearDown(self):
# first close all connections, as they might keep the slot(s) active
- super(ReplicationTestCase, self).tearDown()
+ super().tearDown()
time.sleep(0.025) # sometimes the slot is still active, wait a little
@@ -244,9 +244,9 @@ class AsyncReplicationTest(ReplicationTestCase):
def consume(msg):
# just check the methods
- "%s: %s" % (cur.io_timestamp, repr(msg))
- "%s: %s" % (cur.feedback_timestamp, repr(msg))
- "%s: %s" % (cur.wal_end, repr(msg))
+ "{}: {}".format(cur.io_timestamp, repr(msg))
+ "{}: {}".format(cur.feedback_timestamp, repr(msg))
+ "{}: {}".format(cur.wal_end, repr(msg))
self.msg_count += 1
if self.msg_count > 3:
diff --git a/tests/test_sql.py b/tests/test_sql.py
index 730fa18..2d96208 100755
--- a/tests/test_sql.py
+++ b/tests/test_sql.py
@@ -103,7 +103,7 @@ class SqlFormatTests(ConnectingTestCase):
self.assertRaises(ValueError, sql.SQL("select {a:<};").format, a=10)
def test_must_be_adaptable(self):
- class Foo(object):
+ class Foo:
pass
self.assertRaises(psycopg2.ProgrammingError,
@@ -174,7 +174,7 @@ class IdentifierTests(ConnectingTestCase):
def test_init(self):
self.assert_(isinstance(sql.Identifier('foo'), sql.Identifier))
- self.assert_(isinstance(sql.Identifier(u'foo'), sql.Identifier))
+ self.assert_(isinstance(sql.Identifier('foo'), sql.Identifier))
self.assert_(isinstance(sql.Identifier('foo', 'bar', 'baz'), sql.Identifier))
self.assertRaises(TypeError, sql.Identifier)
self.assertRaises(TypeError, sql.Identifier, 10)
@@ -223,7 +223,7 @@ class LiteralTests(ConnectingTestCase):
def test_init(self):
self.assert_(isinstance(sql.Literal('foo'), sql.Literal))
- self.assert_(isinstance(sql.Literal(u'foo'), sql.Literal))
+ self.assert_(isinstance(sql.Literal('foo'), sql.Literal))
self.assert_(isinstance(sql.Literal(b'foo'), sql.Literal))
self.assert_(isinstance(sql.Literal(42), sql.Literal))
self.assert_(isinstance(
@@ -248,7 +248,7 @@ class LiteralTests(ConnectingTestCase):
self.assert_(sql.Literal('foo') != sql.SQL('foo'))
def test_must_be_adaptable(self):
- class Foo(object):
+ class Foo:
pass
self.assertRaises(psycopg2.ProgrammingError,
@@ -261,7 +261,7 @@ class SQLTests(ConnectingTestCase):
def test_init(self):
self.assert_(isinstance(sql.SQL('foo'), sql.SQL))
- self.assert_(isinstance(sql.SQL(u'foo'), sql.SQL))
+ self.assert_(isinstance(sql.SQL('foo'), sql.SQL))
self.assertRaises(TypeError, sql.SQL, 10)
self.assertRaises(TypeError, sql.SQL, dt.date(2016, 12, 31))
diff --git a/tests/test_types_basic.py b/tests/test_types_basic.py
index 9317571..e5ee552 100755
--- a/tests/test_types_basic.py
+++ b/tests/test_types_basic.py
@@ -52,7 +52,7 @@ class TypesBasicTests(ConnectingTestCase):
"wrong quoting: " + s)
def testUnicode(self):
- s = u"Quote'this\\! ''ok?''"
+ s = "Quote'this\\! ''ok?''"
self.failUnless(self.execute("SELECT %s AS foo", (s,)) == s,
"wrong unicode quoting: " + s)
@@ -210,7 +210,7 @@ class TypesBasicTests(ConnectingTestCase):
curs.execute("select '{a,b,c}'::text[]")
x = curs.fetchone()[0]
self.assert_(isinstance(x[0], str))
- self.assertEqual(x, [u'a', u'b', u'c'])
+ self.assertEqual(x, ['a', 'b', 'c'])
def testBytesArray(self):
psycopg2.extensions.register_type(
@@ -382,7 +382,7 @@ class AdaptSubclassTest(unittest.TestCase):
@restore_types
def test_adapt_most_specific(self):
- class A(object):
+ class A:
pass
class B(A):
diff --git a/tests/test_types_extras.py b/tests/test_types_extras.py
index a126994..942785c 100755
--- a/tests/test_types_extras.py
+++ b/tests/test_types_extras.py
@@ -110,13 +110,13 @@ class TypesExtrasTests(ConnectingTestCase):
self.assertQuotedEqual(a.getquoted(), b"'192.168.1.0/24'::inet")
# adapts ok with unicode too
- i = Inet(u"192.168.1.0/24")
+ i = Inet("192.168.1.0/24")
a = psycopg2.extensions.adapt(i)
a.prepare(self.conn)
self.assertQuotedEqual(a.getquoted(), b"'192.168.1.0/24'::inet")
def test_adapt_fail(self):
- class Foo(object):
+ class Foo:
pass
self.assertRaises(psycopg2.ProgrammingError,
psycopg2.extensions.adapt, Foo(), ext.ISQLQuote, None)
@@ -151,7 +151,7 @@ class HstoreTestCase(ConnectingTestCase):
o = {'a': '1', 'b': "'", 'c': None}
if self.conn.encoding == 'UTF8':
- o['d'] = u'\xe0'
+ o['d'] = '\xe0'
a = HstoreAdapter(o)
a.prepare(self.conn)
@@ -166,7 +166,7 @@ class HstoreTestCase(ConnectingTestCase):
self.assertQuotedEqual(ii[1], b"('b' => '''')")
self.assertQuotedEqual(ii[2], b"('c' => NULL)")
if 'd' in o:
- encc = u'\xe0'.encode(psycopg2.extensions.encodings[self.conn.encoding])
+ encc = '\xe0'.encode(psycopg2.extensions.encodings[self.conn.encoding])
self.assertQuotedEqual(ii[3], b"('d' => '" + encc + b"')")
def test_adapt_9(self):
@@ -175,7 +175,7 @@ class HstoreTestCase(ConnectingTestCase):
o = {'a': '1', 'b': "'", 'c': None}
if self.conn.encoding == 'UTF8':
- o['d'] = u'\xe0'
+ o['d'] = '\xe0'
a = HstoreAdapter(o)
a.prepare(self.conn)
@@ -197,7 +197,7 @@ class HstoreTestCase(ConnectingTestCase):
self.assertQuotedEqual(ii[2][0], b"'c'")
self.assertQuotedEqual(ii[2][1], b"NULL")
if 'd' in o:
- encc = u'\xe0'.encode(psycopg2.extensions.encodings[self.conn.encoding])
+ encc = '\xe0'.encode(psycopg2.extensions.encodings[self.conn.encoding])
self.assertQuotedEqual(ii[3][0], b"'d'")
self.assertQuotedEqual(ii[3][1], b"'" + encc + b"'")
@@ -401,7 +401,7 @@ class AdaptTypeTestCase(ConnectingTestCase):
def test_none_fast_path(self):
# the None adapter is not actually invoked in regular adaptation
- class WonkyAdapter(object):
+ class WonkyAdapter:
def __init__(self, obj):
pass
@@ -715,7 +715,7 @@ class AdaptTypeTestCase(ConnectingTestCase):
except psycopg2.ProgrammingError:
self.conn.rollback()
- curs.execute("create type %s as (%s);" % (name,
+ curs.execute("create type {} as ({});".format(name,
", ".join(["%s %s" % p for p in fields])))
if '.' in name:
schema, name = name.split('.')
@@ -750,7 +750,7 @@ def skip_if_no_json_type(f):
class JsonTestCase(ConnectingTestCase):
def test_adapt(self):
objs = [None, "te'xt", 123, 123.45,
- u'\xe0\u20ac', ['a', 100], {'a': 100}]
+ '\xe0\u20ac', ['a', 100], {'a': 100}]
curs = self.conn.cursor()
for obj in enumerate(objs):
@@ -905,7 +905,7 @@ class JsonTestCase(ConnectingTestCase):
self.assertEqual(data['b'], None)
def test_str(self):
- snowman = u"\u2603"
+ snowman = "\u2603"
obj = {'a': [1, 2, snowman]}
j = psycopg2.extensions.adapt(psycopg2.extras.Json(obj))
s = str(j)
@@ -1261,11 +1261,11 @@ class RangeTestCase(unittest.TestCase):
# Using the "u" prefix to make sure we have the proper return types in
# Python2
expected = [
- u'(0, 4)',
- u'[0, 4]',
- u'(0, 4]',
- u'[0, 4)',
- u'empty',
+ '(0, 4)',
+ '[0, 4]',
+ '(0, 4]',
+ '[0, 4)',
+ 'empty',
]
results = []
@@ -1285,7 +1285,7 @@ class RangeTestCase(unittest.TestCase):
tz = FixedOffsetTimezone(-5 * 60, "EST")
r = DateTimeTZRange(datetime(2010, 1, 1, tzinfo=tz),
datetime(2011, 1, 1, tzinfo=tz))
- expected = u'[2010-01-01 00:00:00-05:00, 2011-01-01 00:00:00-05:00)'
+ expected = '[2010-01-01 00:00:00-05:00, 2011-01-01 00:00:00-05:00)'
result = str(r)
self.assertEqual(result, expected)
diff --git a/tests/test_with.py b/tests/test_with.py
index 9e501f2..984602b 100755
--- a/tests/test_with.py
+++ b/tests/test_with.py
@@ -117,7 +117,7 @@ class WithConnectionTestCase(WithTestCase):
class MyConn(ext.connection):
def commit(self):
commits.append(None)
- super(MyConn, self).commit()
+ super().commit()
with self.connect(connection_factory=MyConn) as conn:
curs = conn.cursor()
@@ -136,7 +136,7 @@ class WithConnectionTestCase(WithTestCase):
class MyConn(ext.connection):
def rollback(self):
rollbacks.append(None)
- super(MyConn, self).rollback()
+ super().rollback()
try:
with self.connect(connection_factory=MyConn) as conn:
@@ -195,7 +195,7 @@ class WithCursorTestCase(WithTestCase):
class MyCurs(ext.cursor):
def close(self):
closes.append(None)
- super(MyCurs, self).close()
+ super().close()
with self.conn.cursor(cursor_factory=MyCurs) as curs:
self.assert_(isinstance(curs, MyCurs))
diff --git a/tests/testutils.py b/tests/testutils.py
index b64d4fc..be6098c 100644
--- a/tests/testutils.py
+++ b/tests/testutils.py
@@ -449,7 +449,7 @@ def skip_if_crdb(reason, conn=None, version=None):
"%s (https://github.com/cockroachdb/cockroach/issues/%s)"
% (reason, crdb_reasons[reason]))
raise unittest.SkipTest(
- "not supported on CockroachDB %s: %s" % (ver, reason))
+ f"not supported on CockroachDB {ver}: {reason}")
@decorate_all_tests
def skip_if_crdb_(f):
@@ -502,7 +502,7 @@ def _crdb_match_version(version, pattern):
return op(version, ref)
-class raises_typeerror(object):
+class raises_typeerror:
def __enter__(self):
pass