summaryrefslogtreecommitdiff
path: root/lib/sqlalchemy/testing
diff options
context:
space:
mode:
Diffstat (limited to 'lib/sqlalchemy/testing')
-rw-r--r--lib/sqlalchemy/testing/asyncio.py124
-rw-r--r--lib/sqlalchemy/testing/config.py15
-rw-r--r--lib/sqlalchemy/testing/engines.py6
-rw-r--r--lib/sqlalchemy/testing/fixtures.py11
-rw-r--r--lib/sqlalchemy/testing/plugin/plugin_base.py37
-rw-r--r--lib/sqlalchemy/testing/plugin/pytestplugin.py160
-rw-r--r--lib/sqlalchemy/testing/provision.py10
-rw-r--r--lib/sqlalchemy/testing/util.py32
8 files changed, 325 insertions, 70 deletions
diff --git a/lib/sqlalchemy/testing/asyncio.py b/lib/sqlalchemy/testing/asyncio.py
new file mode 100644
index 000000000..52386d33e
--- /dev/null
+++ b/lib/sqlalchemy/testing/asyncio.py
@@ -0,0 +1,124 @@
+# testing/asyncio.py
+# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+
+# functions and wrappers to run tests, fixtures, provisioning and
+# setup/teardown in an asyncio event loop, conditionally based on the
+# current DB driver being used for a test.
+
+# note that SQLAlchemy's asyncio integration also supports a method
+# of running individual asyncio functions inside of separate event loops
+# using "async_fallback" mode; however running whole functions in the event
+# loop is a more accurate test for how SQLAlchemy's asyncio features
+# would run in the real world.
+
+
+from functools import wraps
+import inspect
+
+from . import config
+from ..util.concurrency import _util_async_run
+
+# may be set to False if the
+# --disable-asyncio flag is passed to the test runner.
+ENABLE_ASYNCIO = True
+
+
+def _assume_async(fn, *args, **kwargs):
+ """Run a function in an asyncio loop unconditionally.
+
+ This function is used for provisioning features like
+ testing a database connection for server info.
+
+ Note that for blocking IO database drivers, this means they block the
+ event loop.
+
+ """
+
+ if not ENABLE_ASYNCIO:
+ return fn(*args, **kwargs)
+
+ return _util_async_run(fn, *args, **kwargs)
+
+
+def _maybe_async_provisioning(fn, *args, **kwargs):
+ """Run a function in an asyncio loop if any current drivers might need it.
+
+ This function is used for provisioning features that take
+ place outside of a specific database driver being selected, so if the
+ current driver that happens to be used for the provisioning operation
+ is an async driver, it will run in asyncio and not fail.
+
+ Note that for blocking IO database drivers, this means they block the
+ event loop.
+
+ """
+ if not ENABLE_ASYNCIO:
+
+ return fn(*args, **kwargs)
+
+ if config.any_async:
+ return _util_async_run(fn, *args, **kwargs)
+ else:
+ return fn(*args, **kwargs)
+
+
+def _maybe_async(fn, *args, **kwargs):
+ """Run a function in an asyncio loop if the current selected driver is
+ async.
+
+ This function is used for test setup/teardown and tests themselves
+ where the current DB driver is known.
+
+
+ """
+ if not ENABLE_ASYNCIO:
+
+ return fn(*args, **kwargs)
+
+ is_async = config._current.is_async
+
+ if is_async:
+ return _util_async_run(fn, *args, **kwargs)
+ else:
+ return fn(*args, **kwargs)
+
+
+def _maybe_async_wrapper(fn):
+ """Apply the _maybe_async function to an existing function and return
+ as a wrapped callable, supporting generator functions as well.
+
+ This is currently used for pytest fixtures that support generator use.
+
+ """
+
+ if inspect.isgeneratorfunction(fn):
+ _stop = object()
+
+ def call_next(gen):
+ try:
+ return next(gen)
+ # can't raise StopIteration in an awaitable.
+ except StopIteration:
+ return _stop
+
+ @wraps(fn)
+ def wrap_fixture(*args, **kwargs):
+ gen = fn(*args, **kwargs)
+ while True:
+ value = _maybe_async(call_next, gen)
+ if value is _stop:
+ break
+ yield value
+
+ else:
+
+ @wraps(fn)
+ def wrap_fixture(*args, **kwargs):
+ return _maybe_async(fn, *args, **kwargs)
+
+ return wrap_fixture
diff --git a/lib/sqlalchemy/testing/config.py b/lib/sqlalchemy/testing/config.py
index 0b8027b84..270ac4c2c 100644
--- a/lib/sqlalchemy/testing/config.py
+++ b/lib/sqlalchemy/testing/config.py
@@ -7,6 +7,8 @@
import collections
+from .. import util
+
requirements = None
db = None
db_url = None
@@ -14,6 +16,7 @@ db_opts = None
file_config = None
test_schema = None
test_schema_2 = None
+any_async = False
_current = None
ident = "main"
@@ -104,6 +107,10 @@ class Config(object):
self.test_schema = "test_schema"
self.test_schema_2 = "test_schema_2"
+ self.is_async = db.dialect.is_async and not util.asbool(
+ db.url.query.get("async_fallback", False)
+ )
+
_stack = collections.deque()
_configs = set()
@@ -121,7 +128,15 @@ class Config(object):
If there are no configs set up yet, this config also
gets set as the "_current".
"""
+ global any_async
+
cfg = Config(db, db_opts, options, file_config)
+
+ # if any backends include an async driver, then ensure
+ # all setup/teardown and tests are wrapped in the maybe_async()
+ # decorator that will set up a greenlet context for async drivers.
+ any_async = any_async or cfg.is_async
+
cls._configs.add(cfg)
return cfg
diff --git a/lib/sqlalchemy/testing/engines.py b/lib/sqlalchemy/testing/engines.py
index bb137cb32..d0a1bc0d0 100644
--- a/lib/sqlalchemy/testing/engines.py
+++ b/lib/sqlalchemy/testing/engines.py
@@ -46,7 +46,7 @@ class ConnectionKiller(object):
fn()
except Exception as e:
warnings.warn(
- "testing_reaper couldn't " "rollback/close connection: %s" % e
+ "testing_reaper couldn't rollback/close connection: %s" % e
)
def rollback_all(self):
@@ -199,9 +199,7 @@ class ReconnectFixture(object):
try:
fn()
except Exception as e:
- warnings.warn(
- "ReconnectFixture couldn't " "close connection: %s" % e
- )
+ warnings.warn("ReconnectFixture couldn't close connection: %s" % e)
def shutdown(self, stop=False):
# TODO: this doesn't cover all cases
diff --git a/lib/sqlalchemy/testing/fixtures.py b/lib/sqlalchemy/testing/fixtures.py
index 0ede25176..a52fdd196 100644
--- a/lib/sqlalchemy/testing/fixtures.py
+++ b/lib/sqlalchemy/testing/fixtures.py
@@ -48,6 +48,11 @@ class TestBase(object):
# skipped.
__skip_if__ = None
+ # If this class should be wrapped in asyncio compatibility functions
+ # when using an async engine. This should be set to False only for tests
+ # that use the asyncio features of sqlalchemy directly
+ __asyncio_wrap__ = True
+
def assert_(self, val, msg=None):
assert val, msg
@@ -90,6 +95,12 @@ class TestBase(object):
# engines.drop_all_tables(metadata, config.db)
+class AsyncTestBase(TestBase):
+ """Mixin marking a test as using its own explicit asyncio patterns."""
+
+ __asyncio_wrap__ = False
+
+
class FutureEngineMixin(object):
@classmethod
def setup_class(cls):
diff --git a/lib/sqlalchemy/testing/plugin/plugin_base.py b/lib/sqlalchemy/testing/plugin/plugin_base.py
index 5e41f2cdf..8b6a7d68a 100644
--- a/lib/sqlalchemy/testing/plugin/plugin_base.py
+++ b/lib/sqlalchemy/testing/plugin/plugin_base.py
@@ -63,21 +63,21 @@ def setup_options(make_option):
make_option(
"--log-info",
action="callback",
- type="string",
+ type=str,
callback=_log,
help="turn on info logging for <LOG> (multiple OK)",
)
make_option(
"--log-debug",
action="callback",
- type="string",
+ type=str,
callback=_log,
help="turn on debug logging for <LOG> (multiple OK)",
)
make_option(
"--db",
action="append",
- type="string",
+ type=str,
dest="db",
help="Use prefab database uri. Multiple OK, "
"first one is run by default.",
@@ -91,7 +91,7 @@ def setup_options(make_option):
make_option(
"--dburi",
action="append",
- type="string",
+ type=str,
dest="dburi",
help="Database uri. Multiple OK, " "first one is run by default.",
)
@@ -111,6 +111,11 @@ def setup_options(make_option):
help="Drop all tables in the target database first",
)
make_option(
+ "--disable-asyncio",
+ action="store_true",
+        help="disable test / fixtures / provisioning running in asyncio",
+ )
+ make_option(
"--backend-only",
action="store_true",
dest="backend_only",
@@ -130,20 +135,20 @@ def setup_options(make_option):
)
make_option(
"--profile-sort",
- type="string",
+ type=str,
default="cumulative",
dest="profilesort",
help="Type of sort for profiling standard output",
)
make_option(
"--profile-dump",
- type="string",
+ type=str,
dest="profiledump",
help="Filename where a single profile run will be dumped",
)
make_option(
"--postgresql-templatedb",
- type="string",
+ type=str,
help="name of template database to use for PostgreSQL "
"CREATE DATABASE (defaults to current database)",
)
@@ -156,7 +161,7 @@ def setup_options(make_option):
)
make_option(
"--write-idents",
- type="string",
+ type=str,
dest="write_idents",
help="write out generated follower idents to <file>, "
"when -n<num> is used",
@@ -172,7 +177,7 @@ def setup_options(make_option):
make_option(
"--requirements",
action="callback",
- type="string",
+ type=str,
callback=_requirements_opt,
help="requirements class for testing, overrides setup.cfg",
)
@@ -188,14 +193,14 @@ def setup_options(make_option):
"--include-tag",
action="callback",
callback=_include_tag,
- type="string",
+ type=str,
help="Include tests with tag <tag>",
)
make_option(
"--exclude-tag",
action="callback",
callback=_exclude_tag,
- type="string",
+ type=str,
help="Exclude tests with tag <tag>",
)
make_option(
@@ -375,10 +380,18 @@ def _init_symbols(options, file_config):
@post
+def _set_disable_asyncio(opt, file_config):
+ if opt.disable_asyncio:
+ from sqlalchemy.testing import asyncio
+
+ asyncio.ENABLE_ASYNCIO = False
+
+
+@post
def _engine_uri(options, file_config):
- from sqlalchemy.testing import config
from sqlalchemy import testing
+ from sqlalchemy.testing import config
from sqlalchemy.testing import provision
if options.dburi:
diff --git a/lib/sqlalchemy/testing/plugin/pytestplugin.py b/lib/sqlalchemy/testing/plugin/pytestplugin.py
index 644ea6dc2..6be64aa61 100644
--- a/lib/sqlalchemy/testing/plugin/pytestplugin.py
+++ b/lib/sqlalchemy/testing/plugin/pytestplugin.py
@@ -26,11 +26,6 @@ else:
from typing import Sequence
try:
- import asyncio
-except ImportError:
- pass
-
-try:
import xdist # noqa
has_xdist = True
@@ -126,11 +121,15 @@ def collect_types_fixture():
def pytest_sessionstart(session):
- plugin_base.post_begin()
+ from sqlalchemy.testing import asyncio
+
+ asyncio._assume_async(plugin_base.post_begin)
def pytest_sessionfinish(session):
- plugin_base.final_process_cleanup()
+ from sqlalchemy.testing import asyncio
+
+ asyncio._maybe_async_provisioning(plugin_base.final_process_cleanup)
if session.config.option.dump_pyannotate:
from pyannotate_runtime import collect_types
@@ -162,23 +161,31 @@ if has_xdist:
import uuid
def pytest_configure_node(node):
+ from sqlalchemy.testing import provision
+ from sqlalchemy.testing import asyncio
+
# the master for each node fills workerinput dictionary
# which pytest-xdist will transfer to the subprocess
plugin_base.memoize_important_follower_config(node.workerinput)
node.workerinput["follower_ident"] = "test_%s" % uuid.uuid4().hex[0:12]
- from sqlalchemy.testing import provision
- provision.create_follower_db(node.workerinput["follower_ident"])
+ asyncio._maybe_async_provisioning(
+ provision.create_follower_db, node.workerinput["follower_ident"]
+ )
def pytest_testnodedown(node, error):
from sqlalchemy.testing import provision
+ from sqlalchemy.testing import asyncio
- provision.drop_follower_db(node.workerinput["follower_ident"])
+ asyncio._maybe_async_provisioning(
+ provision.drop_follower_db, node.workerinput["follower_ident"]
+ )
def pytest_collection_modifyitems(session, config, items):
+
# look for all those classes that specify __backend__ and
# expand them out into per-database test cases.
@@ -189,6 +196,8 @@ def pytest_collection_modifyitems(session, config, items):
# it's to suit the rather odd use case here which is that we are adding
# new classes to a module on the fly.
+ from sqlalchemy.testing import asyncio
+
rebuilt_items = collections.defaultdict(
lambda: collections.defaultdict(list)
)
@@ -201,20 +210,26 @@ def pytest_collection_modifyitems(session, config, items):
]
test_classes = set(item.parent for item in items)
- for test_class in test_classes:
- for sub_cls in plugin_base.generate_sub_tests(
- test_class.cls, test_class.parent.module
- ):
- if sub_cls is not test_class.cls:
- per_cls_dict = rebuilt_items[test_class.cls]
- # support pytest 5.4.0 and above pytest.Class.from_parent
- ctor = getattr(pytest.Class, "from_parent", pytest.Class)
- for inst in ctor(
- name=sub_cls.__name__, parent=test_class.parent.parent
- ).collect():
- for t in inst.collect():
- per_cls_dict[t.name].append(t)
+ def setup_test_classes():
+ for test_class in test_classes:
+ for sub_cls in plugin_base.generate_sub_tests(
+ test_class.cls, test_class.parent.module
+ ):
+ if sub_cls is not test_class.cls:
+ per_cls_dict = rebuilt_items[test_class.cls]
+
+ # support pytest 5.4.0 and above pytest.Class.from_parent
+ ctor = getattr(pytest.Class, "from_parent", pytest.Class)
+ for inst in ctor(
+ name=sub_cls.__name__, parent=test_class.parent.parent
+ ).collect():
+ for t in inst.collect():
+ per_cls_dict[t.name].append(t)
+
+ # class requirements will sometimes need to access the DB to check
+ # capabilities, so need to do this for async
+ asyncio._maybe_async_provisioning(setup_test_classes)
newitems = []
for item in items:
@@ -238,6 +253,10 @@ def pytest_collection_modifyitems(session, config, items):
def pytest_pycollect_makeitem(collector, name, obj):
if inspect.isclass(obj) and plugin_base.want_class(name, obj):
+ from sqlalchemy.testing import config
+
+ if config.any_async and getattr(obj, "__asyncio_wrap__", True):
+ obj = _apply_maybe_async(obj)
ctor = getattr(pytest.Class, "from_parent", pytest.Class)
@@ -258,6 +277,38 @@ def pytest_pycollect_makeitem(collector, name, obj):
return []
+def _apply_maybe_async(obj, recurse=True):
+ from sqlalchemy.testing import asyncio
+
+ setup_names = {"setup", "setup_class", "teardown", "teardown_class"}
+ for name, value in vars(obj).items():
+ if (
+ (callable(value) or isinstance(value, classmethod))
+ and not getattr(value, "_maybe_async_applied", False)
+ and (name.startswith("test_") or name in setup_names)
+ ):
+ is_classmethod = False
+ if isinstance(value, classmethod):
+ value = value.__func__
+ is_classmethod = True
+
+ @_pytest_fn_decorator
+ def make_async(fn, *args, **kwargs):
+ return asyncio._maybe_async(fn, *args, **kwargs)
+
+ do_async = make_async(value)
+ if is_classmethod:
+ do_async = classmethod(do_async)
+ do_async._maybe_async_applied = True
+
+ setattr(obj, name, do_async)
+ if recurse:
+ for cls in obj.mro()[1:]:
+ if cls != object:
+ _apply_maybe_async(cls, False)
+ return obj
+
+
_current_class = None
@@ -297,6 +348,8 @@ def _parametrize_cls(module, cls):
def pytest_runtest_setup(item):
+ from sqlalchemy.testing import asyncio
+
# here we seem to get called only based on what we collected
# in pytest_collection_modifyitems. So to do class-based stuff
# we have to tear that out.
@@ -307,7 +360,7 @@ def pytest_runtest_setup(item):
# ... so we're doing a little dance here to figure it out...
if _current_class is None:
- class_setup(item.parent.parent)
+ asyncio._maybe_async(class_setup, item.parent.parent)
_current_class = item.parent.parent
# this is needed for the class-level, to ensure that the
@@ -315,20 +368,22 @@ def pytest_runtest_setup(item):
# class-level teardown...
def finalize():
global _current_class
- class_teardown(item.parent.parent)
+ asyncio._maybe_async(class_teardown, item.parent.parent)
_current_class = None
item.parent.parent.addfinalizer(finalize)
- test_setup(item)
+ asyncio._maybe_async(test_setup, item)
def pytest_runtest_teardown(item):
+ from sqlalchemy.testing import asyncio
+
# ...but this works better as the hook here rather than
# using a finalizer, as the finalizer seems to get in the way
# of the test reporting failures correctly (you get a bunch of
# pytest assertion stuff instead)
- test_teardown(item)
+ asyncio._maybe_async(test_teardown, item)
def test_setup(item):
@@ -342,7 +397,9 @@ def test_teardown(item):
def class_setup(item):
- plugin_base.start_test_class(item.cls)
+ from sqlalchemy.testing import asyncio
+
+ asyncio._maybe_async_provisioning(plugin_base.start_test_class, item.cls)
def class_teardown(item):
@@ -372,17 +429,19 @@ def _pytest_fn_decorator(target):
if add_positional_parameters:
spec.args.extend(add_positional_parameters)
- metadata = dict(target="target", fn="__fn", name=fn.__name__)
+ metadata = dict(
+ __target_fn="__target_fn", __orig_fn="__orig_fn", name=fn.__name__
+ )
metadata.update(format_argspec_plus(spec, grouped=False))
code = (
"""\
def %(name)s(%(args)s):
- return %(target)s(%(fn)s, %(apply_kw)s)
+ return %(__target_fn)s(%(__orig_fn)s, %(apply_kw)s)
"""
% metadata
)
decorated = _exec_code_in_env(
- code, {"target": target, "__fn": fn}, fn.__name__
+ code, {"__target_fn": target, "__orig_fn": fn}, fn.__name__
)
if not add_positional_parameters:
decorated.__defaults__ = getattr(fn, "__func__", fn).__defaults__
@@ -554,14 +613,49 @@ class PytestFixtureFunctions(plugin_base.FixtureFunctions):
return pytest.param(*parameters[1:], id=ident)
def fixture(self, *arg, **kw):
- return pytest.fixture(*arg, **kw)
+ from sqlalchemy.testing import config
+ from sqlalchemy.testing import asyncio
+
+ # wrapping pytest.fixture function. determine if
+ # decorator was called as @fixture or @fixture().
+        if len(arg) > 0 and callable(arg[0]):
+            # was called as @fixture without parens; we already have
+            # the function to wrap.
+            fn = arg[0]
+            arg = arg[1:]
+        else:
+            # was called as @fixture(); don't have the function yet.
+            fn = None
+
+ # create a pytest.fixture marker. because the fn is not being
+ # passed, this is always a pytest.FixtureFunctionMarker()
+ # object (or whatever pytest is calling it when you read this)
+ # that is waiting for a function.
+ fixture = pytest.fixture(*arg, **kw)
+
+ # now apply wrappers to the function, including fixture itself
+
+ def wrap(fn):
+ if config.any_async:
+ fn = asyncio._maybe_async_wrapper(fn)
+ # other wrappers may be added here
+
+ # now apply FixtureFunctionMarker
+ fn = fixture(fn)
+ return fn
+
+ if fn:
+ return wrap(fn)
+ else:
+ return wrap
def get_current_test_name(self):
return os.environ.get("PYTEST_CURRENT_TEST")
def async_test(self, fn):
+ from sqlalchemy.testing import asyncio
+
@_pytest_fn_decorator
def decorate(fn, *args, **kwargs):
- asyncio.get_event_loop().run_until_complete(fn(*args, **kwargs))
+ asyncio._assume_async(fn, *args, **kwargs)
return decorate(fn)
diff --git a/lib/sqlalchemy/testing/provision.py b/lib/sqlalchemy/testing/provision.py
index c4f489a69..fb3d77dc4 100644
--- a/lib/sqlalchemy/testing/provision.py
+++ b/lib/sqlalchemy/testing/provision.py
@@ -94,11 +94,11 @@ def generate_db_urls(db_urls, extra_drivers):
--dburi postgresql://db2 \
--dbdriver=psycopg2 --dbdriver=asyncpg?async_fallback=true
- Noting that the default postgresql driver is psycopg2. the output
+ Noting that the default postgresql driver is psycopg2, the output
would be::
postgresql+psycopg2://db1
- postgresql+asyncpg://db1?async_fallback=true
+ postgresql+asyncpg://db1
postgresql+psycopg2://db2
postgresql+psycopg2://db3
@@ -108,6 +108,12 @@ def generate_db_urls(db_urls, extra_drivers):
for a driver that is both coming from --dburi as well as --dbdrivers,
we want to keep it in that dburi.
+    Driver-specific query options can be specified by adding them to the
+ driver name. For example, to enable the async fallback option for
+ asyncpg::
+
+ --dburi postgresql://db1 \
+ --dbdriver=asyncpg?async_fallback=true
"""
urls = set()
diff --git a/lib/sqlalchemy/testing/util.py b/lib/sqlalchemy/testing/util.py
index c6626b9e0..bbaf5034f 100644
--- a/lib/sqlalchemy/testing/util.py
+++ b/lib/sqlalchemy/testing/util.py
@@ -11,13 +11,24 @@ import random
import sys
import types
+from . import config
from . import mock
+from .. import inspect
+from ..schema import Column
+from ..schema import DropConstraint
+from ..schema import DropTable
+from ..schema import ForeignKeyConstraint
+from ..schema import MetaData
+from ..schema import Table
+from ..sql import schema
+from ..sql.sqltypes import Integer
from ..util import decorator
from ..util import defaultdict
from ..util import has_refcount_gc
from ..util import inspect_getfullargspec
from ..util import py2k
+
if not has_refcount_gc:
def non_refcount_gc_collect(*args):
@@ -198,9 +209,9 @@ def fail(msg):
def provide_metadata(fn, *args, **kw):
"""Provide bound MetaData for a single test, dropping afterwards."""
- from . import config
+ # import cycle that only occurs with py2k's import resolver
+ # in py3k this can be moved top level.
from . import engines
- from sqlalchemy import schema
metadata = schema.MetaData(config.db)
self = args[0]
@@ -243,8 +254,6 @@ def flag_combinations(*combinations):
"""
- from . import config
-
keys = set()
for d in combinations:
@@ -264,8 +273,6 @@ def flag_combinations(*combinations):
def lambda_combinations(lambda_arg_sets, **kw):
- from . import config
-
args = inspect_getfullargspec(lambda_arg_sets)
arg_sets = lambda_arg_sets(*[mock.Mock() for arg in args[0]])
@@ -302,11 +309,8 @@ def resolve_lambda(__fn, **kw):
def metadata_fixture(ddl="function"):
"""Provide MetaData for a pytest fixture."""
- from . import config
-
def decorate(fn):
def run_ddl(self):
- from sqlalchemy import schema
metadata = self.metadata = schema.MetaData()
try:
@@ -328,8 +332,6 @@ def force_drop_names(*names):
isolating for foreign key cycles
"""
- from . import config
- from sqlalchemy import inspect
@decorator
def go(fn, *args, **kw):
@@ -358,14 +360,6 @@ class adict(dict):
def drop_all_tables(engine, inspector, schema=None, include_names=None):
- from sqlalchemy import (
- Column,
- Table,
- Integer,
- MetaData,
- ForeignKeyConstraint,
- )
- from sqlalchemy.schema import DropTable, DropConstraint
if include_names is not None:
include_names = set(include_names)