author     Sam Thursfield <sam@afuera.me.uk>  2019-09-14 10:47:12 +0000
committer  Sam Thursfield <sam@afuera.me.uk>  2019-09-14 10:47:12 +0000
commit     e072f03b8bb5a88d5b094318a369345a774a5db0 (patch)
tree       bbc0c4ba9ad260f0b819aba4ceb4ce0c16045296
parent     62a0ebbd679b3ed43edafb5c460ad838fdde4c36 (diff)
parent     60b0f7fb6abb4cbd03707260e14419b885e30635 (diff)
Merge branch 'sam/functional-tests-quiet' into 'master'

Rewrite how functional tests are implemented

Closes #123

See merge request GNOME/tracker!130
-rw-r--r--  .gitlab-ci.yml  21
-rw-r--r--  HACKING.md  63
-rw-r--r--  README.md  2
-rw-r--r--  meson.build  5
-rw-r--r--  src/libtracker-common/tracker-log.c  16
-rw-r--r--  src/libtracker-sparql/tracker-sparql.pc.in  1
-rw-r--r--  src/tracker-store/tracker-main.vala  6
-rw-r--r--  src/tracker/tracker-daemon.c  7
-rw-r--r-- [-rwxr-xr-x]  tests/functional-tests/01-insertion.py  4
-rw-r--r-- [-rwxr-xr-x]  tests/functional-tests/02-sparql-bugs.py  7
-rw-r--r-- [-rwxr-xr-x]  tests/functional-tests/03-fts-functions.py  4
-rw-r--r-- [-rwxr-xr-x]  tests/functional-tests/04-group-concat.py  4
-rw-r--r-- [-rwxr-xr-x]  tests/functional-tests/05-coalesce.py  4
-rw-r--r-- [-rwxr-xr-x]  tests/functional-tests/06-distance.py  4
-rw-r--r-- [-rwxr-xr-x]  tests/functional-tests/07-graph.py  4
-rw-r--r-- [-rwxr-xr-x]  tests/functional-tests/08-unique-insertions.py  4
-rw-r--r-- [-rwxr-xr-x]  tests/functional-tests/09-concurrent-query.py  4
-rw-r--r-- [-rwxr-xr-x]  tests/functional-tests/14-signals.py  7
-rw-r--r-- [-rwxr-xr-x]  tests/functional-tests/15-statistics.py  4
-rw-r--r-- [-rwxr-xr-x]  tests/functional-tests/16-collation.py  3
-rw-r--r-- [-rwxr-xr-x]  tests/functional-tests/17-ontology-changes.py  174
-rw-r--r--  tests/functional-tests/configuration.json.in  8
-rw-r--r--  tests/functional-tests/configuration.py  40
-rw-r--r--  tests/functional-tests/expectedFailure.py  8
-rw-r--r--  tests/functional-tests/ipc/meson.build  18
-rw-r--r--  tests/functional-tests/ipc/test-insert-or-replace.vala  8
-rw-r--r--  tests/functional-tests/meson.build  37
-rw-r--r--  tests/functional-tests/storetest.py  27
-rwxr-xr-x  tests/functional-tests/test-runner.sh.in  20
-rw-r--r--  tests/services/meson.build  2
-rw-r--r--  tests/test-bus.conf.in  8
-rwxr-xr-x  utils/sandbox/tracker-sandbox.py  143
-rw-r--r--  utils/trackertestutils/dbusdaemon.py  222
-rw-r--r--  utils/trackertestutils/dconf.py  69
-rw-r--r--  utils/trackertestutils/helpers.py  283
-rw-r--r--  utils/trackertestutils/meson.build  4
-rw-r--r--  utils/trackertestutils/psutil_mini.py  98
37 files changed, 738 insertions, 605 deletions
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 1922acfb5..29a459acd 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,3 +1,12 @@
+variables:
+ # These can be used to see verbose log output from the functional-tests.
+ # See HACKING.md for more information.
+ TRACKER_VERBOSITY: "0"
+ TRACKER_TESTS_VERBOSE: "no"
+
+ # This can be used when debugging test failures that only occur within GitLab CI.
+ MESON_TEST_EXTRA_ARGS: ""
+
stages:
- test
@@ -15,7 +24,17 @@ test-fedora-latest:
# screenful of junk each time unless we strip these.
unset $(env|grep -o '^CI_[^=]*')
- su tracker -c 'cd build; meson test --print-errorlogs'
+ su tracker -c 'cd build; meson test --print-errorlogs ${MESON_TEST_EXTRA_ARGS}'
+
+ after_script:
+ - |
+ echo "Test suite settings:"
+ echo
+ echo " TRACKER_VERBOSITY: ${TRACKER_VERBOSITY}"
+ echo " TRACKER_TESTS_VERBOSE: ${TRACKER_TESTS_VERBOSE}"
+ echo " MESON_TEST_EXTRA_ARGS: ${MESON_TEST_EXTRA_ARGS}"
+ echo
+ echo "These values can be set at https://gitlab.gnome.org/GNOME/tracker/pipelines/new"
artifacts:
when: always
diff --git a/HACKING.md b/HACKING.md
new file mode 100644
index 000000000..b907f3112
--- /dev/null
+++ b/HACKING.md
@@ -0,0 +1,63 @@
+# Logging
+
+The following environment variables control logging from Tracker daemons:
+
+ * `TRACKER_VERBOSITY`: takes a value of 1, 2 or 3 and causes increasing
+ amounts of log output from Tracker code to be written to stdout.
+ * `G_MESSAGES_DEBUG`: controls log output from GLib-based libraries that
+ are used in the Tracker process. Use `G_MESSAGES_DEBUG=all` for maximal
+ log output.
+
+Internally, Tracker will set `G_MESSAGES_DEBUG=Tracker` if `TRACKER_VERBOSITY`
+is set and `G_MESSAGES_DEBUG` is not set, to enable printing of its own log
+messages to stdout.
+
+You can set these variables when using `tracker-sandbox`, and when running the
+Tracker test suite. Note that Meson will not print log output from tests by
+default; use `meson test --verbose` or `meson test --print-errorlogs` to
+enable it.
+
+The functional tests understand an additional variable, `TRACKER_TESTS_VERBOSE`
+which can be set to `1` or `yes` to see detailed logging from the test harness
+itself, and full log output from the internal D-Bus daemon. By default, these
+tests filter output from the D-Bus daemon to only show log messages from
+Tracker processes. Anything written directly to stdout, for example by
+`g_print()` or by the dbus-daemon itself, will not be displayed unless
+`TRACKER_TESTS_VERBOSE` is set.
+
+When working with GitLab CI, you can use the
+[Run Pipeline dialog](https://gitlab.gnome.org/GNOME/tracker/pipelines/new)
+to set the values of these variables and increase the verbosity of the tests in
+CI.
+
+# Attaching a debugger to Tracker daemons
+
+Tracker daemons are not started directly. Instead they are started by the D-Bus
+daemon on request. When using tracker-sandbox or the functional-tests, it's
+difficult to start the daemon manually under `gdb`.
+
+Instead, we recommend adding a 10 second pause at the top of the daemon's
+main() function. In Vala code, try this:
+
+ print("Pausing to attach debugger. Run: gdb attach %i\n", Posix.getpid());
+ Posix.usleep(10 * 1000 * 1000);
+ print("Waking up again\n");
+
+Run the test using the `meson test --timeout-multiplier=10000`
+option to avoid your process being killed by the test runner. When you see
+the 'Pausing' message, run the `gdb attach` command in another terminal within
+10 seconds.
+
+# Running Tracker daemons under Valgrind
+
+The Tracker daemons are launched via D-Bus activation. When running them from
+the source tree using tracker-sandbox or the functional tests, the command line
+is controlled by the D-Bus .service.in files stored in `./tests/services`. Just
+change the `Exec=` line to add Valgrind, like this:
+
+ Exec=/usr/bin/valgrind @abs_top_builddir@/src/tracker-store/tracker-store
+
+By default the tracker-sandbox utility and the functional-tests will only
+show output from Tracker code. For the functional-tests, set
+`TRACKER_TESTS_VERBOSE=1` to see output from Valgrind. For tracker-sandbox, use
+the `--debug-dbus` option.
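
As a concrete illustration of the logging controls the new HACKING.md describes, a single functional test can be run with full output like this (a sketch; it assumes a configured Meson build directory named `build`):

    cd build
    TRACKER_VERBOSITY=3 TRACKER_TESTS_VERBOSE=1 meson test --verbose functional-17-ontology-changes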
diff --git a/README.md b/README.md
index f1ec0f855..c94639b87 100644
--- a/README.md
+++ b/README.md
@@ -131,4 +131,4 @@ interactive shell inside the sandbox. From here you can use debugging tools
such as GDB.
For more information about developing Tracker, look at
-https://wiki.gnome.org/Projects/Tracker.
+https://wiki.gnome.org/Projects/Tracker and HACKING.md.
diff --git a/meson.build b/meson.build
index 5b30c1982..af7ad6bc7 100644
--- a/meson.build
+++ b/meson.build
@@ -273,6 +273,7 @@ conf.set('exec_prefix', get_option('prefix'))
conf.set('bindir', join_paths(get_option('prefix'), get_option('bindir')))
conf.set('datadir', datadir)
conf.set('datarootdir', join_paths(get_option('prefix'), get_option('datadir')))
+conf.set('dbus_services_dir', dbus_services_dir)
conf.set('includedir', join_paths(get_option('prefix'), get_option('includedir')))
conf.set('libdir', libdir)
conf.set('libexecdir', join_paths(get_option('prefix'), get_option('libexecdir')))
@@ -283,10 +284,6 @@ conf.set('tracker_store', join_paths ('${libexecdir}', 'tracker-store'))
conf.set('ontologies_dir', join_paths ('${datadir}', 'tracker', 'ontologies'))
conf.set('domain_ontologies_dir', join_paths('${datadir}', 'tracker', 'domain-ontologies'))
-# Configure functional tests to run completely from source tree.
-conf.set('FUNCTIONAL_TESTS_ONTOLOGIES_DIR', join_paths(meson.current_source_dir(), 'tests', 'functional-tests', 'test-ontologies'))
-conf.set('FUNCTIONAL_TESTS_TRACKER_STORE_PATH', join_paths(meson.current_build_dir(), 'src', 'tracker-store', 'tracker-store'))
-
configure_file(input: 'config.h.meson.in',
output: 'config.h',
configuration: conf)
diff --git a/src/libtracker-common/tracker-log.c b/src/libtracker-common/tracker-log.c
index d300ee795..f3228516e 100644
--- a/src/libtracker-common/tracker-log.c
+++ b/src/libtracker-common/tracker-log.c
@@ -151,6 +151,18 @@ hide_log_handler (const gchar *domain,
/* do nothing */
}
+static void
+ensure_g_messages_debug_set ()
+{
+ const gchar *value;
+
+ value = g_getenv ("G_MESSAGES_DEBUG");
+
+ if (value == NULL) {
+ g_setenv ("G_MESSAGES_DEBUG", "Tracker", TRUE);
+ }
+}
+
gboolean
tracker_log_init (gint this_verbosity,
gchar **used_filename)
@@ -192,8 +204,8 @@ tracker_log_init (gint this_verbosity,
/* If we have debug enabled, we imply G_MESSAGES_DEBUG or we
* see nothing, this came in since GLib 2.32.
*/
- if (this_verbosity > 1) {
- g_setenv ("G_MESSAGES_DEBUG", "all", TRUE);
+ if (this_verbosity > 0) {
+ ensure_g_messages_debug_set ();
}
if (use_log_files) {
diff --git a/src/libtracker-sparql/tracker-sparql.pc.in b/src/libtracker-sparql/tracker-sparql.pc.in
index dcdf0b7df..7b3aa3718 100644
--- a/src/libtracker-sparql/tracker-sparql.pc.in
+++ b/src/libtracker-sparql/tracker-sparql.pc.in
@@ -6,6 +6,7 @@ libexecdir=@libexecdir@
datadir=@datadir@
tracker_store=@tracker_store@
ontologies_dir=@ontologies_dir@
+dbus_services_dir=@dbus_services_dir@
domain_ontologies_dir=@domain_ontologies_dir@
Name: tracker-sparql
diff --git a/src/tracker-store/tracker-main.vala b/src/tracker-store/tracker-main.vala
index 1248709b7..e1b0cc86b 100644
--- a/src/tracker-store/tracker-main.vala
+++ b/src/tracker-store/tracker-main.vala
@@ -118,14 +118,12 @@ License which can be viewed at:
do_shutdown ();
if (strsignal (signo) != null) {
- print ("\n");
- print ("Received signal:%d->'%s'", signo, strsignal (signo));
+ message ("Received signal:%d->'%s'", signo, strsignal (signo));
}
break;
default:
if (strsignal (signo) != null) {
- print ("\n");
- print ("Received signal:%d->'%s'", signo, strsignal (signo));
+ message ("Received signal:%d->'%s'", signo, strsignal (signo));
}
break;
}
diff --git a/src/tracker/tracker-daemon.c b/src/tracker/tracker-daemon.c
index 2c25b2c07..c73658477 100644
--- a/src/tracker/tracker-daemon.c
+++ b/src/tracker/tracker-daemon.c
@@ -228,10 +228,9 @@ signal_handler (gpointer user_data)
/* Fall through */
default:
if (g_strsignal (signo)) {
- g_print ("\n");
- g_print ("Received signal:%d->'%s'\n",
- signo,
- g_strsignal (signo));
+ g_message ("Received signal:%d->'%s'",
+ signo,
+ g_strsignal (signo));
}
break;
}
diff --git a/tests/functional-tests/01-insertion.py b/tests/functional-tests/01-insertion.py
index 00cc4e017..8b5b40c80 100755..100644
--- a/tests/functional-tests/01-insertion.py
+++ b/tests/functional-tests/01-insertion.py
@@ -1,5 +1,3 @@
-#!/usr/bin/python3
-#
# Copyright (C) 2010, Nokia <ivan.frade@nokia.com>
# Copyright (C) 2019, Sam Thursfield <sam@afuera.me.uk>
#
@@ -871,4 +869,4 @@ class TrackerStorePhoneNumberTest (CommonTrackerStoreTest):
if __name__ == "__main__":
- ut.main()
+ ut.main(verbosity=2)
diff --git a/tests/functional-tests/02-sparql-bugs.py b/tests/functional-tests/02-sparql-bugs.py
index 4305ea0a9..e312e9109 100755..100644
--- a/tests/functional-tests/02-sparql-bugs.py
+++ b/tests/functional-tests/02-sparql-bugs.py
@@ -1,5 +1,3 @@
-#!/usr/bin/python3
-#
# Copyright (C) 2010, Nokia <ivan.frade@nokia.com>
# Copyright (C) 2019, Sam Thursfield <sam@afuera.me.uk>
#
@@ -111,21 +109,18 @@ class TrackerStoreSparqlBugsTests (CommonTrackerStoreTest):
"""
results1 = self.tracker.query(query1)
- print("1", results1)
self.assertEqual(len(results1), 1)
self.assertEqual(len(results1[0]), 2)
self.assertEqual(results1[0][0], "contact:test")
self.assertEqual(results1[0][1], "98653")
results2 = self.tracker.query(query2)
- print("2", results2)
self.assertEqual(len(results2), 1)
self.assertEqual(len(results2[0]), 2)
self.assertEqual(results2[0][0], "contact:test")
self.assertEqual(results2[0][1], "98653")
results3 = self.tracker.query(query3)
- print("3", results3)
self.assertEqual(len(results3), 1)
self.assertEqual(len(results3[0]), 2)
self.assertEqual(results3[0][0], "contact:test")
@@ -243,4 +238,4 @@ class TrackerStoreSparqlBugsTests (CommonTrackerStoreTest):
if __name__ == "__main__":
- ut.main()
+ ut.main(verbosity=2)
diff --git a/tests/functional-tests/03-fts-functions.py b/tests/functional-tests/03-fts-functions.py
index 46c43f368..ec23a3ed6 100755..100644
--- a/tests/functional-tests/03-fts-functions.py
+++ b/tests/functional-tests/03-fts-functions.py
@@ -1,5 +1,3 @@
-#!/usr/bin/python3
-#
# Copyright (C) 2010, Nokia <ivan.frade@nokia.com>
# Copyright (C) 2019, Sam Thursfield <sam@afuera.me.uk>
#
@@ -129,4 +127,4 @@ class TestFTSFunctions (CommonTrackerStoreTest):
if __name__ == '__main__':
- ut.main()
+ ut.main(verbosity=2)
diff --git a/tests/functional-tests/04-group-concat.py b/tests/functional-tests/04-group-concat.py
index a8064a828..d36523004 100755..100644
--- a/tests/functional-tests/04-group-concat.py
+++ b/tests/functional-tests/04-group-concat.py
@@ -1,5 +1,3 @@
-#!/usr/bin/python3
-#
# Copyright (C) 2010, Nokia <ivan.frade@nokia.com>
# Copyright (C) 2019, Sam Thursfield <sam@afuera.me.uk>
#
@@ -89,4 +87,4 @@ class TestGroupConcat (CommonTrackerStoreTest):
if __name__ == '__main__':
- ut.main()
+ ut.main(verbosity=2)
diff --git a/tests/functional-tests/05-coalesce.py b/tests/functional-tests/05-coalesce.py
index 48d8e6eb6..176ae6b66 100755..100644
--- a/tests/functional-tests/05-coalesce.py
+++ b/tests/functional-tests/05-coalesce.py
@@ -1,5 +1,3 @@
-#!/usr/bin/python3
-#
# Copyright (C) 2010, Nokia <ivan.frade@nokia.com>
# Copyright (C) 2019, Sam Thursfield <sam@afuera.me.uk>
#
@@ -116,4 +114,4 @@ class TestCoalesce (CommonTrackerStoreTest):
if __name__ == '__main__':
- ut.main()
+ ut.main(verbosity=2)
diff --git a/tests/functional-tests/06-distance.py b/tests/functional-tests/06-distance.py
index 80d35dfb9..42989a946 100755..100644
--- a/tests/functional-tests/06-distance.py
+++ b/tests/functional-tests/06-distance.py
@@ -1,5 +1,3 @@
-#!/usr/bin/python3
-#
# Copyright (C) 2010, Nokia <ivan.frade@nokia.com>
# Copyright (C) 2019, Sam Thursfield <sam@afuera.me.uk>
#
@@ -126,4 +124,4 @@ class TestDistanceFunctions (CommonTrackerStoreTest):
if __name__ == '__main__':
- ut.main()
+ ut.main(verbosity=2)
diff --git a/tests/functional-tests/07-graph.py b/tests/functional-tests/07-graph.py
index d9d173bcd..c1a6e8697 100755..100644
--- a/tests/functional-tests/07-graph.py
+++ b/tests/functional-tests/07-graph.py
@@ -1,5 +1,3 @@
-#!/usr/bin/python3
-#
# Copyright (C) 2010, Nokia <ivan.frade@nokia.com>
# Copyright (C) 2019, Sam Thursfield <sam@afuera.me.uk>
#
@@ -147,4 +145,4 @@ class TestGraphs (CommonTrackerStoreTest):
if __name__ == '__main__':
- ut.main()
+ ut.main(verbosity=2)
diff --git a/tests/functional-tests/08-unique-insertions.py b/tests/functional-tests/08-unique-insertions.py
index 25ea6a13f..69e16573b 100755..100644
--- a/tests/functional-tests/08-unique-insertions.py
+++ b/tests/functional-tests/08-unique-insertions.py
@@ -1,5 +1,3 @@
-#!/usr/bin/python3
-#
# Copyright (C) 2010, Nokia <ivan.frade@nokia.com>
# Copyright (C) 2019, Sam Thursfield <sam@afuera.me.uk>
#
@@ -77,4 +75,4 @@ class TestMinerInsertBehaviour (CommonTrackerStoreTest):
if __name__ == '__main__':
- ut.main()
+ ut.main(verbosity=2)
diff --git a/tests/functional-tests/09-concurrent-query.py b/tests/functional-tests/09-concurrent-query.py
index b24fdcc40..7164babb6 100755..100644
--- a/tests/functional-tests/09-concurrent-query.py
+++ b/tests/functional-tests/09-concurrent-query.py
@@ -1,5 +1,3 @@
-#!/usr/bin/python3
-#
# Copyright (C) 2010, Nokia <ivan.frade@nokia.com>
# Copyright (C) 2019, Sam Thursfield <sam@afuera.me.uk>
#
@@ -99,4 +97,4 @@ class TestConcurrentQuery (CommonTrackerStoreTest):
return False
if __name__ == "__main__":
- ut.main()
+ ut.main(verbosity=2)
diff --git a/tests/functional-tests/14-signals.py b/tests/functional-tests/14-signals.py
index 242ae8480..8cf349edd 100755..100644
--- a/tests/functional-tests/14-signals.py
+++ b/tests/functional-tests/14-signals.py
@@ -1,5 +1,3 @@
-#!/usr/bin/python3
-#
# Copyright (C) 2010, Nokia <ivan.frade@nokia.com>
# Copyright (C) 2019, Sam Thursfield <sam@afuera.me.uk>
#
@@ -52,7 +50,7 @@ class TrackerStoreSignalsTests (CommonTrackerStoreTest):
self.loop = GLib.MainLoop()
self.timeout_id = 0
- self.bus = Gio.bus_get_sync(Gio.BusType.SESSION, None)
+ self.bus = self.sandbox.get_connection()
self.results_classname = None
self.results_deletes = None
@@ -125,7 +123,6 @@ class TrackerStoreSignalsTests (CommonTrackerStoreTest):
"""
self.__connect_signal()
self.tracker.update(CONTACT)
- time.sleep(1)
self.__wait_for_signal()
# validate results
@@ -192,4 +189,4 @@ class TrackerStoreSignalsTests (CommonTrackerStoreTest):
if __name__ == "__main__":
- ut.main()
+ ut.main(verbosity=2)
diff --git a/tests/functional-tests/15-statistics.py b/tests/functional-tests/15-statistics.py
index 6f6ca3014..a883b1708 100755..100644
--- a/tests/functional-tests/15-statistics.py
+++ b/tests/functional-tests/15-statistics.py
@@ -1,5 +1,3 @@
-#!/usr/bin/python3
-#
# Copyright (C) 2010, Nokia <ivan.frade@nokia.com>
# Copyright (C) 2019, Sam Thursfield <sam@afuera.me.uk>
#
@@ -125,4 +123,4 @@ class TrackerStoreStatisticsTests (CommonTrackerStoreTest):
self.assertEqual(old_stats[k], new_stats[k])
if __name__ == "__main__":
- ut.main()
+ ut.main(verbosity=2)
diff --git a/tests/functional-tests/16-collation.py b/tests/functional-tests/16-collation.py
index 40a993d82..962db9640 100755..100644
--- a/tests/functional-tests/16-collation.py
+++ b/tests/functional-tests/16-collation.py
@@ -1,4 +1,3 @@
-#!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010, Nokia <ivan.frade@nokia.com>
@@ -132,4 +131,4 @@ if __name__ == "__main__":
* Check what happens in non-english encoding
* Dynamic change of collation (not implemented yet in tracker)
""")
- ut.main()
+ ut.main(verbosity=2)
diff --git a/tests/functional-tests/17-ontology-changes.py b/tests/functional-tests/17-ontology-changes.py
index b7e00d9d0..e2cb132f1 100755..100644
--- a/tests/functional-tests/17-ontology-changes.py
+++ b/tests/functional-tests/17-ontology-changes.py
@@ -1,5 +1,3 @@
-#!/usr/bin/python3
-#
# Copyright (C) 2010, Nokia <ivan.frade@nokia.com>
# Copyright (C) 2019, Sam Thursfield <sam@afuera.me.uk>
#
@@ -26,8 +24,8 @@ changes and checking if the data is still there.
from gi.repository import GLib
-import logging
import os
+import pathlib
import shutil
import re
import tempfile
@@ -48,101 +46,6 @@ XSD_INTEGER = "http://www.w3.org/2001/XMLSchema#integer"
TEST_PREFIX = "http://example.org/ns#"
-TEST_ENV_VARS = {"LC_COLLATE": "en_GB.utf8"}
-
-REASONABLE_TIMEOUT = 5
-
-log = logging.getLogger()
-
-
-class UnableToBootException (Exception):
- pass
-
-
-class TrackerSystemAbstraction (object):
-
- def __init__(self, settings=None):
- self.store = None
- self._dirs = {}
-
- def xdg_data_home(self):
- return os.path.join(self._basedir, 'data')
-
- def xdg_cache_home(self):
- return os.path.join(self._basedir, 'cache')
-
- def set_up_environment(self, settings=None, ontodir=None):
- """
- Sets up the XDG_*_HOME variables and make sure the directories exist
-
- Settings should be a dict mapping schema names to dicts that hold the
- settings that should be changed in those schemas. The contents dicts
- should map key->value, where key is a key name and value is a suitable
- GLib.Variant instance.
- """
- self._basedir = tempfile.mkdtemp()
-
- self._dirs = {
- "XDG_DATA_HOME": self.xdg_data_home(),
- "XDG_CACHE_HOME": self.xdg_cache_home()
- }
-
- for var, directory in list(self._dirs.items()):
- os.makedirs(directory)
- os.makedirs(os.path.join(directory, 'tracker'))
- os.environ[var] = directory
-
- if ontodir:
- log.debug("export %s=%s", "TRACKER_DB_ONTOLOGIES_DIR", ontodir)
- os.environ["TRACKER_DB_ONTOLOGIES_DIR"] = ontodir
-
- for var, value in TEST_ENV_VARS.items():
- log.debug("export %s=%s", var, value)
- os.environ[var] = value
-
- # Previous loop should have set DCONF_PROFILE to the test location
- if settings is not None:
- self._apply_settings(settings)
-
- def _apply_settings(self, settings):
- for schema_name, contents in settings.items():
- dconf = trackertestutils.dconf.DConfClient(schema_name)
- dconf.reset()
- for key, value in contents.items():
- dconf.write(key, value)
-
- def tracker_store_testing_start(self, confdir=None, ontodir=None):
- """
- Stops any previous instance of the store, calls set_up_environment,
- and starts a new instances of the store
- """
- self.set_up_environment(confdir, ontodir)
-
- self.store = trackertestutils.helpers.StoreHelper(cfg.TRACKER_STORE_PATH)
- self.store.start()
-
- def tracker_store_restart_with_new_ontologies(self, ontodir):
- self.store.stop()
- if ontodir:
- os.environ["TRACKER_DB_ONTOLOGIES_DIR"] = ontodir
- try:
- self.store.start()
- except GLib.Error:
- raise UnableToBootException(
- "Unable to boot the store \n(" + str(e) + ")")
-
- def finish(self):
- """
- Stop all running processes and remove all test data.
- """
-
- if self.store:
- self.store.stop()
-
- for path in list(self._dirs.values()):
- shutil.rmtree(path)
- os.rmdir(self._basedir)
-
class OntologyChangeTestTemplate (ut.TestCase):
"""
@@ -158,35 +61,43 @@ class OntologyChangeTestTemplate (ut.TestCase):
Check doc in those methods for the specific details.
"""
- def get_ontology_dir(self, param):
- return os.path.join(cfg.TEST_ONTOLOGIES_DIR, param)
-
def setUp(self):
- self.system = TrackerSystemAbstraction()
+ self.tmpdir = tempfile.mkdtemp(prefix='tracker-test-')
def tearDown(self):
- self.system.finish()
+ shutil.rmtree(self.tmpdir, ignore_errors=True)
- def template_test_ontology_change(self):
+ def get_ontology_dir(self, param):
+ return str(pathlib.Path(__file__).parent.joinpath('test-ontologies', param))
+ def template_test_ontology_change(self):
self.set_ontology_dirs()
- basic_ontologies = self.get_ontology_dir(self.FIRST_ONTOLOGY_DIR)
- modified_ontologies = self.get_ontology_dir(self.SECOND_ONTOLOGY_DIR)
+ self.__assert_ontology_dates(self.FIRST_ONTOLOGY_DIR, self.SECOND_ONTOLOGY_DIR)
- self.__assert_ontology_dates(basic_ontologies, modified_ontologies)
+ extra_env = cfg.test_environment(self.tmpdir)
+ extra_env['LC_COLLATE'] = 'en_GB.utf8'
+ extra_env['TRACKER_DB_ONTOLOGIES_DIR'] = self.get_ontology_dir(self.FIRST_ONTOLOGY_DIR)
- self.system.tracker_store_testing_start(ontodir=basic_ontologies)
- self.tracker = self.system.store
+ sandbox1 = trackertestutils.helpers.TrackerDBusSandbox(
+ cfg.TEST_DBUS_DAEMON_CONFIG_FILE, extra_env=extra_env)
+ sandbox1.start()
+
+ self.tracker = trackertestutils.helpers.StoreHelper(sandbox1.get_connection())
+ self.tracker.start_and_wait_for_ready()
self.insert_data()
- try:
- # Boot the second set of ontologies
- self.system.tracker_store_restart_with_new_ontologies(
- modified_ontologies)
- except UnableToBootException as e:
- self.fail(str(self.__class__) + " " + str(e))
+ sandbox1.stop()
+
+ # Boot the second set of ontologies
+ extra_env['TRACKER_DB_ONTOLOGIES_DIR'] = self.get_ontology_dir(self.SECOND_ONTOLOGY_DIR)
+ sandbox2 = trackertestutils.helpers.TrackerDBusSandbox(
+ cfg.TEST_DBUS_DAEMON_CONFIG_FILE, extra_env=extra_env)
+ sandbox2.start()
+
+ self.tracker = trackertestutils.helpers.StoreHelper(sandbox2.get_connection())
+ self.tracker.start_and_wait_for_ready()
self.validate_status()
@@ -233,7 +144,7 @@ class OntologyChangeTestTemplate (ut.TestCase):
(member, dbus_result))
return
- def __assert_ontology_dates(self, first_dir, second_dir):
+ def __assert_ontology_dates(self, first, second):
"""
Asserts that 91-test.ontology in second_dir has a more recent
modification time than in first_dir
@@ -241,23 +152,24 @@ class OntologyChangeTestTemplate (ut.TestCase):
ISO9601_REGEX = "(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z)"
def get_ontology_date(ontology):
- for line in open(ontology, 'r'):
- if "nao:lastModified" in line:
- getmodtime = re.compile(
- 'nao:lastModified\ \"' + ISO9601_REGEX + '\"')
- modtime_match = getmodtime.search(line)
-
- if (modtime_match):
- nao_date = modtime_match.group(1)
- return time.strptime(nao_date, "%Y-%m-%dT%H:%M:%SZ")
- else:
- print("something funky in", line)
- break
+ with open(ontology, 'r') as f:
+ for line in f:
+ if "nao:lastModified" in line:
+ getmodtime = re.compile(
+ 'nao:lastModified\ \"' + ISO9601_REGEX + '\"')
+ modtime_match = getmodtime.search(line)
+
+ if (modtime_match):
+ nao_date = modtime_match.group(1)
+ return time.strptime(nao_date, "%Y-%m-%dT%H:%M:%SZ")
+ else:
+ print("something funky in", line)
+ break
first_date = get_ontology_date(
- os.path.join(first_dir, "91-test.ontology"))
+ os.path.join(self.get_ontology_dir(first), "91-test.ontology"))
second_date = get_ontology_date(
- os.path.join(second_dir, "91-test.ontology"))
+ os.path.join(self.get_ontology_dir(second), "91-test.ontology"))
if first_date >= second_date:
self.fail("nao:modifiedTime in '%s' is not more recent in the second ontology" % (
"91-test.ontology"))
@@ -1005,4 +917,4 @@ class PropertyRelegationTest (OntologyChangeTestTemplate):
if __name__ == "__main__":
- ut.main()
+ ut.main(verbosity=2)
diff --git a/tests/functional-tests/configuration.json.in b/tests/functional-tests/configuration.json.in
index 686dceae2..c3b71b42d 100644
--- a/tests/functional-tests/configuration.json.in
+++ b/tests/functional-tests/configuration.json.in
@@ -1,4 +1,8 @@
{
- "TEST_ONTOLOGIES_DIR": "@FUNCTIONAL_TESTS_ONTOLOGIES_DIR@",
- "TRACKER_STORE_PATH": "@FUNCTIONAL_TESTS_TRACKER_STORE_PATH@"
+ "TEST_DBUS_DAEMON_CONFIG_FILE": "@TEST_DBUS_DAEMON_CONFIG_FILE@",
+ "TEST_DCONF_PROFILE": "@TEST_DCONF_PROFILE@",
+ "TEST_GSETTINGS_SCHEMA_DIR": "@TEST_GSETTINGS_SCHEMA_DIR@",
+ "TEST_LANGUAGE_STOP_WORDS_DIR": "@TEST_LANGUAGE_STOP_WORDS_DIR@",
+ "TEST_ONTOLOGIES_DIR": "@TEST_ONTOLOGIES_DIR@",
+ "TEST_DOMAIN_ONTOLOGY_RULE": "@TEST_DOMAIN_ONTOLOGY_RULE@"
}
diff --git a/tests/functional-tests/configuration.py b/tests/functional-tests/configuration.py
index 7da696c48..cd5cb0cfb 100644
--- a/tests/functional-tests/configuration.py
+++ b/tests/functional-tests/configuration.py
@@ -18,10 +18,10 @@
# 02110-1301, USA.
#
-
import json
import logging
import os
+import pathlib
import sys
@@ -34,12 +34,21 @@ with open(os.environ['TRACKER_FUNCTIONAL_TEST_CONFIG']) as f:
config = json.load(f)
-TOP_SRCDIR = os.path.dirname(os.path.dirname(
- os.path.dirname(os.path.dirname(os.path.dirname(__file__)))))
-TOP_BUILDDIR = os.environ['TRACKER_FUNCTIONAL_TEST_BUILD_DIR']
+TEST_DBUS_DAEMON_CONFIG_FILE = config['TEST_DBUS_DAEMON_CONFIG_FILE']
+
-TEST_ONTOLOGIES_DIR = config['TEST_ONTOLOGIES_DIR']
-TRACKER_STORE_PATH = config['TRACKER_STORE_PATH']
+def test_environment(tmpdir):
+ return {
+ 'DCONF_PROFILE': config['TEST_DCONF_PROFILE'],
+ 'GSETTINGS_SCHEMA_DIR': config['TEST_GSETTINGS_SCHEMA_DIR'],
+ 'TRACKER_DB_ONTOLOGIES_DIR': config['TEST_ONTOLOGIES_DIR'],
+ 'TRACKER_LANGUAGE_STOP_WORDS_DIR': config['TEST_LANGUAGE_STOP_WORDS_DIR'],
+ 'TRACKER_TEST_DOMAIN_ONTOLOGY_RULE': config['TEST_DOMAIN_ONTOLOGY_RULE'],
+ 'XDG_CACHE_HOME': os.path.join(tmpdir, 'cache'),
+ 'XDG_CONFIG_HOME': os.path.join(tmpdir, 'config'),
+ 'XDG_DATA_HOME': os.path.join(tmpdir, 'data'),
+ 'XDG_RUNTIME_DIR': os.path.join(tmpdir, 'run'),
+ }
def get_environment_boolean(variable):
@@ -55,5 +64,24 @@ def get_environment_boolean(variable):
(variable, value))
+def get_environment_int(variable, default=0):
+ try:
+ return int(os.environ.get(variable))
+ except (TypeError, ValueError):
+ return default
+
+
if get_environment_boolean('TRACKER_TESTS_VERBOSE'):
+ # Output all logs to stderr
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
+else:
+ # Output some messages from D-Bus daemon to stderr by default. In practice,
+ # only errors and warnings should be output here unless the environment
+ # contains G_MESSAGES_DEBUG= and/or TRACKER_VERBOSITY=1 or more.
+ handler_stderr = logging.StreamHandler(stream=sys.stderr)
+ handler_stderr.addFilter(logging.Filter('trackertestutils.dbusdaemon.stderr'))
+ handler_stdout = logging.StreamHandler(stream=sys.stderr)
+ handler_stdout.addFilter(logging.Filter('trackertestutils.dbusdaemon.stdout'))
+ logging.basicConfig(level=logging.INFO,
+ handlers=[handler_stderr, handler_stdout],
+ format='%(message)s')
diff --git a/tests/functional-tests/expectedFailure.py b/tests/functional-tests/expectedFailure.py
index fe6769569..92d77ce57 100644
--- a/tests/functional-tests/expectedFailure.py
+++ b/tests/functional-tests/expectedFailure.py
@@ -27,6 +27,7 @@ on the files. Note that these tests are highly platform dependant.
from functools import wraps
import sys
+import unittest as ut
import configuration as cfg
@@ -38,11 +39,6 @@ def expectedFailureJournal():
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
- try:
- func(*args, **kwargs)
- except Exception:
- raise ut.case._ExpectedFailure(sys.exc_info())
- raise Exception(
- "Unexpected success. This should fail because journal is disabled")
+ ut.expectedFailure(func)
return wrapper
return decorator
diff --git a/tests/functional-tests/ipc/meson.build b/tests/functional-tests/ipc/meson.build
index 8ee7175fe..1c558c514 100644
--- a/tests/functional-tests/ipc/meson.build
+++ b/tests/functional-tests/ipc/meson.build
@@ -3,10 +3,21 @@ functional_ipc_test_c_args = [
'-DTEST_ONTOLOGIES_DIR="@0@"'.format(tracker_uninstalled_nepomuk_ontologies_dir),
]
+tracker_sandbox = find_program(join_paths(source_root, 'utils', 'sandbox', 'tracker-sandbox.py'))
+sandbox_args = ['--dbus-config', test_dbus_config, '--debug-sandbox', '--index-tmpdir', '--']
+
+sandbox_env = environment()
+
+test_env.prepend('PYTHONPATH', tracker_uninstalled_testutils_dir)
+test_env.set('TRACKER_DB_ONTOLOGIES_DIR', tracker_uninstalled_nepomuk_ontologies_dir)
+test_env.set('TRACKER_LANGUAGE_STOP_WORDS_DIR', tracker_uninstalled_stop_words_dir)
+test_env.set('TRACKER_TEST_DOMAIN_ONTOLOGY_RULE', tracker_uninstalled_domain_rule)
+
insert_or_replace_test = executable('test-insert-or-replace',
'test-insert-or-replace.vala', tracker_sparql_vapi,
dependencies: [tracker_common_dep, tracker_sparql_dep])
-test('functional-ipc-insert-or-replace', insert_or_replace_test,
+test('functional-ipc-insert-or-replace', tracker_sandbox,
+ args: sandbox_args + [insert_or_replace_test],
env: test_env)
bus_query_cancellation_test = executable('test-bus-query-cancellation',
@@ -14,7 +25,6 @@ bus_query_cancellation_test = executable('test-bus-query-cancellation',
c_args: functional_ipc_test_c_args,
dependencies: [tracker_common_dep, tracker_sparql_dep])
-test('functional-ipc-bus-query-cancellation',
- test_runner,
- args: bus_query_cancellation_test,
+test('functional-ipc-bus-query-cancellation', tracker_sandbox,
+ args: sandbox_args + [bus_query_cancellation_test],
env: test_env)
diff --git a/tests/functional-tests/ipc/test-insert-or-replace.vala b/tests/functional-tests/ipc/test-insert-or-replace.vala
index 718dfc8f4..ce64eecbe 100644
--- a/tests/functional-tests/ipc/test-insert-or-replace.vala
+++ b/tests/functional-tests/ipc/test-insert-or-replace.vala
@@ -3,11 +3,11 @@ using Tracker;
using Tracker.Sparql;
const string insert_query_replace = "
-DELETE { ?r nao:hasProperty ?property . }
+DELETE { ?r nao:hasTag ?tag . }
WHERE {
?r a nco:PhoneNumber;
nco:phoneNumber \"02141730585%d\";
- nao:hasProperty ?property .
+ nao:hasTag ?tag .
}
DELETE {
@@ -57,10 +57,10 @@ INSERT OR REPLACE {
}";
const string insert_query_original = "
-DELETE { ?r nao:hasProperty ?property . }
+DELETE { ?r nao:tag ?tag . }
WHERE {
?r a nco:PhoneNumber; nco:phoneNumber \"2141730585%d\";
- nao:hasProperty ?property .
+ nao:hasTag ?tag .
}
DELETE {
diff --git a/tests/functional-tests/meson.build b/tests/functional-tests/meson.build
index be3fc2a4a..8bc70c446 100644
--- a/tests/functional-tests/meson.build
+++ b/tests/functional-tests/meson.build
@@ -1,13 +1,24 @@
-test_runner = configure_file(
- input: 'test-runner.sh.in',
- output: 'test-runner.sh',
- configuration: conf)
-test_runner = find_program(test_runner)
+python = find_program('python3')
+
+# Configure functional tests to run completely from source tree.
+testconf = configuration_data()
+
+config_json_full_path = join_paths(meson.current_build_dir(), 'configuration.json')
+dconf_profile_full_path = join_paths(meson.current_source_dir(), 'trackertest')
+
+test_dbus_config = join_paths(build_root, 'tests', 'test-bus.conf')
+
+testconf.set('TEST_DBUS_DAEMON_CONFIG_FILE', test_dbus_config)
+testconf.set('TEST_DCONF_PROFILE', dconf_profile_full_path)
+testconf.set('TEST_DOMAIN_ONTOLOGY_RULE', tracker_uninstalled_domain_rule)
+testconf.set('TEST_GSETTINGS_SCHEMA_DIR', tracker_uninstalled_gsettings_schema_dir)
+testconf.set('TEST_ONTOLOGIES_DIR', tracker_uninstalled_nepomuk_ontologies_dir)
+testconf.set('TEST_LANGUAGE_STOP_WORDS_DIR', tracker_uninstalled_stop_words_dir)
config_json = configure_file(
input: 'configuration.json.in',
output: 'configuration.json',
- configuration: conf
+ configuration: testconf
)
functional_tests = [
@@ -26,25 +37,17 @@ functional_tests = [
'17-ontology-changes',
]
-config_json_full_path = join_paths(meson.current_build_dir(), 'configuration.json')
-dconf_profile_full_path = join_paths(meson.current_source_dir(), 'trackertest')
-
test_env = environment()
-test_env.set('DCONF_PROFILE', dconf_profile_full_path)
-test_env.set('GSETTINGS_SCHEMA_DIR', tracker_uninstalled_gsettings_schema_dir)
tracker_uninstalled_testutils_dir = join_paths(meson.current_source_dir(), '..', '..', 'utils')
test_env.prepend('PYTHONPATH', tracker_uninstalled_testutils_dir)
-test_env.set('TRACKER_DB_ONTOLOGIES_DIR', tracker_uninstalled_nepomuk_ontologies_dir)
-test_env.set('TRACKER_FUNCTIONAL_TEST_BUILD_DIR', build_root)
test_env.set('TRACKER_FUNCTIONAL_TEST_CONFIG', config_json_full_path)
-test_env.set('TRACKER_LANGUAGE_STOP_WORDS_DIR', tracker_uninstalled_stop_words_dir)
-test_env.set('TRACKER_TEST_DOMAIN_ONTOLOGY_RULE', tracker_uninstalled_domain_rule)
foreach t: functional_tests
- test('functional-' + t, test_runner,
- args: './' + t + '.py',
+ file = '@0@.py'.format(t)
+ test('functional-' + t, python,
+ args: [file],
env: test_env,
workdir: meson.current_source_dir(),
timeout: 60)
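
With the test-runner.sh wrapper gone, each functional test is now an ordinary Python program that Meson invokes directly with the environment set above. A rough manual equivalent for debugging one test outside Meson (a sketch; `$BUILDDIR` stands for your Meson build directory and the paths are illustrative):

    cd tests/functional-tests
    PYTHONPATH=../../utils \
    TRACKER_FUNCTIONAL_TEST_CONFIG=$BUILDDIR/tests/functional-tests/configuration.json \
    python3 01-insertion.py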
diff --git a/tests/functional-tests/storetest.py b/tests/functional-tests/storetest.py
index ed7aa82c5..d96294cc4 100644
--- a/tests/functional-tests/storetest.py
+++ b/tests/functional-tests/storetest.py
@@ -19,6 +19,8 @@
#
import os
+import shutil
+import tempfile
import time
import unittest as ut
@@ -35,11 +37,28 @@ class CommonTrackerStoreTest (ut.TestCase):
@classmethod
def setUpClass(self):
- extra_env = {'LC_COLLATE': 'en_GB.utf8'}
+ self.tmpdir = tempfile.mkdtemp(prefix='tracker-test-')
- self.tracker = trackertestutils.helpers.StoreHelper(cfg.TRACKER_STORE_PATH)
- self.tracker.start(extra_env=extra_env)
+ try:
+ extra_env = cfg.test_environment(self.tmpdir)
+ extra_env['LANG'] = 'en_GB.utf8'
+ extra_env['LC_COLLATE'] = 'en_GB.utf8'
+
+ self.sandbox = trackertestutils.helpers.TrackerDBusSandbox(
+ dbus_daemon_config_file=cfg.TEST_DBUS_DAEMON_CONFIG_FILE, extra_env=extra_env)
+ self.sandbox.start()
+
+ self.tracker = trackertestutils.helpers.StoreHelper(
+ self.sandbox.get_connection())
+ self.tracker.start_and_wait_for_ready()
+ self.tracker.start_watching_updates()
+ except Exception as e:
+ shutil.rmtree(self.tmpdir, ignore_errors=True)
+ raise
@classmethod
def tearDownClass(self):
- self.tracker.stop()
+ self.tracker.stop_watching_updates()
+ self.sandbox.stop()
+
+ shutil.rmtree(self.tmpdir, ignore_errors=True)
diff --git a/tests/functional-tests/test-runner.sh.in b/tests/functional-tests/test-runner.sh.in
deleted file mode 100755
index 6e295aec6..000000000
--- a/tests/functional-tests/test-runner.sh.in
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash
-#
-# Test runner script for Tracker's functional tests
-
-set -e
-
-export TEMP_DIR=`mktemp --tmpdir -d tracker-test-XXXX`
-
-# We need to use the actual home directory for some tests because
-# Tracker will explicitly ignore files in /tmp ...
-export REAL_HOME=`echo ~`
-
-# ... but /tmp is preferred for test data, to avoid leaving debris
-# in the filesystem
-HOME=$TEMP_DIR
-
-echo "Running $@"
-dbus-run-session --config-file=@abs_top_builddir@/tests/test-bus.conf -- "$@"
-
-rm -R $TEMP_DIR
diff --git a/tests/services/meson.build b/tests/services/meson.build
index 182ba1e74..0223867e9 100644
--- a/tests/services/meson.build
+++ b/tests/services/meson.build
@@ -2,3 +2,5 @@ test_dbus_service_file = configure_file(
input: 'org.freedesktop.Tracker1.service.in',
output: 'org.freedesktop.Tracker1.service',
configuration: conf)
+
+tracker_test_dbus_services_dir = meson.current_build_dir()
diff --git a/tests/test-bus.conf.in b/tests/test-bus.conf.in
index 5b4f51ff9..f2df619e3 100644
--- a/tests/test-bus.conf.in
+++ b/tests/test-bus.conf.in
@@ -7,6 +7,13 @@
<listen>unix:tmpdir=./</listen>
<servicedir>@abs_top_builddir@/tests/services/</servicedir>
+ <standard_session_servicedirs/>
+
+ <!-- This timeout is annoying when the service is being debugged. We rely on
+ the test harness to stop the test and kill the daemon if it does
+ get stuck.
+ -->
+ <limit name="service_start_timeout">1000000</limit>
<policy context="default">
<!-- Allow everything to be sent -->
@@ -16,5 +23,4 @@
<!-- Allow anyone to own anything -->
<allow own="*"/>
</policy>
-
</busconfig>
diff --git a/utils/sandbox/tracker-sandbox.py b/utils/sandbox/tracker-sandbox.py
index cc8ebd786..db24679f3 100755
--- a/utils/sandbox/tracker-sandbox.py
+++ b/utils/sandbox/tracker-sandbox.py
@@ -24,19 +24,21 @@
#
import argparse
+import configparser
import locale
import logging
import os
import shlex
+import shutil
import signal
import subprocess
import sys
-import threading
-
-import configparser
+import tempfile
from gi.repository import GLib
+import trackertestutils.dbusdaemon
+
# Script
script_name = 'tracker-sandbox'
script_version = '1.0'
@@ -84,116 +86,6 @@ log = logging.getLogger('sandbox')
dbuslog = logging.getLogger('dbus')
-# Private DBus daemon
-
-class DBusDaemon:
- """The private D-Bus instance that provides the sandbox's session bus.
-
- We support reading and writing the session information to a file. This
- means that if the user runs two sandbox instances on the same data
- directory at the same time, they will share the same message bus.
- """
-
- def __init__(self, session_file=None):
- self.session_file = session_file
- self.existing_session = False
- self.process = None
-
- try:
- self.address, self.pid = self.read_session_file(session_file)
- self.existing_session = True
- except FileNotFoundError:
- log.debug("No existing D-Bus session file was found.")
-
- self.address = None
- self.pid = None
-
- def get_session_file(self):
- """Returns the path to the session file if we created it, or None."""
- if self.existing_session:
- return None
- return self.session_file
-
- def get_address(self):
- return self.address
-
- @staticmethod
- def read_session_file(session_file):
- with open(session_file, 'r') as f:
- content = f.read()
-
- try:
- address = content.splitlines()[0]
- pid = int(content.splitlines()[1])
- except ValueError:
- raise RuntimeError(f"D-Bus session file {session_file} is not valid. "
- "Remove this file to start a new session.")
-
- return address, pid
-
- @staticmethod
- def write_session_file(session_file, address, pid):
- os.makedirs(os.path.dirname(session_file), exist_ok=True)
-
- content = '%s\n%s' % (address, pid)
- with open(session_file, 'w') as f:
- f.write(content)
-
- def start_if_needed(self):
- if self.existing_session:
- log.debug('Using existing D-Bus session from file "%s" with address "%s"'
- ' with PID %d' % (self.session_file, self.address, self.pid))
- else:
- dbus_command = ['dbus-daemon', '--session', '--print-address=1', '--print-pid=1']
- self.process = subprocess.Popen(dbus_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-
- try:
- self.address = self.process.stdout.readline().strip().decode('ascii')
- self.pid = int(self.process.stdout.readline().strip().decode('ascii'))
- except ValueError:
- error = self.process.stderr.read().strip().decode('unicode-escape')
- raise RuntimeError(f"Failed to start D-Bus daemon.\n{error}")
-
- log.debug("Using new D-Bus session with address '%s' with PID %d",
- self.address, self.pid)
-
- self.write_session_file(self.session_file, self.address, self.pid)
- log.debug("Wrote D-Bus session file at %s", self.session_file)
-
- # We must read from the pipes continuously, otherwise the daemon
- # process will block.
- self._threads=[threading.Thread(target=self.pipe_to_log, args=(self.process.stdout, 'stdout'), daemon=True),
- threading.Thread(target=self.pipe_to_log, args=(self.process.stderr, 'stderr'), daemon=True)]
- self._threads[0].start()
- self._threads[1].start()
-
- def stop(self):
- if self.process:
- log.debug(" Stopping DBus daemon")
- self.process.terminate()
- self.process.wait()
-
- def pipe_to_log(self, pipe, source):
- """This function processes the output from our dbus-daemon instance."""
- while True:
- line_raw = pipe.readline()
-
- if len(line_raw) == 0:
- break
-
- line = line_raw.decode('utf-8').rstrip()
-
- if line.startswith('(tracker-'):
- # We set G_MESSAGES_PREFIXED=all, meaning that all log messages
- # output by Tracker processes have a prefix. Note that
- # g_print() will NOT be captured here.
- dbuslog.info(line)
- else:
- # Log messages from other daemons, including the dbus-daemon
- # itself, go here. Any g_print() messages also end up here.
- dbuslog.debug(line)
-
-
# Environment / Clean up
def environment_unset(dbus):
@@ -226,7 +118,7 @@ def environment_set_and_add_path(env, prefix, suffix):
os.environ[env] = full
-def environment_set(index_location, prefix, verbosity=0):
+def environment_set(index_location, prefix, verbosity=0, dbus_config=None):
# Environment
index_location = os.path.abspath(index_location)
prefix = os.path.abspath(os.path.expanduser(prefix))
@@ -260,8 +152,8 @@ def environment_set(index_location, prefix, verbosity=0):
dbus_session_file = os.path.join(
os.environ['XDG_RUNTIME_DIR'], 'dbus-session')
- dbus = DBusDaemon(dbus_session_file)
- dbus.start_if_needed()
+ dbus = trackertestutils.dbusdaemon.DBusDaemon(dbus_session_file)
+ dbus.start_if_needed(config_file=dbus_config)
# Important, other subprocesses must use our new bus
os.environ['DBUS_SESSION_BUS_ADDRESS'] = dbus.get_address()
@@ -347,6 +239,8 @@ def argument_parser():
"only show messages logged by Tracker daemons.")
parser.add_argument('--debug-sandbox', action='store_true',
help="show debugging info from tracker-sandbox")
+ parser.add_argument('--dbus-config', metavar='FILE',
+ help="use a custom config file for the private D-Bus daemon")
parser.add_argument('-v', '--verbosity', default='0',
choices=['0', '1', '2', '3', 'errors', 'minimal', 'detailed', 'debug'],
help="show debugging info from Tracker processes")
@@ -355,6 +249,9 @@ def argument_parser():
parser.add_argument('-i', '--index', metavar='DIR', type=str,
default=default_index_location, dest='index_location',
help=f"directory to the index (default={default_index_location})")
+ parser.add_argument('--index-tmpdir', action='store_true',
+ help="create index in a temporary directory and "
+ "delete it on exit (useful for automated testing)")
parser.add_argument('command', type=str, nargs='*', help="Command to run inside the shell")
return parser
@@ -405,8 +302,18 @@ if __name__ == "__main__":
verbosity = verbosity_as_int(args.verbosity)
+ index_location = None
+ index_tmpdir = None
+
+ if args.index_location != default_index_location and args.index_tmpdir:
+ raise RuntimeError("The --index-tmpdir flag is enabled, but --index= was also passed.")
+ if args.index_tmpdir:
+ index_location = index_tmpdir = tempfile.mkdtemp(prefix='tracker-sandbox')
+ else:
+ index_location = args.index_location
+
# Set up environment variables and foo needed to get started.
- dbus = environment_set(args.index_location, args.prefix, verbosity)
+ dbus = environment_set(index_location, args.prefix, verbosity, dbus_config=args.dbus_config)
config_set()
link_to_mime_data()
@@ -423,3 +330,5 @@ if __name__ == "__main__":
os.system(shell)
finally:
environment_unset(dbus)
+ if index_tmpdir:
+ shutil.rmtree(index_tmpdir, ignore_errors=True)
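
The `--dbus-config` and `--index-tmpdir` options added here are what the IPC tests in `tests/functional-tests/ipc/meson.build` pass to the sandbox. Run by hand, that invocation looks roughly like this (a sketch; the test binary path depends on your build directory layout):

    python3 utils/sandbox/tracker-sandbox.py --debug-sandbox --index-tmpdir \
        --dbus-config build/tests/test-bus.conf -- \
        build/tests/functional-tests/ipc/test-insert-or-replace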
diff --git a/utils/trackertestutils/dbusdaemon.py b/utils/trackertestutils/dbusdaemon.py
new file mode 100644
index 000000000..c7e4707f3
--- /dev/null
+++ b/utils/trackertestutils/dbusdaemon.py
@@ -0,0 +1,222 @@
+# Copyright (C) 2018,2019, Sam Thursfield <sam@afuera.me.uk>
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+# 02110-1301, USA.
+
+
+from gi.repository import Gio
+from gi.repository import GLib
+
+import logging
+import os
+import signal
+import subprocess
+import threading
+
+log = logging.getLogger(__name__)
+dbus_stderr_log = logging.getLogger(__name__ + '.stderr')
+dbus_stdout_log = logging.getLogger(__name__ + '.stdout')
+
+
+class DaemonNotStartedError(Exception):
+ pass
+
+
+class DBusDaemon:
+ """The private D-Bus instance that provides the sandbox's session bus.
+
+ We support reading and writing the session information to a file. This
+ means that if the user runs two sandbox instances on the same data
+ directory at the same time, they will share the same message bus.
+
+ """
+
+ def __init__(self, session_file=None):
+ self.session_file = session_file
+ self.existing_session = False
+ self.process = None
+
+ self.address = None
+ self.pid = None
+
+ self._gdbus_connection = None
+ self._previous_sigterm_handler = None
+
+ self._threads = []
+
+ if session_file:
+ try:
+ self.address, self.pid = self.read_session_file(session_file)
+ self.existing_session = True
+ except FileNotFoundError:
+ log.debug("No existing D-Bus session file was found.")
+
+ def get_session_file(self):
+ """Returns the path to the session file if we created it, or None."""
+ if self.existing_session:
+ return None
+ return self.session_file
+
+ def get_address(self):
+ if self.address is None:
+ raise DaemonNotStartedError()
+ return self.address
+
+ def get_connection(self):
+ if self._gdbus_connection is None:
+ raise DaemonNotStartedError()
+ return self._gdbus_connection
+
+ @staticmethod
+ def read_session_file(session_file):
+ with open(session_file, 'r') as f:
+ content = f.read()
+
+ try:
+ address = content.splitlines()[0]
+ pid = int(content.splitlines()[1])
+ except ValueError:
+ raise RuntimeError(f"D-Bus session file {session_file} is not valid. "
+ "Remove this file to start a new session.")
+
+ return address, pid
+
+ @staticmethod
+ def write_session_file(session_file, address, pid):
+ os.makedirs(os.path.dirname(session_file), exist_ok=True)
+
+ content = '%s\n%s' % (address, pid)
+ with open(session_file, 'w') as f:
+ f.write(content)
+
+ def start_if_needed(self, config_file=None, env=None):
+ if self.existing_session:
+ log.debug('Using existing D-Bus session from file "%s" with address "%s"'
+ ' with PID %d' % (self.session_file, self.address, self.pid))
+ else:
+ dbus_command = ['dbus-daemon', '--print-address=1', '--print-pid=1']
+ if config_file:
+ dbus_command += ['--config-file=' + config_file]
+ else:
+ dbus_command += ['--session']
+ log.debug("Running: %s", dbus_command)
+ self.process = subprocess.Popen(
+ dbus_command, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+
+ self._previous_sigterm_handler = signal.signal(
+ signal.SIGTERM, self._sigterm_handler)
+
+ try:
+ self.address = self.process.stdout.readline().strip().decode('ascii')
+ self.pid = int(self.process.stdout.readline().strip().decode('ascii'))
+ except ValueError:
+ error = self.process.stderr.read().strip().decode('unicode-escape')
+ raise RuntimeError(f"Failed to start D-Bus daemon.\n{error}")
+
+ log.debug("Using new D-Bus session with address '%s' with PID %d",
+ self.address, self.pid)
+
+ if self.session_file:
+ self.write_session_file(self.session_file, self.address, self.pid)
+ log.debug("Wrote D-Bus session file at %s", self.session_file)
+
+ # We must read from the pipes continuously, otherwise the daemon
+ # process will block.
+ self._threads=[threading.Thread(target=self.pipe_to_log, args=(self.process.stdout, dbus_stdout_log), daemon=True),
+ threading.Thread(target=self.pipe_to_log, args=(self.process.stderr, dbus_stderr_log), daemon=True)]
+ self._threads[0].start()
+ self._threads[1].start()
+
+ self._gdbus_connection = Gio.DBusConnection.new_for_address_sync(
+ self.address,
+ Gio.DBusConnectionFlags.AUTHENTICATION_CLIENT |
+ Gio.DBusConnectionFlags.MESSAGE_BUS_CONNECTION, None, None)
+
+ log.debug("Pinging the new D-Bus daemon...")
+ self.ping_sync()
+
+ def stop(self):
+ if self.process:
+ log.debug(" Stopping DBus daemon")
+ self.process.terminate()
+ self.process.wait()
+ self.process = None
+ if len(self._threads) > 0:
+ log.debug(" Stopping %i pipe reader threads", len(self._threads))
+ for thread in self._threads:
+ thread.join()
+ self._threads = []
+ if self._previous_sigterm_handler:
+ signal.signal(signal.SIGTERM, self._previous_sigterm_handler)
+ self._previous_sigterm_handler = None
+
+ def pipe_to_log(self, pipe, dbuslog):
+ """This function processes the output from our dbus-daemon instance."""
+ while True:
+ line_raw = pipe.readline()
+
+ if len(line_raw) == 0:
+ break
+
+ line = line_raw.decode('utf-8').rstrip()
+
+ if line.startswith('(tracker-'):
+ # We set G_MESSAGES_PREFIXED=all, meaning that all log messages
+ # output by Tracker processes have a prefix. Note that
+ # g_print() will NOT be captured here.
+ dbuslog.info(line)
+ else:
+ # Log messages from other daemons, including the dbus-daemon
+ # itself, go here. Any g_print() messages also end up here.
+ dbuslog.debug(line)
+ log.debug("Thread stopped")
+
+ # I'm not sure why this is needed, or if it's correct, but without it
+ # we see warnings like this:
+ #
+ # ResourceWarning: unclosed file <_io.BufferedReader name=3>
+ pipe.close()
+
+ def _sigterm_handler(self, signal, frame):
+ log.info("Received signal %s", signal)
+ self.stop()
+
+ def ping_sync(self):
+ """Call the daemon Ping() method to check that it is alive."""
+ self._gdbus_connection.call_sync(
+ 'org.freedesktop.DBus', '/', 'org.freedesktop.DBus', 'GetId',
+ None, None, Gio.DBusCallFlags.NONE, 10000, None)
+
+ def list_names_sync(self):
+ """Get the name of every client connected to the bus."""
+ conn = self.get_connection()
+ result = conn.call_sync('org.freedesktop.DBus',
+ '/org/freedesktop/DBus',
+ 'org.freedesktop.DBus', 'ListNames', None,
+ GLib.VariantType('(as)'),
+ Gio.DBusCallFlags.NONE, -1, None)
+ return result[0]
+
+ def get_connection_unix_process_id_sync(self, name):
+ """Get the process ID for one of the names connected to the bus."""
+ conn = self.get_connection()
+ result = conn.call_sync('org.freedesktop.DBus',
+ '/org/freedesktop/DBus',
+ 'org.freedesktop.DBus',
+ 'GetConnectionUnixProcessID',
+ GLib.Variant('(s)', [name]),
+ GLib.VariantType('(u)'),
+ Gio.DBusCallFlags.NONE, -1, None)
+ return result[0]
diff --git a/utils/trackertestutils/dconf.py b/utils/trackertestutils/dconf.py
index 4ad0e88e9..fe6d981fb 100644
--- a/utils/trackertestutils/dconf.py
+++ b/utils/trackertestutils/dconf.py
@@ -18,11 +18,9 @@
# 02110-1301, USA.
#
-from gi.repository import GLib
-from gi.repository import Gio
-
import logging
import os
+import subprocess
log = logging.getLogger(__name__)
@@ -36,28 +34,23 @@ class DConfClient(object):
this reason, and the constructor will fail if this isn't the profile in
use, to avoid any risk of modifying or removing your real configuration.
- The constructor will fail if DConf is not the default backend, because this
- probably indicates that the memory backend is in use. Without DConf the
- required configuration changes will not take effect, causing many tests to
- break.
+ We use the `gsettings` binary rather than using the Gio.Settings API.
+ This is to avoid the need to set DCONF_PROFILE in our own process
+ environment.
"""
- def __init__(self, schema):
- self._settings = Gio.Settings.new(schema)
-
- backend = self._settings.get_property('backend')
- self._check_settings_backend_is_dconf(backend)
- self._check_using_correct_dconf_profile()
-
- def _check_settings_backend_is_dconf(self, backend):
- typename = type(backend).__name__.split('.')[-1]
- if typename != 'DConfSettingsBackend':
- raise Exception(
- "The functional tests require DConf to be the default "
- "GSettings backend. Got %s instead." % typename)
+ def __init__(self, sandbox):
+ self.env = os.environ.copy()
+ self.env.update(sandbox.extra_env)
+ self.env['DBUS_SESSION_BUS_ADDRESS'] = sandbox.daemon.get_address()
def _check_using_correct_dconf_profile(self):
- profile = os.environ["DCONF_PROFILE"]
+ profile = self.env.get("DCONF_PROFILE")
+ if not profile:
+ raise Exception(
+ "DCONF_PROFILE is not set in the environment. This class must "
+ "be created inside a TrackerDBussandbox to avoid risk of "
+ "interfering with real settings.")
if not os.path.exists(profile):
raise Exception(
"Unable to find DConf profile '%s'. Check that Tracker and "
@@ -66,35 +59,11 @@ class DConfClient(object):
assert os.path.basename(profile) == "trackertest"
- def write(self, key, value):
+ def write(self, schema, key, value):
"""
Write a settings value.
"""
- self._settings.set_value(key, value)
-
- def read(self, schema, key):
- """
- Read a settings value.
- """
- return self._settings.get_value(key)
-
- def reset(self):
- """
- Remove all stored values, resetting configuration to the default.
-
- This can be done by removing the entire 'trackertest' configuration
- database.
- """
-
- self._check_using_correct_dconf_profile()
-
- # XDG_CONFIG_HOME is useless, so we use HOME. This code should not be
- # needed unless for some reason the test is not being run via the
- # 'test-runner.sh' script.
- dconf_db = os.path.join(os.environ["HOME"],
- ".config",
- "dconf",
- "trackertest")
- if os.path.exists(dconf_db):
- log.debug("[Conf] Removing dconf database: %s", dconf_db)
- os.remove(dconf_db)
+ subprocess.run(['gsettings', 'set', schema, key, value.print_(False)],
+ env=self.env,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
diff --git a/utils/trackertestutils/helpers.py b/utils/trackertestutils/helpers.py
index e6219be3c..ef450ee05 100644
--- a/utils/trackertestutils/helpers.py
+++ b/utils/trackertestutils/helpers.py
@@ -24,9 +24,12 @@ from gi.repository import GLib
import atexit
import logging
import os
-import subprocess
+import signal
+from . import dbusdaemon
from . import mainloop
+from . import psutil_mini as psutil
+
log = logging.getLogger(__name__)
@@ -54,172 +57,9 @@ def _cleanup_processes():
atexit.register(_cleanup_processes)
-class Helper:
+class StoreHelper():
"""
- Abstract helper for Tracker processes. Launches the process
- and waits for it to appear on the session bus.
-
- The helper will fail if the process is already running. Use
- test-runner.sh to ensure the processes run inside a separate DBus
- session bus.
-
- The process is watched using a timed GLib main loop source. If the process
- exits with an error code, the test will abort the next time the main loop
- is entered (or straight away if currently running the main loop).
- """
-
- STARTUP_TIMEOUT = 200 # milliseconds
- SHUTDOWN_TIMEOUT = 200 #
-
- def __init__(self, helper_name, bus_name, process_path):
- self.name = helper_name
- self.bus_name = bus_name
- self.process_path = process_path
-
- self.log = logging.getLogger(f'{__name__}.{self.name}')
-
- self.process = None
- self.available = False
-
- self.loop = mainloop.MainLoop()
-
- self.bus = Gio.bus_get_sync(Gio.BusType.SESSION, None)
-
- def _start_process(self, command_args=None, extra_env=None):
- global _process_list
- _process_list.append(self)
-
- command = [self.process_path] + (command_args or [])
- self.log.debug("Starting %s.", ' '.join(command))
-
- env = os.environ
- if extra_env:
- self.log.debug(" starting with extra environment: %s", extra_env)
- env.update(extra_env)
-
- try:
- return subprocess.Popen(command, env=env)
- except OSError as e:
- raise RuntimeError("Error starting %s: %s" % (self.process_path, e))
-
- def _bus_name_appeared(self, connection, name, owner):
- self.log.debug("%s appeared on the message bus, owned by %s", name, owner)
- self.available = True
- self.loop.quit()
-
- def _bus_name_vanished(self, connection, name):
- self.log.debug("%s vanished from the message bus", name)
- self.available = False
- self.loop.quit()
-
- def _process_watch_cb(self):
- if self.process_watch_timeout == 0:
- # GLib seems to call the timeout after we've removed it
- # sometimes, which causes errors unless we detect it.
- return False
-
- status = self.process.poll()
-
- if status is None:
- return True # continue
- elif status == 0 and not self.abort_if_process_exits_with_status_0:
- return True # continue
- else:
- self.process_watch_timeout = 0
- raise RuntimeError(f"{self.name} exited with status: {self.status}")
-
- def _process_startup_timeout_cb(self):
- self.log.debug(f"Process timeout of {self.STARTUP_TIMEOUT}ms was called")
- self.loop.quit()
- self.timeout_id = None
- return False
-
- def start(self, command_args=None, extra_env=None):
- """
- Start an instance of process and wait for it to appear on the bus.
- """
- if self.process is not None:
- raise RuntimeError("%s: already started" % self.name)
-
- self._bus_name_watch_id = Gio.bus_watch_name_on_connection(
- self.bus, self.bus_name, Gio.BusNameWatcherFlags.NONE,
- self._bus_name_appeared, self._bus_name_vanished)
-
- # We expect the _bus_name_vanished callback to be called here,
- # causing the loop to exit again.
- self.loop.run_checked()
-
- if self.available:
- # It's running, but we didn't start it...
- raise RuntimeError("Unable to start test instance of %s: "
- "already running" % self.name)
-
- self.process = self._start_process(command_args=command_args,
- extra_env=extra_env)
- self.log.debug('Started with PID %i', self.process.pid)
-
- self.process_startup_timeout = GLib.timeout_add(
- self.STARTUP_TIMEOUT, self._process_startup_timeout_cb)
-
- self.abort_if_process_exits_with_status_0 = True
-
- # Run the loop until the bus name appears, or the process dies.
- self.loop.run_checked()
-
- self.abort_if_process_exits_with_status_0 = False
-
- def stop(self):
- global _process_list
-
- if self.process is None:
- # Seems that it didn't even start...
- return
-
- if self.process_startup_timeout != 0:
- GLib.source_remove(self.process_startup_timeout)
- self.process_startup_timeout = 0
-
- if self.process.poll() == None:
- self.process.terminate()
- returncode = self.process.wait(timeout=self.SHUTDOWN_TIMEOUT * 1000)
- if returncode is None:
- self.log.debug("Process failed to terminate in time, sending kill!")
- self.process.kill()
- self.process.wait()
- elif returncode > 0:
- self.log.warn("Process returned error code %s", returncode)
-
- self.log.debug("Process stopped.")
-
- # Run the loop to handle the expected name_vanished signal.
- self.loop.run_checked()
- Gio.bus_unwatch_name(self._bus_name_watch_id)
-
- self.process = None
- _process_list.remove(self)
-
- def kill(self):
- global _process_list
-
- if self.process_watch_timeout != 0:
- GLib.source_remove(self.process_watch_timeout)
- self.process_watch_timeout = 0
-
- self.process.kill()
-
- # Name owner changed callback should take us out from this loop
- self.loop.run_checked()
- Gio.bus_unwatch_name(self._bus_name_watch_id)
-
- self.process = None
- _process_list.remove(self)
-
- self.log.debug("Process killed.")
-
-
-class StoreHelper (Helper):
- """
- Helper for starting and testing the tracker-store daemon.
+ Helper for testing the tracker-store daemon.
"""
TRACKER_BUSNAME = 'org.freedesktop.Tracker1'
@@ -235,32 +75,41 @@ class StoreHelper (Helper):
TRACKER_STATUS_OBJ_PATH = "/org/freedesktop/Tracker1/Status"
STATUS_IFACE = "org.freedesktop.Tracker1.Status"
- def __init__(self, process_path):
- Helper.__init__(self, "tracker-store", self.TRACKER_BUSNAME, process_path)
+ def __init__(self, dbus_connection):
+ self.log = logging.getLogger(__name__)
+ self.loop = mainloop.MainLoop()
- def start(self, command_args=None, extra_env=None):
- Helper.start(self, command_args, extra_env)
+ self.bus = dbus_connection
+ self.graph_updated_handler_id = 0
self.resources = Gio.DBusProxy.new_sync(
- self.bus, Gio.DBusProxyFlags.DO_NOT_AUTO_START, None,
+ self.bus, Gio.DBusProxyFlags.DO_NOT_AUTO_START_AT_CONSTRUCTION, None,
self.TRACKER_BUSNAME, self.TRACKER_OBJ_PATH, self.RESOURCES_IFACE)
self.backup_iface = Gio.DBusProxy.new_sync(
- self.bus, Gio.DBusProxyFlags.DO_NOT_AUTO_START, None,
+ self.bus, Gio.DBusProxyFlags.DO_NOT_AUTO_START_AT_CONSTRUCTION, None,
self.TRACKER_BUSNAME, self.TRACKER_BACKUP_OBJ_PATH, self.BACKUP_IFACE)
self.stats_iface = Gio.DBusProxy.new_sync(
- self.bus, Gio.DBusProxyFlags.DO_NOT_AUTO_START, None,
+ self.bus, Gio.DBusProxyFlags.DO_NOT_AUTO_START_AT_CONSTRUCTION, None,
self.TRACKER_BUSNAME, self.TRACKER_STATS_OBJ_PATH, self.STATS_IFACE)
self.status_iface = Gio.DBusProxy.new_sync(
- self.bus, Gio.DBusProxyFlags.DO_NOT_AUTO_START, None,
+ self.bus, Gio.DBusProxyFlags.DO_NOT_AUTO_START_AT_CONSTRUCTION, None,
self.TRACKER_BUSNAME, self.TRACKER_STATUS_OBJ_PATH, self.STATUS_IFACE)
+ def start_and_wait_for_ready(self):
+ # The daemon is autostarted as soon as a method is called.
+ #
+        # We pass a long timeout (1,000,000 ms) so the call does not give up
+        # while a daemon is being interactively debugged.
self.log.debug("Calling %s.Wait() method", self.STATUS_IFACE)
- self.status_iface.Wait()
+ self.status_iface.call_sync('Wait', None, Gio.DBusCallFlags.NONE, 1000000, None)
self.log.debug("Ready")
+ def start_watching_updates(self):
+ assert self.graph_updated_handler_id == 0
+
self.reset_graph_updates_tracking()
def signal_handler(proxy, sender_name, signal_name, parameters):
@@ -269,12 +118,13 @@ class StoreHelper (Helper):
self.graph_updated_handler_id = self.resources.connect(
'g-signal', signal_handler)
+ self.log.debug("Watching for updates from Resources interface")
- def stop(self):
- Helper.stop(self)
-
+ def stop_watching_updates(self):
if self.graph_updated_handler_id != 0:
+ self.log.debug("No longer watching for updates from Resources interface")
self.resources.disconnect(self.graph_updated_handler_id)
+ self.graph_updated_handler_id = 0
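+
+    # Illustrative note (not part of the original change; setUp/tearDown are
+    # assumed unittest-style methods of a test case holding this helper as
+    # self.tracker):
+    #
+    #     def setUp(self):
+    #         self.tracker.start_watching_updates()
+    #
+    #     def tearDown(self):
+    #         self.tracker.stop_watching_updates()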
# A system to follow GraphUpdated and make sure all changes are tracked.
# This code saves every change notification received, and exposes methods
@@ -329,6 +179,7 @@ class StoreHelper (Helper):
"""
assert (self.inserts_match_function == None)
assert (self.class_to_track == None), "Already waiting for resource of type %s" % self.class_to_track
+ assert (self.graph_updated_handler_id != 0), "You must call start_watching_updates() first."
self.class_to_track = rdf_class
@@ -413,6 +264,7 @@ class StoreHelper (Helper):
"""
assert (self.deletes_match_function == None)
assert (self.class_to_track == None)
+ assert (self.graph_updated_handler_id != 0), "You must call start_watching_updates() first."
def find_resource_deletion(deletes_list):
self.log.debug("find_resource_deletion: looking for %i in %s", id, deletes_list)
@@ -444,8 +296,8 @@ class StoreHelper (Helper):
# Run the event loop until the correct notification arrives
try:
self.loop.run_checked()
- except GraphUpdateTimeoutException:
- raise GraphUpdateTimeoutException("Resource %i has not been deleted." % id)
+ except GraphUpdateTimeoutException as e:
+ raise GraphUpdateTimeoutException("Resource %i has not been deleted." % id) from e
self.deletes_match_function = None
self.class_to_track = None
@@ -458,6 +310,7 @@ class StoreHelper (Helper):
assert (self.inserts_match_function == None)
assert (self.deletes_match_function == None)
assert (self.class_to_track == None)
+ assert (self.graph_updated_handler_id != 0), "You must call start_watching_updates() first."
self.log.debug("Await change to %i %s (%i, %i existing)", subject_id, property_uri, len(self.inserts_list), len(self.deletes_list))
@@ -505,14 +358,14 @@ class StoreHelper (Helper):
     # is useful for testing this API surface, but we recommend that all regular
     # applications use the libtracker-sparql library to talk to the database.
- def query(self, query, timeout=5000, **kwargs):
- return self.resources.SparqlQuery('(s)', query, timeout=timeout, **kwargs)
+ def query(self, query, **kwargs):
+ return self.resources.SparqlQuery('(s)', query, **kwargs)
- def update(self, update_sparql, timeout=5000, **kwargs):
- return self.resources.SparqlUpdate('(s)', update_sparql, timeout=timeout, **kwargs)
+ def update(self, update_sparql, **kwargs):
+ return self.resources.SparqlUpdate('(s)', update_sparql, **kwargs)
- def load(self, ttl_uri, timeout=5000, **kwargs):
- return self.resources.Load('(s)', ttl_uri, timeout=timeout, **kwargs)
+ def load(self, ttl_uri, **kwargs):
+ return self.resources.Load('(s)', ttl_uri, **kwargs)
def batch_update(self, update_sparql, **kwargs):
return self.resources.BatchSparqlUpdate('(s)', update_sparql, **kwargs)
@@ -582,3 +435,61 @@ class StoreHelper (Helper):
return False
else:
raise Exception("Something fishy is going on")
+
+
+class TrackerDBusSandbox:
+ """
+    Private D-Bus session bus which executes a sandboxed Tracker instance.
+    """
+ def __init__(self, dbus_daemon_config_file, extra_env=None):
+ self.dbus_daemon_config_file = dbus_daemon_config_file
+ self.extra_env = extra_env or {}
+
+ self.daemon = dbusdaemon.DBusDaemon()
+
+ def start(self):
+ env = os.environ
+ env.update(self.extra_env)
+ env['G_MESSAGES_PREFIXED'] = 'all'
+
+ # Precreate runtime dir, to avoid this warning from dbus-daemon:
+ #
+ # Unable to set up transient service directory: XDG_RUNTIME_DIR "/home/sam/tracker-tests/tmp_59i3ev1/run" not available: No such file or directory
+ #
+ xdg_runtime_dir = env.get('XDG_RUNTIME_DIR')
+ if xdg_runtime_dir:
+ os.makedirs(xdg_runtime_dir, exist_ok=True)
+
+ log.info("Starting D-Bus daemon for sandbox.")
+ log.debug("Added environment variables: %s", self.extra_env)
+ self.daemon.start_if_needed(self.dbus_daemon_config_file, env=env)
+
+ def stop(self):
+ tracker_processes = []
+
+ log.info("Looking for active Tracker processes on the bus")
+ for busname in self.daemon.list_names_sync():
+ if busname.startswith('org.freedesktop.Tracker1'):
+ pid = self.daemon.get_connection_unix_process_id_sync(busname)
+ tracker_processes.append(pid)
+
+ log.info("Terminating %i Tracker processes", len(tracker_processes))
+ for pid in tracker_processes:
+ os.kill(pid, signal.SIGTERM)
+
+ log.info("Waiting for %i Tracker processes", len(tracker_processes))
+ for pid in tracker_processes:
+ psutil.wait_pid(pid)
+
+ # We need to wait until Tracker processes have stopped before we
+ # terminate the D-Bus daemon, otherwise lots of criticals like this
+ # appear in the log output:
+ #
+ # (tracker-miner-fs:14955): GLib-GIO-CRITICAL **: 11:38:40.386: Error while sending AddMatch() message: The connection is closed
+
+ log.info("Stopping D-Bus daemon for sandbox.")
+ self.daemon.stop()
+
+ def get_connection(self):
+ return self.daemon.get_connection()
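+
+
+# Illustrative end-to-end usage of the classes above (not part of the original
+# change; the D-Bus config path, the extra environment variable and the SPARQL
+# query are assumptions for the example):
+#
+#     sandbox = TrackerDBusSandbox('tests/test-bus.conf',
+#                                  extra_env={'LANG': 'en_GB.utf8'})
+#     sandbox.start()
+#     try:
+#         store = StoreHelper(sandbox.get_connection())
+#         store.start_and_wait_for_ready()
+#         store.start_watching_updates()
+#         print(store.query('SELECT ?c WHERE { ?c a rdfs:Class }'))
+#         store.stop_watching_updates()
+#     finally:
+#         sandbox.stop()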
diff --git a/utils/trackertestutils/meson.build b/utils/trackertestutils/meson.build
index 99573e323..e8ab94c72 100644
--- a/utils/trackertestutils/meson.build
+++ b/utils/trackertestutils/meson.build
@@ -1,8 +1,10 @@
sources = [
'__init__.py',
+ 'dbusdaemon.py',
'dconf.py',
'helpers.py',
- 'mainloop.py'
+ 'mainloop.py',
+ 'psutil_mini.py',
]
install_data(sources,
diff --git a/utils/trackertestutils/psutil_mini.py b/utils/trackertestutils/psutil_mini.py
new file mode 100644
index 000000000..d0c93565d
--- /dev/null
+++ b/utils/trackertestutils/psutil_mini.py
@@ -0,0 +1,98 @@
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style license that can be
+# found at https://github.com/giampaolo/psutil/blob/master/LICENSE
+#
+# Taken from https://github.com/giampaolo/psutil/blob/master/psutil/_psposix.py
+# by Sam Thursfield to avoid adding a dependency between the Tracker testsuite
+# and the 'psutil' module.
+
+
+import os
+import time
+
+
+class TimeoutExpired(Exception):
+ pass
+
+
+def pid_exists(pid):
+ """Check whether pid exists in the current process table."""
+ if pid == 0:
+ # According to "man 2 kill" PID 0 has a special meaning:
+ # it refers to <<every process in the process group of the
+ # calling process>> so we don't want to go any further.
+ # If we get here it means this UNIX platform *does* have
+ # a process with id 0.
+ return True
+ try:
+ os.kill(pid, 0)
+ except ProcessLookupError:
+ return False
+ except PermissionError:
+ # EPERM clearly means there's a process to deny access to
+ return True
+ # According to "man 2 kill" possible error values are
+ # (EINVAL, EPERM, ESRCH)
+ else:
+ return True
+
+
+def wait_pid(pid, timeout=None, proc_name=None):
+ """Wait for process with pid 'pid' to terminate and return its
+ exit status code as an integer.
+    If pid is not a child of os.getpid() (the current process), just
+    wait until the process disappears and return None.
+    If pid does not exist at all, return None immediately.
+    Raise TimeoutExpired if the timeout expires.
+ """
+ def check_timeout(delay):
+ if timeout is not None:
+ if timer() >= stop_at:
+                raise TimeoutExpired(
+                    "timed out after %s seconds (pid=%s, name=%s)"
+                    % (timeout, pid, proc_name))
+ time.sleep(delay)
+ return min(delay * 2, 0.04)
+
+ timer = getattr(time, 'monotonic', time.time)
+ if timeout is not None:
+ def waitcall():
+ return os.waitpid(pid, os.WNOHANG)
+ stop_at = timer() + timeout
+ else:
+ def waitcall():
+ return os.waitpid(pid, 0)
+
+ delay = 0.0001
+ while True:
+ try:
+ retpid, status = waitcall()
+ except InterruptedError:
+ delay = check_timeout(delay)
+ except ChildProcessError:
+ # This has two meanings:
+ # - pid is not a child of os.getpid() in which case
+ # we keep polling until it's gone
+ # - pid never existed in the first place
+ # In both cases we'll eventually return None as we
+ # can't determine its exit status code.
+ while True:
+ if pid_exists(pid):
+ delay = check_timeout(delay)
+ else:
+ return
+ else:
+ if retpid == 0:
+ # WNOHANG was used, pid is still running
+ delay = check_timeout(delay)
+ continue
+ # process exited due to a signal; return the integer of
+ # that signal
+ if os.WIFSIGNALED(status):
+ return -os.WTERMSIG(status)
+ # process exited using exit(2) system call; return the
+ # integer exit(2) system call has been called with
+ elif os.WIFEXITED(status):
+ return os.WEXITSTATUS(status)
+ else:
+ # should never happen
+ raise ValueError("unknown process exit status %r" % status)
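+
+
+# Illustrative usage (not part of the original change): TrackerDBusSandbox.stop()
+# in helpers.py calls wait_pid(pid) with no timeout after sending SIGTERM. A
+# caller that wants a bounded wait could do (the timeout value is an assumption):
+#
+#     try:
+#         wait_pid(pid, timeout=10)
+#     except TimeoutExpired:
+#         os.kill(pid, signal.SIGKILL)   # 'signal' would need importing here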