summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorNicholas Car <nicholas.car@surroundaustralia.com>2021-07-02 23:49:46 +1000
committerGitHub <noreply@github.com>2021-07-02 23:49:46 +1000
commitc59de84101830c3deced0032af1d5ae735728f1d (patch)
treec801a876e90b249440d338279d6b58629056a665
parente64a9fdcf31442265bdbecc5a56f933d68983d20 (diff)
parent2bedfbb38eef48666fd386ced19f6442db9eb5d2 (diff)
downloadrdflib-c59de84101830c3deced0032af1d5ae735728f1d.tar.gz
Merge branch 'master' into docco_clean
-rw-r--r--CHANGELOG.md12
-rw-r--r--docs/apidocs/examples.rst4
-rw-r--r--docs/intro_to_sparql.rst9
-rw-r--r--docs/persistence.rst25
-rw-r--r--docs/plugin_stores.rst2
-rw-r--r--examples/berkeleydb_example.py134
-rw-r--r--examples/film.py1
-rw-r--r--examples/simple_example.py10
-rw-r--r--examples/sleepycat_example.py57
-rw-r--r--rdflib/plugin.py2
-rw-r--r--rdflib/plugins/stores/berkeleydb.py (renamed from rdflib/plugins/stores/sleepycat.py)44
-rw-r--r--requirements.dev.txt1
-rw-r--r--test/test_store_berkeleydb.py114
13 files changed, 319 insertions, 96 deletions
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a72b4d66..678db1a4 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -978,7 +978,7 @@ Fixed a range of minor issues:
* http://code.google.com/p/rdflib/issues/detail?id=149
- Sleepycat Store broken with create=False
+ BerkeleyDB Store broken with create=False
* http://code.google.com/p/rdflib/issues/detail?id=134
@@ -1228,7 +1228,7 @@ Fixed conversion of (exiplicit) MySQL ports to integers.
Fixed MySQL store so it properly calculates ```__len__``` of
individual Graphs
-Aligned with how Sleepycat is generating events (remove events
+Aligned with how BerkeleyDB is generating events (remove events
are expressed in terms of interned strings)
Added code to catch unpickling related exceptions
@@ -1248,7 +1248,7 @@ TripleRemoved.
Added Journal Reader and Writer.
-Removed Sleepycat level journaling.
+Removed BerkeleyDB level journaling.
Added support for triple quoted Literal's.
@@ -1329,7 +1329,7 @@ argument to util's date_time method.
Fixed a relativize bug in the rdf/xml serializer.
Fixed NameError: global name 'URIRef' is not defined error in
-Sleepycat.py by adding missing import.
+BerkeleyDB.py by adding missing import.
Applied patch for Seq to sort list by integer, added by Drew
Hess.
@@ -1360,7 +1360,7 @@ Added N3 support to Graph and Store.
Added Sean's n3p parser, and ntriples parser.
-Sleepycat implementation has been revamped in the process of
+BerkeleyDB implementation has been revamped in the process of
expanding it to support the new requirements n3
requirements. It also now persists a journal -- more to come.
@@ -1390,7 +1390,7 @@ it provides Atomicity in the best case scenario.
2005/10/10 RELEASE 2.2.3
========================
-Fixed Sleepycat backend to commit after an add and
+Fixed BerkeleyDB backend to commit after an add and
remove. This should help just a bit with those unclean
shutdowns ;)
diff --git a/docs/apidocs/examples.rst b/docs/apidocs/examples.rst
index f386408b..84a9bee9 100644
--- a/docs/apidocs/examples.rst
+++ b/docs/apidocs/examples.rst
@@ -58,10 +58,10 @@ These examples all live in ``./examples`` in the source-distribution of RDFLib.
:undoc-members:
:show-inheritance:
-:mod:`sleepycat_example` Module
+:mod:`berkeleydb_example` Module
--------------------------------
-.. automodule:: examples.sleepycat_example
+.. automodule:: examples.berkeleydb_example
:members:
:undoc-members:
:show-inheritance:
diff --git a/docs/intro_to_sparql.rst b/docs/intro_to_sparql.rst
index e80cae58..ef7aab78 100644
--- a/docs/intro_to_sparql.rst
+++ b/docs/intro_to_sparql.rst
@@ -16,8 +16,8 @@ Queries can be evaluated against a graph with the
:meth:`rdflib.graph.Graph.query` method, and updates with
:meth:`rdflib.graph.Graph.update`.
-A query method returns a :class:`rdflib.query.Result` instance. For
-``SELECT`` queries, iterating over this returns
+The query method returns a :class:`rdflib.query.Result` instance. For
+SELECT queries, iterating over this returns
:class:`rdflib.query.ResultRow` instances, each containing a set of
variable bindings. For ``CONSTRUCT``/``DESCRIBE`` queries, iterating over the
result object gives the triples. For ``ASK`` queries, iterating will yield
@@ -43,6 +43,8 @@ For example...
for row in qres:
print(f"{row.aname} knows {row.bname}")
+
+
The results are tuples of values in the same order as your ``SELECT``
arguments. Alternatively, the values can be accessed by variable
name, either as attributes, or as items, e.g. ``row.b`` and ``row["b"]`` are
@@ -122,6 +124,7 @@ example:
# y: a d:
+
Querying a Remote Service
^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -147,6 +150,8 @@ The ``SERVICE`` keyword of SPARQL 1.1 can send a query to a remote SPARQL endpoi
for row in qres:
print(row.s)
+
+
This example sends a query to `DBPedia <https://dbpedia.org/>`_'s SPARQL endpoint service so that it can run the query
and then send back the result:
diff --git a/docs/persistence.rst b/docs/persistence.rst
index 6b373580..43c56176 100644
--- a/docs/persistence.rst
+++ b/docs/persistence.rst
@@ -19,10 +19,10 @@ this API for a different store.
Stores currently shipped with core RDFLib
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-* :class:`Memory <rdflib.plugins.stores.memory.Memory>` (not persistent!)
-* :class:`~rdflib.plugins.stores.sleepycat.Sleepycat` (on disk persistence via Python's :ref:`bsddb` or :ref:`bsddb3` packages)
-* :class:`~rdflib.plugins.stores.sparqlstore.SPARQLStore` - a read-only wrapper around a remote SPARQL Query endpoint.
-* :class:`~rdflib.plugins.stores.sparqlstore.SPARQLUpdateStore` - a read-write wrapper around a remote SPARQL query/update endpoint pair.
+* :class:`Memory <rdflib.plugins.stores.memory.Memory>` - not persistent!
+* :class:`~rdflib.plugins.stores.berkeleydb.BerkeleyDB` - on disk persistence via Python's `berkeleydb package <https://pypi.org/project/berkeleydb/>`_
+* :class:`~rdflib.plugins.stores.sparqlstore.SPARQLStore` - a read-only wrapper around a remote SPARQL Query endpoint
+* :class:`~rdflib.plugins.stores.sparqlstore.SPARQLUpdateStore` - a read-write wrapper around a remote SPARQL query/update endpoint pair
Usage
^^^^^
@@ -33,7 +33,7 @@ In most cases, passing the name of the store to the Graph constructor is enough:
from rdflib import Graph
- graph = Graph(store='Sleepycat')
+ graph = Graph(store='BerkeleyDB')
Most stores offering on-disk persistence will need to be opened before reading or writing.
@@ -42,13 +42,20 @@ an identifier with which you can open the graph:
.. code-block:: python
- graph = Graph('Sleepycat', identifier='mygraph')
+ graph = Graph('BerkeleyDB', identifier='mygraph')
# first time create the store:
- graph.open('/home/user/data/myRDFLibStore', create = True)
+ graph.open('/home/user/data/myRDFLibStore', create=True)
# work with the graph:
- graph.add( mytriples )
+ data = """
+ PREFIX : <https://example.org/>
+
+ :a :b :c .
+ :d :e :f .
+ :d :g :h .
+ """
+ graph.parse(data=data, format="ttl")
# when done!
graph.close()
@@ -70,5 +77,5 @@ More store implementations are available in RDFLib extension projects:
Example
^^^^^^^
-* :mod:`examples.sleepycat_example` contains an example for using a Sleepycat store.
+* :mod:`examples.berkeleydb_example` contains an example for using a BerkeleyDB store.
* :mod:`examples.sparqlstore_example` contains an example for using a SPARQLStore.
diff --git a/docs/plugin_stores.rst b/docs/plugin_stores.rst
index a936c54e..8fd511d3 100644
--- a/docs/plugin_stores.rst
+++ b/docs/plugin_stores.rst
@@ -14,6 +14,6 @@ SimpleMemory :class:`~rdflib.plugins.stores.memory.SimpleMemory`
Memory :class:`~rdflib.plugins.stores.memory.Memory`
SPARQLStore :class:`~rdflib.plugins.stores.sparqlstore.SPARQLStore`
SPARQLUpdateStore :class:`~rdflib.plugins.stores.sparqlstore.SPARQLUpdateStore`
-Sleepycat :class:`~rdflib.plugins.stores.sleepycat.Sleepycat`
+BerkeleyDB :class:`~rdflib.plugins.stores.berkeleydb.BerkeleyDB`
default :class:`~rdflib.plugins.stores.memory.Memory`
================= ============================================================
diff --git a/examples/berkeleydb_example.py b/examples/berkeleydb_example.py
new file mode 100644
index 00000000..d50352b1
--- /dev/null
+++ b/examples/berkeleydb_example.py
@@ -0,0 +1,134 @@
+"""
+BerkeleyDB in use as a persistent Graph store.
+
+Example 1: simple actions
+
+* creating a ConjunctiveGraph using the BerkeleyDB Store
+* adding triples to it
+* counting them
+* closing the store, emptying the graph
+* re-opening the store using the same DB files
+* getting the same count of triples as before
+
+Example 2: larger data
+
+* loads multiple graphs downloaded from GitHub into a BerkeleyDB-backed graph stored in the folder gsq_vocabs.
+* does not delete the DB at the end so you can see it on disk
+"""
+import os
+from rdflib import ConjunctiveGraph, Namespace, Literal
+from rdflib.store import NO_STORE, VALID_STORE
+from tempfile import mktemp
+
+
+def example_1():
+ """Creates a ConjunctiveGraph and performs some BerkeleyDB tasks with it
+ """
+ path = mktemp()
+
+ # Declare we are using a BerkeleyDB Store
+ graph = ConjunctiveGraph("BerkeleyDB")
+
+ # Open previously created store, or create it if it doesn't exist yet
+ # (always doesn't exist in this example as using temp file location)
+ rt = graph.open(path, create=False)
+
+ if rt == NO_STORE:
+ # There is no underlying BerkeleyDB infrastructure, so create it
+ print("Creating new DB")
+ graph.open(path, create=True)
+ else:
+ print("Using existing DB")
+ assert rt == VALID_STORE, "The underlying store is corrupt"
+
+ print("Triples in graph before add:", len(graph))
+ print("(will always be 0 when using temp file for DB)")
+
+ # Now we'll add some triples to the graph & commit the changes
+ EG = Namespace("http://example.net/test/")
+ graph.bind("eg", EG)
+
+ graph.add((EG["pic:1"], EG.name, Literal("Jane & Bob")))
+ graph.add((EG["pic:2"], EG.name, Literal("Squirrel in Tree")))
+
+ graph.commit()
+
+ print("Triples in graph after add:", len(graph))
+ print("(should be 2)")
+
+ # display the graph in Turtle
+ print(graph.serialize())
+
+ # close when done, otherwise BerkeleyDB will leak lock entries.
+ graph.close()
+
+ graph = None
+
+ # reopen the graph
+ graph = ConjunctiveGraph("BerkeleyDB")
+
+ graph.open(path, create=False)
+
+ print("Triples still in graph:", len(graph))
+ print("(should still be 2)")
+
+ graph.close()
+
+ # Clean up the temp folder to remove the BerkeleyDB database files...
+ for f in os.listdir(path):
+ os.unlink(path + "/" + f)
+ os.rmdir(path)
+
+
+def example_2():
+ """Loads a number of SKOS vocabularies from GitHub into a BerkeleyDB-backed graph stored in the local folder
+ 'gsq_vocabs'
+
+ Should print out the number of triples after each load, e.g.:
+ 177
+ 248
+ 289
+ 379
+ 421
+ 628
+ 764
+ 813
+ 965
+ 1381
+ 9666
+ 9719
+ ...
+ """
+ from urllib.request import urlopen, Request
+ from urllib.error import HTTPError
+ import json
+ import base64
+
+ g = ConjunctiveGraph("BerkeleyDB")
+ g.open("gsg_vocabs", create=True)
+
+ # gsq_vocabs = "https://api.github.com/repos/geological-survey-of-queensland/vocabularies/git/trees/master"
+ gsq_vocabs = "https://api.github.com/repos/geological-survey-of-queensland/vocabularies/git/trees/cd7244d39337c1f4ef164b1cf1ea1f540a7277db"
+ try:
+ res = urlopen(Request(gsq_vocabs, headers={"Accept": "application/json"}))
+ except HTTPError as e:
+ return e.code, str(e), None
+
+ data = res.read()
+ encoding = res.info().get_content_charset('utf-8')
+ j = json.loads(data.decode(encoding))
+ for v in j["tree"]:
+ # process the element in GitHub result if it's a Turtle file
+ if v["path"].endswith(".ttl"):
+ # for each file, call it by URL, decode it and parse it into the graph
+ r = urlopen(v['url'])
+ content = json.loads(r.read().decode())["content"]
+ g.parse(data=base64.b64decode(content).decode(), format="turtle")
+ print(len(g))
+
+ print("loading complete")
+
+
+if __name__ == "__main__":
+ example_1()
+ example_2()
diff --git a/examples/film.py b/examples/film.py
index 5582326b..a23a3c0f 100644
--- a/examples/film.py
+++ b/examples/film.py
@@ -1,7 +1,6 @@
#!/usr/bin/env python
"""
film.py: a simple tool to manage your movies reviews
-
Simon Rozet, http://atonie.org/
- manage directors and writers
diff --git a/examples/simple_example.py b/examples/simple_example.py
index 077382a3..49f08408 100644
--- a/examples/simple_example.py
+++ b/examples/simple_example.py
@@ -37,16 +37,16 @@ if __name__ == "__main__":
print()
print("RDF Serializations:")
+ # Serialize as Turtle (default)
+ print("--- start: turtle ---")
+ print(store.serialize())
+ print("--- end: turtle ---\n")
+
# Serialize as XML
print("--- start: rdf-xml ---")
print(store.serialize(format="pretty-xml"))
print("--- end: rdf-xml ---\n")
- # Serialize as Turtle
- print("--- start: turtle ---")
- print(store.serialize(format="turtle"))
- print("--- end: turtle ---\n")
-
# Serialize as NTriples
print("--- start: ntriples ---")
print(store.serialize(format="nt"))
diff --git a/examples/sleepycat_example.py b/examples/sleepycat_example.py
deleted file mode 100644
index 484484b9..00000000
--- a/examples/sleepycat_example.py
+++ /dev/null
@@ -1,57 +0,0 @@
-"""
-A simple example showing how to use a Sleepycat store to do on-disk
-persistence.
-"""
-
-from rdflib import ConjunctiveGraph, Namespace, Literal
-from rdflib.store import NO_STORE, VALID_STORE
-
-from tempfile import mktemp
-
-if __name__ == "__main__":
- path = mktemp()
-
- # Open previously created store, or create it if it doesn't exist yet
- graph = ConjunctiveGraph("Sleepycat")
-
- rt = graph.open(path, create=False)
-
- if rt == NO_STORE:
- # There is no underlying Sleepycat infrastructure, so create it
- graph.open(path, create=True)
- else:
- assert rt == VALID_STORE, "The underlying store is corrupt"
-
- print("Triples in graph before add: ", len(graph))
-
- # Now we'll add some triples to the graph & commit the changes
- rdflib = Namespace("http://rdflib.net/test/")
- graph.bind("test", "http://rdflib.net/test/")
-
- graph.add((rdflib["pic:1"], rdflib.name, Literal("Jane & Bob")))
- graph.add((rdflib["pic:2"], rdflib.name, Literal("Squirrel in Tree")))
-
- print("Triples in graph after add: ", len(graph))
-
- # display the graph in RDF/XML
- print(graph.serialize(format="n3"))
-
- # close when done, otherwise sleepycat will leak lock entries.
- graph.close()
-
- # reopen the graph
-
- graph = ConjunctiveGraph("Sleepycat")
-
- graph.open(path, create=False)
-
- print("Triples still in graph: ", len(graph))
-
- graph.close()
-
- # Clean up the temp folder to remove the Sleepycat database files...
- import os
-
- for f in os.listdir(path):
- os.unlink(path + "/" + f)
- os.rmdir(path)
diff --git a/rdflib/plugin.py b/rdflib/plugin.py
index 1e364e2c..87812504 100644
--- a/rdflib/plugin.py
+++ b/rdflib/plugin.py
@@ -138,7 +138,7 @@ register("Memory", Store, "rdflib.plugins.stores.memory", "Memory")
register("SimpleMemory", Store, "rdflib.plugins.stores.memory", "SimpleMemory")
register("Auditable", Store, "rdflib.plugins.stores.auditable", "AuditableStore")
register("Concurrent", Store, "rdflib.plugins.stores.concurrent", "ConcurrentStore")
-register("Sleepycat", Store, "rdflib.plugins.stores.sleepycat", "Sleepycat")
+register("BerkeleyDB", Store, "rdflib.plugins.stores.berkeleydb", "BerkeleyDB")
register("SPARQLStore", Store, "rdflib.plugins.stores.sparqlstore", "SPARQLStore")
register(
"SPARQLUpdateStore", Store, "rdflib.plugins.stores.sparqlstore", "SPARQLUpdateStore"
diff --git a/rdflib/plugins/stores/sleepycat.py b/rdflib/plugins/stores/berkeleydb.py
index b6b90470..d02a2158 100644
--- a/rdflib/plugins/stores/sleepycat.py
+++ b/rdflib/plugins/stores/berkeleydb.py
@@ -12,7 +12,7 @@ def bb(u):
try:
- from bsddb3 import db
+ from berkeleydb import db
has_bsddb = True
except ImportError:
@@ -33,10 +33,30 @@ if has_bsddb:
logger = logging.getLogger(__name__)
-__all__ = ["Sleepycat"]
+__all__ = ["BerkeleyDB"]
-class Sleepycat(Store):
+class BerkeleyDB(Store):
+ """\
+ A store that allows for on-disk persistence using BerkeleyDB, a fast
+ key/value DB.
+
+ This store implementation used to be known, prior to rdflib 6.0.0,
+ as 'Sleepycat' due to that being the then name of the Python wrapper
+ for BerkeleyDB.
+
+ This store allows for quads as well as triples. See examples of use
+ in both the `examples.berkeleydb_example` and `test.test_store_berkeleydb`
+ files.
+
+ **NOTE on installation**:
+
+ To use this store, you must have BerkeleyDB installed on your system
+ separately to Python (`brew install berkeley-db` on a Mac) and also have
+ the BerkeleyDB Python wrapper installed (`pip install berkeleydb`).
+ You may need to install BerkeleyDB Python wrapper like this:
+ `YES_I_HAVE_THE_RIGHT_TO_USE_THIS_BERKELEY_DB_VERSION=1 pip install berkeleydb`
+ """
context_aware = True
formula_aware = True
transaction_aware = False
@@ -45,10 +65,10 @@ class Sleepycat(Store):
def __init__(self, configuration=None, identifier=None):
if not has_bsddb:
- raise ImportError("Unable to import bsddb/bsddb3, store is unusable.")
+ raise ImportError("Unable to import berkeleydb, store is unusable.")
self.__open = False
self.__identifier = identifier
- super(Sleepycat, self).__init__(configuration)
+ super(BerkeleyDB, self).__init__(configuration)
self._loads = self.node_pickler.loads
self._dumps = self.node_pickler.dumps
@@ -252,7 +272,7 @@ class Sleepycat(Store):
value = cspo.get(bb("%s^%s^%s^%s^" % (c, s, p, o)), txn=txn)
if value is None:
- self.__contexts.put(bb(c), "", txn=txn)
+ self.__contexts.put(bb(c), b"", txn=txn)
contexts_value = cspo.get(
bb("%s^%s^%s^%s^" % ("", s, p, o)), txn=txn
@@ -262,9 +282,9 @@ class Sleepycat(Store):
contexts_value = "^".encode("latin-1").join(contexts)
assert contexts_value is not None
- cspo.put(bb("%s^%s^%s^%s^" % (c, s, p, o)), "", txn=txn)
- cpos.put(bb("%s^%s^%s^%s^" % (c, p, o, s)), "", txn=txn)
- cosp.put(bb("%s^%s^%s^%s^" % (c, o, s, p)), "", txn=txn)
+ cspo.put(bb("%s^%s^%s^%s^" % (c, s, p, o)), b"", txn=txn)
+ cpos.put(bb("%s^%s^%s^%s^" % (c, p, o, s)), b"", txn=txn)
+ cosp.put(bb("%s^%s^%s^%s^" % (c, o, s, p)), b"", txn=txn)
if not quoted:
cspo.put(bb("%s^%s^%s^%s^" % ("", s, p, o)), contexts_value, txn=txn)
cpos.put(bb("%s^%s^%s^%s^" % ("", p, o, s)), contexts_value, txn=txn)
@@ -510,7 +530,7 @@ class Sleepycat(Store):
cursor.close()
def add_graph(self, graph):
- self.__contexts.put(bb(self._to_string(graph)), "")
+ self.__contexts.put(bb(self._to_string(graph)), b"")
def remove_graph(self, graph):
self.remove((None, None, None), graph)
@@ -523,14 +543,14 @@ class Sleepycat(Store):
k = self._dumps(term)
i = self.__k2i.get(k, txn=txn)
if i is None:
- # weird behavoir from bsddb not taking a txn as a keyword argument
+ # weird behaviour from bsddb not taking a txn as a keyword argument
# for append
if self.transaction_aware:
i = "%s" % self.__i2k.append(k, txn)
else:
i = "%s" % self.__i2k.append(k)
- self.__k2i.put(k, i, txn=txn)
+ self.__k2i.put(k, i.encode(), txn=txn)
else:
i = i.decode()
return i
diff --git a/requirements.dev.txt b/requirements.dev.txt
index 77fda028..09500eb1 100644
--- a/requirements.dev.txt
+++ b/requirements.dev.txt
@@ -5,3 +5,4 @@ nose-timer
coverage
flake8
doctest-ignore-unicode==0.1.2
+berkeleydb \ No newline at end of file
diff --git a/test/test_store_berkeleydb.py b/test/test_store_berkeleydb.py
new file mode 100644
index 00000000..634b7221
--- /dev/null
+++ b/test/test_store_berkeleydb.py
@@ -0,0 +1,114 @@
+import unittest
+from tempfile import mktemp
+from rdflib import ConjunctiveGraph, URIRef
+from rdflib.store import VALID_STORE
+
+
+class BerkeleyDBTestCase(unittest.TestCase):
+ def setUp(self):
+ self.store_name = "BerkeleyDB"
+ self.path = mktemp()
+ self.g = ConjunctiveGraph(store=self.store_name)
+ self.rt = self.g.open(self.path, create=True)
+ assert self.rt == VALID_STORE, "The underlying store is corrupt"
+ assert len(self.g) == 0, "There must be zero triples in the graph just after store (file) creation"
+ data = """
+ PREFIX : <https://example.org/>
+
+ :a :b :c .
+ :d :e :f .
+ :d :g :h .
+ """
+ self.g.parse(data=data, format="ttl")
+
+ def tearDown(self):
+ self.g.close()
+
+ def test_write(self):
+ assert len(self.g) == 3, "There must be three triples in the graph after the first data chunk parse"
+ data2 = """
+ PREFIX : <https://example.org/>
+
+ :d :i :j .
+ """
+ self.g.parse(data=data2, format="ttl")
+ assert len(self.g) == 4, "There must be four triples in the graph after the second data chunk parse"
+ data3 = """
+ PREFIX : <https://example.org/>
+
+ :d :i :j .
+ """
+ self.g.parse(data=data3, format="ttl")
+ assert len(self.g) == 4, "There must still be four triples in the graph after the third data chunk parse"
+
+ def test_read(self):
+ sx = None
+ for s in self.g.subjects(predicate=URIRef("https://example.org/e"), object=URIRef("https://example.org/f")):
+ sx = s
+ assert sx == URIRef("https://example.org/d")
+
+ def test_sparql_query(self):
+ q = """
+ PREFIX : <https://example.org/>
+
+ SELECT (COUNT(*) AS ?c)
+ WHERE {
+ :d ?p ?o .
+ }"""
+
+ c = 0
+ for row in self.g.query(q):
+ c = int(row.c)
+ assert c == 2, "SPARQL COUNT must return 2"
+
+ def test_sparql_insert(self):
+ q = """
+ PREFIX : <https://example.org/>
+
+ INSERT DATA {
+ :x :y :z .
+ }"""
+
+ self.g.update(q)
+ assert len(self.g) == 4, "After extra triple insert, length must be 4"
+
+ def test_multigraph(self):
+ q = """
+ PREFIX : <https://example.org/>
+
+ INSERT DATA {
+ GRAPH :m {
+ :x :y :z .
+ }
+ GRAPH :n {
+ :x :y :z .
+ }
+ }"""
+
+ self.g.update(q)
+
+ q = """
+ SELECT (COUNT(?g) AS ?c)
+ WHERE {
+ SELECT DISTINCT ?g
+ WHERE {
+ GRAPH ?g {
+ ?s ?p ?o
+ }
+ }
+ }
+ """
+ c = 0
+ for row in self.g.query(q):
+ c = int(row.c)
+ assert c == 3, "SPARQL COUNT must return 3 (default, :m & :n)"
+
+ def test_open_shut(self):
+ assert len(self.g) == 3, "Initially we must have 3 triples from setUp"
+ self.g.close()
+ self.g = None
+
+ # reopen the graph
+ self.g = ConjunctiveGraph("BerkeleyDB")
+ self.g.open(self.path, create=False)
+ assert len(self.g) == 3, "After close and reopen, we should still have the 3 originally added triples"