author    Matthias Baumgartner <dev@igsor.net>  2023-01-21 16:31:08 +0100
committer Matthias Baumgartner <dev@igsor.net>  2023-01-21 16:31:08 +0100
commit    965f4dfe41afd552ed6477c75e1286c14e3580f6 (patch)
tree      36f403910222c4536f1fe6ed3330228cee8c178e
parent    e2f08efc0d8a3c875994bdb69623c30cce5079d9 (diff)
Fetch in triple stores:
* fetch interface
* sparql fetch ast parser
* sparql fetch implementation
-rw-r--r--  bsfs/triple_store/base.py                        33
-rw-r--r--  bsfs/triple_store/sparql/parse_fetch.py         109
-rw-r--r--  bsfs/triple_store/sparql/parse_filter.py         45
-rw-r--r--  bsfs/triple_store/sparql/sparql.py               50
-rw-r--r--  bsfs/triple_store/sparql/utils.py               141
-rw-r--r--  test/triple_store/sparql/test_parse_fetch.py    263
-rw-r--r--  test/triple_store/sparql/test_parse_filter.py   150
-rw-r--r--  test/triple_store/sparql/test_sparql.py          70
-rw-r--r--  test/triple_store/sparql/test_utils.py          155
-rw-r--r--  test/triple_store/test_base.py                    3
10 files changed, 898 insertions, 121 deletions
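
The commit splits querying into two entry points: get() keeps returning bare guids that match a filter, while the new fetch() additionally resolves named values along a fetch ast and yields (guid, name, value) triples. A minimal usage sketch, assuming the bsfs package is available, the store has been migrated to a schema that defines the Entity node and its filename/tag predicates, and purely illustrative guids (the import path of SparqlStore is assumed):

    from bsfs.query import ast
    from bsfs.triple_store.sparql.sparql import SparqlStore  # import path assumed

    store = SparqlStore()
    # ... migrate store.schema and create/set some entity data first ...
    entity = store.schema.node('http://bsfs.ai/schema/Entity')

    # guids only, as before (filter ast)
    guids = store.get(entity, filter=ast.filter.Is('http://example.com/entity#1234'))

    # (guid, name, value) triples (filter ast + fetch ast)
    triples = store.fetch(
        node_type=entity,
        filter=ast.filter.Is('http://example.com/entity#1234'),   # illustrative guid
        fetch=ast.fetch.All(
            ast.fetch.Value('http://bsfs.ai/schema/Entity#filename', name='filename'),
            ast.fetch.Node('http://bsfs.ai/schema/Entity#tag', name='tag'),
        ),
    )
    for guid, name, value in triples:
        print(guid, name, value)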
diff --git a/bsfs/triple_store/base.py b/bsfs/triple_store/base.py
index 7e03714..1baa63b 100644
--- a/bsfs/triple_store/base.py
+++ b/bsfs/triple_store/base.py
@@ -11,7 +11,7 @@ import typing
# inner-module imports
from bsfs.query import ast
from bsfs.utils import URI, typename
-import bsfs.schema as _schema
+import bsfs.schema as bsc
# exports
__all__: typing.Sequence[str] = (
@@ -82,12 +82,12 @@ class TripleStoreBase(abc.ABC):
@property
@abc.abstractmethod
- def schema(self) -> _schema.Schema:
+ def schema(self) -> bsc.Schema:
"""Return the store's local schema."""
@schema.setter
@abc.abstractmethod
- def schema(self, schema: _schema.Schema):
+ def schema(self, schema: bsc.Schema):
"""Migrate to new schema by adding or removing class definitions.
Commits before and after the migration.
@@ -112,17 +112,28 @@ class TripleStoreBase(abc.ABC):
@abc.abstractmethod
def get(
self,
- node_type: _schema.Node,
- query: typing.Optional[ast.filter.FilterExpression] = None,
+ node_type: bsc.Node,
+ filter: typing.Optional[ast.filter.FilterExpression] = None, # pylint: disable=redefined-builtin
) -> typing.Iterator[URI]:
- """Return guids of nodes of type *node_type* that match the *query*.
- Return all guids of the respective type if *query* is None.
+ """Return guids of nodes of type *node_type* that match the *filter*.
+ Return all guids of the respective type if *filter* is None.
+ """
+
+ @abc.abstractmethod
+ def fetch(
+ self,
+ node_type: bsc.Node,
+ filter: ast.filter.FilterExpression, # pylint: disable=redefined-builtin
+ fetch: ast.fetch.FetchExpression,
+ ) -> typing.Iterator[typing.Tuple[URI, str, typing.Any]]:
+ """Return (guid, name, value) triples where the guid is determined by the *filter*
+ query and the name matches the *fetch* query.
"""
@abc.abstractmethod
def exists(
self,
- node_type: _schema.Node,
+ node_type: bsc.Node,
guids: typing.Iterable[URI],
) -> typing.Iterable[URI]:
"""Return those *guids* that exist and have type *node_type* or a subclass thereof."""
@@ -130,7 +141,7 @@ class TripleStoreBase(abc.ABC):
@abc.abstractmethod
def create(
self,
- node_type: _schema.Node,
+ node_type: bsc.Node,
guids: typing.Iterable[URI],
):
"""Create *guid* nodes with type *subject*."""
@@ -138,9 +149,9 @@ class TripleStoreBase(abc.ABC):
@abc.abstractmethod
def set(
self,
- node_type: _schema.Node, # FIXME: is the node_type even needed? Couldn't I infer from the predicate?
+ node_type: bsc.Node, # FIXME: is the node_type even needed? Couldn't I infer from the predicate?
guids: typing.Iterable[URI],
- predicate: _schema.Predicate,
+ predicate: bsc.Predicate,
values: typing.Iterable[typing.Any],
):
"""Add triples to the graph.
diff --git a/bsfs/triple_store/sparql/parse_fetch.py b/bsfs/triple_store/sparql/parse_fetch.py
new file mode 100644
index 0000000..20d4e74
--- /dev/null
+++ b/bsfs/triple_store/sparql/parse_fetch.py
@@ -0,0 +1,109 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# standard imports
+import typing
+
+# bsfs imports
+from bsfs import schema as bsc
+from bsfs.query import ast
+from bsfs.utils import errors
+
+# inner-module imports
+from .utils import GenHopName, Query
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'Fetch',
+ )
+
+
+## code ##
+
+class Fetch():
+ """Translate `bsfs.query.ast.fetch` structures into Sparql queries."""
+
+ def __init__(self, schema):
+ self.schema = schema
+ self.ngen = GenHopName(prefix='?fch')
+
+ def __call__(
+ self,
+ root_type: bsc.Node,
+ root: ast.fetch.FetchExpression,
+ ) -> Query:
+        """Translate the fetch ast *root*, anchored at nodes of type *root_type*,
+        into a partial `Query`.
+        """
+ # check root_type
+ if not isinstance(root_type, bsc.Node):
+ raise errors.BackendError(f'expected Node, found {root_type}')
+ if root_type not in self.schema.nodes():
+ raise errors.ConsistencyError(f'node {root_type} is not in the schema')
+ # parse root
+ terms, expr = self._parse_fetch_expression(root_type, root, '?ent')
+ # assemble query
+ return Query(
+ root_type=root_type.uri,
+ root_head='?ent',
+ select=terms,
+ where=expr,
+ )
+
+ def _parse_fetch_expression(
+ self,
+ node_type: bsc.Vertex,
+ node: ast.fetch.FetchExpression,
+ head: str,
+ ):
+ """Route *node* to the handler of the respective FetchExpression subclass."""
+ if isinstance(node, ast.fetch.All):
+ return self._all(node_type, node, head)
+ if isinstance(node, ast.fetch.Fetch):
+ return self._fetch(node_type, node, head)
+ if isinstance(node, ast.fetch.Node):
+ return self._node(node_type, node, head)
+ if isinstance(node, ast.fetch.Value):
+ return self._value(node_type, node, head)
+ if isinstance(node, ast.fetch.This):
+ return self._this(node_type, node, head)
+ # invalid node
+ raise errors.BackendError(f'expected fetch expression, found {node}')
+
+ def _all(self, node_type: bsc.Vertex, node: ast.fetch.All, head: str):
+ # child expressions
+ terms, exprs = zip(*[self._parse_fetch_expression(node_type, expr, head) for expr in node])
+ terms = {term for sub in terms for term in sub}
+ exprs = ' .\n'.join({expr for expr in exprs if len(expr.strip()) > 0})
+ return terms, exprs
+
+ def _fetch(self, node_type: bsc.Vertex, node: ast.fetch.Fetch, head: str): # pylint: disable=unused-argument # (node_type)
+ # child expressions
+ rng = self.schema.predicate(node.predicate).range
+ nexthead = next(self.ngen)
+ terms, expr = self._parse_fetch_expression(rng, node.expr, nexthead)
+ return terms, f'OPTIONAL{{ {head} <{node.predicate}> {nexthead} .\n {expr} }}'
+
+ def _node(self, node_type: bsc.Vertex, node: ast.fetch.Node, head: str): # pylint: disable=unused-argument # (node_type)
+ if f'?{node.name}'.startswith(self.ngen.prefix):
+            raise errors.BackendError(f'Node name must not start with {self.ngen.prefix}')
+ # compose and return statement
+ term = next(self.ngen)
+ return {(term, node.name)}, f'OPTIONAL{{ {head} <{node.predicate}> {term} }}'
+
+ def _value(self, node_type: bsc.Vertex, node: ast.fetch.Value, head: str): # pylint: disable=unused-argument # (node_type)
+ if f'?{node.name}'.startswith(self.ngen.prefix):
+            raise errors.BackendError(f'Value name must not start with {self.ngen.prefix}')
+ # compose and return statement
+ term = next(self.ngen)
+ return {(term, node.name)}, f'OPTIONAL{{ {head} <{node.predicate}> {term} }}'
+
+ def _this(self, node_type: bsc.Vertex, node: ast.fetch.This, head: str): # pylint: disable=unused-argument # (node_type)
+ if f'?{node.name}'.startswith(self.ngen.prefix):
+            raise errors.BackendError(f'This name must not start with {self.ngen.prefix}')
+ # compose and return statement
+ return {(head, node.name)}, ''
+
+## EOF ##
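
For orientation, a short sketch of how the new parser is driven (mirroring test_parse_fetch.py further below; schema and graph are assumed to be set up as in those tests, with the test schema's predicate URIs):

    from bsfs.query import ast
    from bsfs.triple_store.sparql.parse_fetch import Fetch

    parser = Fetch(schema)                        # schema: a bsfs.schema.Schema
    query = parser(
        schema.node('http://bsfs.ai/schema/Entity'),
        ast.fetch.Fetch('http://bsfs.ai/schema/Entity#tag',
                        ast.fetch.Value('http://bsfs.ai/schema/Tag#label', 'label')),
    )
    print(query.names)                            # ('label',)
    for guid, label in query(graph):              # graph: an rdflib.Graph
        print(guid, label)

Since every hop is wrapped in an OPTIONAL block, entities that lack the requested predicate still show up in the raw result, with None in place of the missing value.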
diff --git a/bsfs/triple_store/sparql/parse_filter.py b/bsfs/triple_store/sparql/parse_filter.py
index 8b6b976..dca0aea 100644
--- a/bsfs/triple_store/sparql/parse_filter.py
+++ b/bsfs/triple_store/sparql/parse_filter.py
@@ -19,6 +19,7 @@ from bsfs.utils import URI, errors
# inner-module imports
from .distance import DISTANCE_FU
+from .utils import GenHopName, Query
# exports
__all__: typing.Sequence[str] = (
@@ -28,25 +29,6 @@ __all__: typing.Sequence[str] = (
## code ##
-class _GenHopName():
- """Generator that produces a new unique symbol name with each iteration."""
-
- # Symbol name prefix.
- prefix: str
-
- # Current counter.
- curr: int
-
- def __init__(self, prefix: str = '?hop', start: int = 0):
- self.prefix = prefix
- self.curr = start - 1
-
- def __next__(self):
- """Generate and return the next unique name."""
- self.curr += 1
- return self.prefix + str(self.curr)
-
-
class Filter():
"""Translate `bsfs.query.ast.filter` structures into Sparql queries."""
@@ -54,18 +36,18 @@ class Filter():
schema: bsc.Schema
# Generator that produces unique symbol names.
- ngen: _GenHopName
+ ngen: GenHopName
def __init__(self, graph, schema):
self.graph = graph
self.schema = schema
- self.ngen = _GenHopName()
+ self.ngen = GenHopName(prefix='?flt')
def __call__(
self,
root_type: bsc.Node,
root: typing.Optional[ast.filter.FilterExpression] = None,
- ) -> str:
+ ) -> Query:
"""
"""
# check root_type
@@ -79,15 +61,18 @@ class Filter():
else:
cond = self._parse_filter_expression(root_type, root, '?ent')
# assemble query
- return f'''
- SELECT ?ent
- WHERE {{
- ?ent <{ns.rdf.type}>/<{ns.rdfs.subClassOf}>* <{root_type.uri}> .
- {cond}
- }}
- '''
+ return Query(
+ root_type=root_type.uri,
+ root_head='?ent',
+ where=cond,
+ )
- def _parse_filter_expression(self, type_: bsc.Vertex, node: ast.filter.FilterExpression, head: str) -> str:
+ def _parse_filter_expression(
+ self,
+ type_: bsc.Vertex,
+ node: ast.filter.FilterExpression,
+ head: str,
+ ) -> str:
"""Route *node* to the handler of the respective FilterExpression subclass."""
if isinstance(node, ast.filter.Is):
return self._is(type_, node, head)
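
The practical effect of the parse_filter changes is that Filter.__call__ now returns a Query object instead of a finished SPARQL string, so call sites execute the query through the object itself; the test diff further below rewrites every call site accordingly. A sketch of that change, using the same names as those tests:

    q = parser(schema.node(ns.bsfs.Entity), ast.filter.Is('http://example.com/entity#1234'))
    # before: results = graph.query(q)     # q used to be a raw SPARQL string
    results = q(graph)                     # q is now a Query; __call__ runs it on the graph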
diff --git a/bsfs/triple_store/sparql/sparql.py b/bsfs/triple_store/sparql/sparql.py
index fedd227..a0dd12e 100644
--- a/bsfs/triple_store/sparql/sparql.py
+++ b/bsfs/triple_store/sparql/sparql.py
@@ -16,6 +16,7 @@ from bsfs.query import ast
from bsfs.utils import errors, URI
# inner-module imports
+from . import parse_fetch
from . import parse_filter
from .. import base
from .distance import DISTANCE_FU
@@ -92,13 +93,16 @@ class SparqlStore(base.TripleStoreBase):
# Filter parser
_filter_parser: parse_filter.Filter
+ # Fetch parser
+ _fetch_parser: parse_fetch.Fetch
+
def __init__(self):
super().__init__(None)
self._graph = rdflib.Graph()
self._transaction = _Transaction(self._graph)
- # NOTE: parsing bsfs.query.ast.filter.Has requires xsd:integer.
self._schema = bsc.Schema(literals={bsc.ROOT_NUMBER.child(ns.xsd.integer)})
self._filter_parser = parse_filter.Filter(self._graph, self._schema)
+ self._fetch_parser = parse_fetch.Fetch(self._schema)
# NOTE: mypy and pylint complain about the **kwargs not being listed (contrasting super)
# However, not having it here is clearer since it's explicit that there are no arguments.
@@ -197,17 +201,53 @@ class SparqlStore(base.TripleStoreBase):
# migrate schema
self._schema = schema
self._filter_parser.schema = schema
+ self._fetch_parser.schema = schema
+
+ def fetch(
+ self,
+ node_type: bsc.Node,
+ filter: ast.filter.FilterExpression, # pylint: disable=redefined-builtin
+ fetch: ast.fetch.FetchExpression,
+ ) -> typing.Iterator[typing.Tuple[URI, str, typing.Any]]:
+ if node_type not in self.schema.nodes():
+ raise errors.ConsistencyError(f'{node_type} is not defined in the schema')
+ if not isinstance(filter, ast.filter.FilterExpression):
+ raise TypeError(filter)
+ if not isinstance(fetch, ast.fetch.FetchExpression):
+ raise TypeError(fetch)
+ # compose a query from fetch and filter ast
+ query = self._filter_parser(node_type, filter)
+ query += self._fetch_parser(node_type, fetch)
+ # run query
+ emitted = set()
+ for result in query(self._graph):
+ guid = URI(result[0])
+ for name, raw in zip(query.names, result[1:]):
+ if raw is None: # undefined value
+ continue
+ if isinstance(raw, rdflib.Literal):
+ value = raw.value
+ else:
+ value = URI(raw)
+ # emit triple
+ triple = (guid, name, value)
+ if triple not in emitted: # FIXME: needs a better solution!
+ emitted.add(triple)
+ yield guid, name, value
def get(
self,
node_type: bsc.Node,
- query: typing.Optional[ast.filter.FilterExpression] = None,
+ filter: typing.Optional[ast.filter.FilterExpression] = None, # pylint: disable=redefined-builtin
) -> typing.Iterator[URI]:
if node_type not in self.schema.nodes():
raise errors.ConsistencyError(f'{node_type} is not defined in the schema')
- if not isinstance(query, ast.filter.FilterExpression):
- raise TypeError(query)
- for guid, in self._graph.query(self._filter_parser(node_type, query)):
+ if not isinstance(filter, ast.filter.FilterExpression):
+ raise TypeError(filter)
+ # compose query
+ query = self._filter_parser(node_type, filter)
+ # run query
+ for guid, in query(self._graph):
yield URI(guid)
def _has_type(self, subject: URI, node_type: bsc.Node) -> bool:
diff --git a/bsfs/triple_store/sparql/utils.py b/bsfs/triple_store/sparql/utils.py
new file mode 100644
index 0000000..deca4d8
--- /dev/null
+++ b/bsfs/triple_store/sparql/utils.py
@@ -0,0 +1,141 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# standard imports
+import typing
+
+# external imports
+import rdflib
+
+# bsfs imports
+from bsfs.namespace import ns
+from bsfs.utils import typename
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'GenHopName',
+ 'Query',
+ )
+
+
+## code ##
+
+class GenHopName():
+ """Generator that produces a new unique symbol name with each iteration."""
+
+ # Symbol name prefix.
+ prefix: str
+
+ # Current counter.
+ curr: int
+
+ def __init__(self, prefix: str = '?hop', start: int = 0):
+ self.prefix = prefix
+ self.curr = start - 1
+
+ def __next__(self):
+ """Generate and return the next unique name."""
+ self.curr += 1
+ return self.prefix + str(self.curr)
+
+
+class Query():
+ """Hold, manage, and complete partial Sparql queries."""
+
+ # root node type URI.
+ root_type: str
+
+ # root node variable name.
+ root_head: str
+
+ # (head, name) tuples (w/o root)
+ select: typing.Tuple[typing.Tuple[str, str], ...]
+
+ # where statements.
+ where: str
+
+ def __init__(
+ self,
+ root_type: str,
+ root_head: str = '?ent',
+ select: typing.Optional[typing.Iterable[typing.Tuple[str, str]]] = None,
+ where: typing.Optional[str] = None,
+ ):
+ # check arguments
+ if select is None:
+ select = []
+ if where is None:
+ where = ''
+ # set members
+ self.root_type = root_type
+ self.root_head = root_head
+        self.select = tuple(select) # tuple ensures persistent order
+ self.where = where.strip()
+
+ def __str__(self) -> str:
+ return self.query
+
+ def __repr__(self) -> str:
+ return f'{typename(self)}({self.root_type}, {self.root_head}, {self.select}, {self.where})'
+
+ def __eq__(self, other: typing.Any) -> bool:
+ return isinstance(other, type(self)) \
+ and self.root_type == other.root_type \
+ and self.root_head == other.root_head \
+ and self.select == other.select \
+ and self.where == other.where
+
+ def __hash__(self) -> int:
+ return hash((type(self), self.root_type, self.root_head, self.select, self.where))
+
+ def __add__(self, other: typing.Any) -> 'Query':
+ # check other's type
+ if not isinstance(other, type(self)):
+ return NotImplemented
+ # check query compatibility
+ if not self.root_type == other.root_type:
+ raise ValueError(other)
+ if not self.root_head == other.root_head:
+ raise ValueError(other)
+ # combine selections
+ select = self.select + other.select
+ # combine conditions
+ conds = []
+ if self.where != '':
+ conds.append(self.where)
+ if other.where != '':
+ conds.append(other.where)
+ where = ' . '.join(conds)
+ # return new query
+ return Query(
+ root_type=self.root_type,
+ root_head=self.root_head,
+ select=select,
+ where=where,
+ )
+
+ @property
+ def names(self) -> typing.Tuple[str, ...]:
+ """Return a tuple of selected variable names, excluding the root."""
+ return tuple(name for _, name in self.select)
+
+ @property
+ def query(self) -> str:
+ """Return an executable sparql query."""
+ select = ' '.join(f'({head} as ?{name})' for head, name in self.select)
+ return f'''
+ SELECT {self.root_head} {select}
+ WHERE {{
+ {self.root_head} <{ns.rdf.type}>/<{ns.rdfs.subClassOf}>* <{self.root_type}> .
+ {self.where}
+ }}
+ '''
+
+ def __call__(self, graph: rdflib.Graph) -> rdflib.query.Result:
+ """Execute the query on a *graph* and return the query result."""
+ return graph.query(self.query)
+
+## EOF ##
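
Query objects produced by the filter and fetch parsers share the root head and can be merged with +, which is how SparqlStore.fetch combines the two parses. A small composition sketch with hand-written WHERE fragments (the fragments are illustrative, not actual parser output; a ValueError is raised if the two queries disagree on root_type or root_head):

    from bsfs.triple_store.sparql.utils import Query

    flt = Query(
        root_type='http://bsfs.ai/schema/Entity',
        where='?ent <http://bsfs.ai/schema/Entity#tag> ?tag',
    )
    fch = Query(
        root_type='http://bsfs.ai/schema/Entity',
        select=[('?fch0', 'filename')],
        where='OPTIONAL{ ?ent <http://bsfs.ai/schema/Entity#filename> ?fch0 }',
    )
    combined = flt + fch        # same root_type and root_head ('?ent'), so addition is allowed
    print(combined.names)       # ('filename',)
    print(combined.query)       # SELECT ?ent (?fch0 as ?filename) WHERE { ... }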
diff --git a/test/triple_store/sparql/test_parse_fetch.py b/test/triple_store/sparql/test_parse_fetch.py
new file mode 100644
index 0000000..0961789
--- /dev/null
+++ b/test/triple_store/sparql/test_parse_fetch.py
@@ -0,0 +1,263 @@
+"""
+
+Part of the bsfs test suite.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import rdflib
+import unittest
+
+# bsfs imports
+from bsfs import schema
+from bsfs.namespace import Namespace, ns
+from bsfs.query import ast
+from bsfs.utils import errors, URI
+
+# objects to test
+from bsfs.triple_store.sparql.parse_fetch import Fetch
+
+
+## code ##
+
+bsfs = Namespace('http://bsfs.ai/schema', fsep='/')
+bse = Namespace('http://bsfs.ai/schema/Entity')
+bst = Namespace('http://bsfs.ai/schema/Tag')
+bsc = Namespace('http://bsfs.ai/schema/Collection')
+
+class TestParseFetch(unittest.TestCase):
+
+ def setUp(self):
+ self.schema = schema.from_string('''
+ prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+ prefix xsd: <http://www.w3.org/2001/XMLSchema#>
+
+ prefix bsfs: <http://bsfs.ai/schema/>
+ prefix bse: <http://bsfs.ai/schema/Entity#>
+ prefix bst: <http://bsfs.ai/schema/Tag#>
+ prefix bsc: <http://bsfs.ai/schema/Collection#>
+
+ # nodes
+ bsfs:Entity rdfs:subClassOf bsfs:Node .
+ bsfs:Tag rdfs:subClassOf bsfs:Node .
+ bsfs:Collection rdfs:subClassOf bsfs:Node .
+
+ # literals
+ xsd:integer rdfs:subClassOf bsfs:Literal .
+ xsd:string rdfs:subClassOf bsfs:Literal .
+
+ # predicates
+ bse:tag rdfs:subClassOf bsfs:Predicate ;
+ rdfs:domain bsfs:Entity ;
+ rdfs:range bsfs:Tag .
+
+ bse:collection rdfs:subClassOf bsfs:Predicate ;
+ rdfs:domain bsfs:Entity ;
+ rdfs:range bsfs:Collection .
+
+ bse:filename rdfs:subClassOf bsfs:Predicate ;
+ rdfs:domain bsfs:Entity ;
+ rdfs:range xsd:string .
+
+ bse:rank rdfs:subClassOf bsfs:Predicate ;
+ rdfs:domain bsfs:Entity ;
+ rdfs:range xsd:integer .
+
+ bst:main rdfs:subClassOf bsfs:Predicate ;
+ rdfs:domain bsfs:Tag ;
+ rdfs:range bsfs:Entity .
+
+ bst:label rdfs:subClassOf bsfs:Predicate ;
+ rdfs:domain bsfs:Tag ;
+ rdfs:range xsd:string .
+
+ bsc:tag rdfs:subClassOf bsfs:Predicate ;
+ rdfs:domain bsfs:Collection ;
+ rdfs:range bsfs:Tag .
+
+ bsc:label rdfs:subClassOf bsfs:Predicate ;
+ rdfs:domain bsfs:Collection ;
+ rdfs:range xsd:string .
+
+ bsc:rating rdfs:subClassOf bsfs:Predicate ;
+ rdfs:domain bsfs:Collection ;
+ rdfs:range xsd:integer .
+
+ ''')
+
+ # graph to test queries
+ self.graph = rdflib.Graph()
+ # schema hierarchies
+ self.graph.add((rdflib.URIRef('http://bsfs.ai/schema/Entity'), rdflib.RDFS.subClassOf, rdflib.URIRef('http://bsfs.ai/schema/Node')))
+ self.graph.add((rdflib.URIRef('http://bsfs.ai/schema/Collection'), rdflib.RDFS.subClassOf, rdflib.URIRef('http://bsfs.ai/schema/Node')))
+ self.graph.add((rdflib.URIRef('http://bsfs.ai/schema/Tag'), rdflib.RDFS.subClassOf, rdflib.URIRef('http://bsfs.ai/schema/Node')))
+ # entities
+ self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Entity')))
+ self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Entity')))
+ # tags
+ self.graph.add((rdflib.URIRef('http://example.com/tag#1234'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Tag')))
+ self.graph.add((rdflib.URIRef('http://example.com/tag#4321'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Tag')))
+ # collections
+ self.graph.add((rdflib.URIRef('http://example.com/collection#1234'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Collection')))
+ self.graph.add((rdflib.URIRef('http://example.com/collection#4321'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Collection')))
+ # entity literals
+ self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(bse.rank), rdflib.Literal('1234', datatype=rdflib.XSD.integer)))
+ self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(bse.filename), rdflib.Literal('filename_1234', datatype=rdflib.XSD.string)))
+ #self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(bse.rank), rdflib.Literal('4321', datatype=rdflib.XSD.integer)))
+ self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(bse.filename), rdflib.Literal('filename_4321', datatype=rdflib.XSD.string)))
+ # tag literals
+ self.graph.add((rdflib.URIRef('http://example.com/tag#1234'), rdflib.URIRef(bst.label), rdflib.Literal('tag_label_1234', datatype=rdflib.XSD.string)))
+ self.graph.add((rdflib.URIRef('http://example.com/tag#4321'), rdflib.URIRef(bst.label), rdflib.Literal('tag_label_4321', datatype=rdflib.XSD.string)))
+ # collection literals
+ self.graph.add((rdflib.URIRef('http://example.com/collection#1234'), rdflib.URIRef(bsc.label), rdflib.Literal('collection_label_1234', datatype=rdflib.XSD.string)))
+ self.graph.add((rdflib.URIRef('http://example.com/collection#1234'), rdflib.URIRef(bsc.rating), rdflib.Literal('1234', datatype=rdflib.XSD.integer)))
+ self.graph.add((rdflib.URIRef('http://example.com/collection#4321'), rdflib.URIRef(bsc.label), rdflib.Literal('collection_label_4321', datatype=rdflib.XSD.string)))
+ self.graph.add((rdflib.URIRef('http://example.com/collection#4321'), rdflib.URIRef(bsc.rating), rdflib.Literal('4321', datatype=rdflib.XSD.integer)))
+ # entity-tag links
+ self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(bse.tag), rdflib.URIRef('http://example.com/tag#1234')))
+ self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(bse.tag), rdflib.URIRef('http://example.com/tag#4321')))
+ # entity-collection links
+ self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(bse.collection), rdflib.URIRef('http://example.com/collection#1234')))
+ self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(bse.collection), rdflib.URIRef('http://example.com/collection#4321')))
+ # collection-tag links
+ self.graph.add((rdflib.URIRef('http://example.com/collection#1234'), rdflib.URIRef(bsc.tag), rdflib.URIRef('http://example.com/tag#1234')))
+ self.graph.add((rdflib.URIRef('http://example.com/collection#4321'), rdflib.URIRef(bsc.tag), rdflib.URIRef('http://example.com/tag#4321')))
+ # tag-entity links # NOTE: cross-over
+ self.graph.add((rdflib.URIRef('http://example.com/tag#1234'), rdflib.URIRef(bst.main), rdflib.URIRef('http://example.com/entity#4321')))
+ self.graph.add((rdflib.URIRef('http://example.com/tag#4321'), rdflib.URIRef(bst.main), rdflib.URIRef('http://example.com/entity#1234')))
+
+ # default parser
+ self.parser = Fetch(self.schema)
+ self.ent = self.schema.node(ns.bsfs.Entity)
+
+
+ def test_call(self):
+ # NOTE: The individual ast components are considered in the respective tests. Here, we test __call__ specifics.
+
+ # __call__ requires a valid root type
+ self.assertRaises(errors.BackendError, self.parser, self.schema.literal(ns.bsfs.Literal), ast.fetch.This('this'))
+ self.assertRaises(errors.ConsistencyError, self.parser, self.schema.node(ns.bsfs.Node).child(ns.bsfs.Invalid), ast.fetch.This('this'))
+ # __call__ requires a parseable root
+ self.assertRaises(errors.BackendError, self.parser, self.ent, ast.filter.FilterExpression())
+ # __call__ returns an executable query
+ q = self.parser(self.ent, ast.fetch.Fetch(bse.tag, ast.fetch.Value(bst.label, 'label')))
+ self.assertSetEqual(set(q(self.graph)), {
+ (rdflib.URIRef('http://example.com/entity#1234'), rdflib.Literal('tag_label_1234', datatype=rdflib.XSD.string)),
+ (rdflib.URIRef('http://example.com/entity#4321'), rdflib.Literal('tag_label_4321', datatype=rdflib.XSD.string)),
+ })
+
+
+ def test_routing(self):
+ self.assertRaises(errors.BackendError, self.parser._parse_fetch_expression, self.ent, ast.fetch.FetchExpression(), '?head')
+
+
+ def test_all(self):
+ # multiple values query
+ q = self.parser(self.ent, ast.fetch.All(
+ ast.fetch.Value(bse.filename, name='filename'),
+ ast.fetch.Value(bse.rank, name='rank')),
+ )
+ self.assertSetEqual(set(q.names), {'filename', 'rank'})
+ if q.names == ('filename', 'rank'):
+ self.assertSetEqual(set(q(self.graph)), {
+ (rdflib.URIRef('http://example.com/entity#1234'), rdflib.Literal('filename_1234', datatype=rdflib.XSD.string), rdflib.Literal('1234', datatype=rdflib.XSD.integer)),
+ (rdflib.URIRef('http://example.com/entity#4321'), rdflib.Literal('filename_4321', datatype=rdflib.XSD.string), None),
+ })
+ else:
+ self.assertSetEqual(set(q(self.graph)), {
+ (rdflib.URIRef('http://example.com/entity#1234'), rdflib.Literal('1234', datatype=rdflib.XSD.integer), rdflib.Literal('filename_1234', datatype=rdflib.XSD.string)),
+ (rdflib.URIRef('http://example.com/entity#4321'), None, rdflib.Literal('filename_4321', datatype=rdflib.XSD.string)),
+ })
+ # mixed values and node query
+ q = self.parser(self.ent, ast.fetch.All(
+ ast.fetch.Value(bse.filename, name='filename'),
+ ast.fetch.Node(bse.tag, name='tag'),
+ ))
+ self.assertSetEqual(set(q.names), {'filename', 'tag'})
+ if q.names == ('filename', 'tag'):
+ self.assertSetEqual(set(q(self.graph)), {
+ (rdflib.URIRef('http://example.com/entity#1234'), rdflib.Literal('filename_1234', datatype=rdflib.XSD.string), rdflib.URIRef('http://example.com/tag#1234')),
+ (rdflib.URIRef('http://example.com/entity#4321'), rdflib.Literal('filename_4321', datatype=rdflib.XSD.string), rdflib.URIRef('http://example.com/tag#4321')),
+ })
+ else:
+ self.assertSetEqual(set(q(self.graph)), {
+ (rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef('http://example.com/tag#1234'), rdflib.Literal('filename_1234', datatype=rdflib.XSD.string)),
+ (rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef('http://example.com/tag#4321'), rdflib.Literal('filename_4321', datatype=rdflib.XSD.string)),
+ })
+ # multiple values and second hop
+ q = self.parser(self.ent, ast.fetch.Fetch(bse.tag, ast.fetch.All(
+ ast.fetch.This(name='tag'),
+ ast.fetch.Value(bst.label, name='label'),
+ )))
+ self.assertSetEqual(set(q.names), {'tag', 'label'})
+ if q.names == ('tag', 'label'):
+ self.assertSetEqual(set(q(self.graph)), {
+ (rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef('http://example.com/tag#1234'), rdflib.Literal('tag_label_1234', datatype=rdflib.XSD.string)),
+ (rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef('http://example.com/tag#4321'), rdflib.Literal('tag_label_4321', datatype=rdflib.XSD.string)),
+ })
+ else:
+ self.assertSetEqual(set(q(self.graph)), {
+ (rdflib.URIRef('http://example.com/entity#1234'), rdflib.Literal('tag_label_1234', datatype=rdflib.XSD.string), rdflib.URIRef('http://example.com/tag#1234')),
+ (rdflib.URIRef('http://example.com/entity#4321'), rdflib.Literal('tag_label_4321', datatype=rdflib.XSD.string), rdflib.URIRef('http://example.com/tag#4321')),
+ })
+
+
+
+ def test_fetch(self):
+ # two-hop query
+ q = self.parser(self.ent, ast.fetch.Fetch(bse.tag, ast.fetch.Value(bst.label, 'tag_label')))
+ self.assertSetEqual(set(q(self.graph)), {
+ (rdflib.URIRef('http://example.com/entity#1234'), rdflib.Literal('tag_label_1234', datatype=rdflib.XSD.string)),
+ (rdflib.URIRef('http://example.com/entity#4321'), rdflib.Literal('tag_label_4321', datatype=rdflib.XSD.string)),
+ })
+ # three-hop-query
+ q = self.parser(self.ent, ast.fetch.Fetch(bse.tag, ast.fetch.Fetch(bst.main, ast.fetch.Value(bse.rank, 'entity_rank'))))
+ self.assertSetEqual(set(q(self.graph)), {
+ (rdflib.URIRef('http://example.com/entity#1234'), None),
+ (rdflib.URIRef('http://example.com/entity#4321'), rdflib.Literal('1234', datatype=rdflib.XSD.integer)),
+ })
+
+
+ def test_node(self):
+ # cannot use the internal hop name
+ self.assertRaises(errors.BackendError, self.parser, self.ent, ast.fetch.Node(bse.tag, self.parser.ngen.prefix[1:] + '123'))
+ # a simple Node statement
+ q = self.parser(self.ent, ast.fetch.Node(bse.tag, 'tag'))
+ self.assertSetEqual(set(q.names), {'tag'})
+ self.assertSetEqual(set(q(self.graph)), {
+ (rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef('http://example.com/tag#1234')),
+ (rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef('http://example.com/tag#4321')),
+ })
+
+
+ def test_value(self):
+ # cannot use the internal hop name
+ self.assertRaises(errors.BackendError, self.parser, self.schema.node(ns.bsfs.Entity), ast.fetch.Value(bse.filename, self.parser.ngen.prefix[1:] + '123'))
+ # a simple Value statement
+ q = self.parser(self.ent, ast.fetch.Value(bse.filename, 'filename'))
+ self.assertSetEqual(set(q.names), {'filename'})
+ self.assertSetEqual(set(q(self.graph)), {
+ (rdflib.URIRef('http://example.com/entity#1234'), rdflib.Literal('filename_1234', datatype=rdflib.XSD.string)),
+ (rdflib.URIRef('http://example.com/entity#4321'), rdflib.Literal('filename_4321', datatype=rdflib.XSD.string)),
+ })
+
+
+ def test_this(self):
+ # cannot use the internal hop name
+ self.assertRaises(errors.BackendError, self.parser, self.ent, ast.fetch.This(self.parser.ngen.prefix[1:] + '123'))
+ # a simple This statement
+ self.assertEqual(self.parser._this(self.ent, ast.fetch.This('this'), '?head'),
+ ({('?head', 'this')}, ''))
+ q = self.parser(self.ent, ast.fetch.This('this'))
+ self.assertSetEqual(set(q(self.graph)), {
+ (rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef('http://example.com/entity#1234')),
+ (rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef('http://example.com/entity#4321')),
+ })
+
+
+## main ##
+
+if __name__ == '__main__':
+ unittest.main()
+
+## EOF ##
diff --git a/test/triple_store/sparql/test_parse_filter.py b/test/triple_store/sparql/test_parse_filter.py
index 8764535..6fa0cd3 100644
--- a/test/triple_store/sparql/test_parse_filter.py
+++ b/test/triple_store/sparql/test_parse_filter.py
@@ -149,13 +149,13 @@ class TestParseFilter(unittest.TestCase):
ast.filter.Or(
ast.filter.Is('http://example.com/entity#1234'),
ast.filter.Is('http://example.com/entity#5678')))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, {'http://example.com/entity#1234'})
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234'})
# root is optional
q = self.parser(self.schema.node(ns.bsfs.Entity))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234', 'http://example.com/image#4321'})
q = self.parser(self.schema.node(ns.bsfs.Tag))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/tag#1234', 'http://example.com/tag#4321'})
@@ -164,7 +164,7 @@ class TestParseFilter(unittest.TestCase):
self.assertRaises(errors.BackendError, self.parser._is, self.schema.literal(ns.bsfs.Literal), ast.filter.Is('http://example.com/entity#1234'), '?ent')
# a single Is statement
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Is('http://example.com/entity#1234'))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234'})
# an aggregate of Is statements
q = self.parser(self.schema.node(ns.bsfs.Entity),
@@ -172,7 +172,7 @@ class TestParseFilter(unittest.TestCase):
ast.filter.Is('http://example.com/entity#1234'),
ast.filter.Is('http://example.com/entity#4321'),
))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/entity#4321'})
# combined with other filters
q = self.parser(self.schema.node(ns.bsfs.Entity),
@@ -185,12 +185,12 @@ class TestParseFilter(unittest.TestCase):
ast.filter.Equals('Me, Myself, and I')
),
))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234'})
# as argument of Any/All
q = self.parser(self.schema.node(ns.bsfs.Entity),
ast.filter.Any(ns.bse.tag, ast.filter.Is('http://example.com/tag#1234')))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/image#1234'})
@@ -199,15 +199,15 @@ class TestParseFilter(unittest.TestCase):
self.assertRaises(errors.BackendError, self.parser._equals, self.schema.node(ns.bsfs.Entity), ast.filter.Equals('hello world'), '?ent')
# a single Equals statement
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.Equals('hello world')))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/entity#4321'})
# a single Equals statement that includes subtypes
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/image#1234'})
# an Equals statement on an integer
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.Equals(4321)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#4321', 'http://example.com/image#4321'})
@@ -216,18 +216,18 @@ class TestParseFilter(unittest.TestCase):
self.assertRaises(errors.BackendError, self.parser._substring, self.schema.node(ns.bsfs.Entity), ast.filter.Substring('hello world'), '?ent')
# a single Substring statement
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.Substring('hello')))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/entity#4321'})
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.Substring('lo wo')))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/entity#4321'})
# a single Substring statement that includes subtypes
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.Substring('Myself')))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/image#1234'})
# an Substring statement on an integer
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.Substring('32')))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#4321', 'http://example.com/image#4321'})
@@ -236,15 +236,15 @@ class TestParseFilter(unittest.TestCase):
self.assertRaises(errors.BackendError, self.parser._starts_with, self.schema.node(ns.bsfs.Entity), ast.filter.StartsWith('hello world'), '?ent')
# a single StartsWith statement
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.StartsWith('hello')))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/entity#4321'})
# a single StartsWith statement that includes subtypes
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.StartsWith('Me, Mys')))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/image#1234'})
# an StartsWith statement on an integer
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.StartsWith(432)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#4321', 'http://example.com/image#4321'})
@@ -253,15 +253,15 @@ class TestParseFilter(unittest.TestCase):
self.assertRaises(errors.BackendError, self.parser._ends_with, self.schema.node(ns.bsfs.Entity), ast.filter.EndsWith('hello world'), '?ent')
# a single EndsWith statement
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.EndsWith('orld')))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/entity#4321'})
# a single EndsWith statement that includes subtypes
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.EndsWith('and I')))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/image#1234'})
# an EndsWith statement on an integer
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.EndsWith(321)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#4321', 'http://example.com/image#4321'})
@@ -270,22 +270,22 @@ class TestParseFilter(unittest.TestCase):
self.assertRaises(errors.BackendError, self.parser._less_than, self.schema.node(ns.bsfs.Entity), ast.filter.LessThan(2000), '?ent')
# a single LessThan statement
q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.LessThan(2000)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/image#1234'})
# _less_than respects boundary
q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.LessThan(1234, strict=True)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set())
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)}, set())
q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.LessThan(1234, strict=False)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/image#1234'})
# a single LessThan statement that includes subtypes
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.LessThan(2000)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/image#1234'})
# an LessThan statement on a string
# always negative; note that http://example.com/tag#4321 is also not returned although its comment is a pure number
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.LessThan(10_000)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set())
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)}, set())
def test_greater_than(self):
@@ -293,22 +293,22 @@ class TestParseFilter(unittest.TestCase):
self.assertRaises(errors.BackendError, self.parser._greater_than, self.schema.node(ns.bsfs.Entity), ast.filter.GreaterThan(2000), '?ent')
# a single GreaterThan statement
q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.GreaterThan(2000)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/image#4321'})
# _greater_than respects boundary
q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.GreaterThan(4321, strict=True)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set())
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)}, set())
q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.GreaterThan(4321, strict=False)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/image#4321'})
# a single GreaterThan statement that includes subtypes
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.GreaterThan(2000)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#4321', 'http://example.com/image#4321'})
# an GreaterThan statement on a string
# always positive
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.GreaterThan(0)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234'})
@@ -331,7 +331,7 @@ class TestParseFilter(unittest.TestCase):
ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234)),
ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')),
))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234'})
# all conditions have to match
q = self.parser(self.schema.node(ns.bsfs.Entity),
@@ -340,21 +340,21 @@ class TestParseFilter(unittest.TestCase):
ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234)),
ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')),
))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set())
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)}, set())
q = self.parser(self.schema.node(ns.bsfs.Entity),
ast.filter.And(
ast.filter.Is('http://example.com/entity#1234'),
ast.filter.Any(ns.bse.filesize, ast.filter.Equals(4321)),
ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')),
))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set())
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)}, set())
q = self.parser(self.schema.node(ns.bsfs.Entity),
ast.filter.And(
ast.filter.Is('http://example.com/entity#1234'),
ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234)),
ast.filter.Any(ns.bse.comment, ast.filter.Equals('foobar')),
))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set())
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)}, set())
# And can be nested
q = self.parser(self.schema.node(ns.bsfs.Entity),
ast.filter.And(
@@ -364,7 +364,7 @@ class TestParseFilter(unittest.TestCase):
ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')),
),
))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234'})
@@ -387,7 +387,7 @@ class TestParseFilter(unittest.TestCase):
ast.filter.Any(ns.bse.filesize, ast.filter.Equals(4321)),
ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')),
))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/image#1234', 'http://example.com/entity#4321', 'http://example.com/image#4321'})
# at least one condition has to match
q = self.parser(self.schema.node(ns.bsfs.Entity),
@@ -396,14 +396,14 @@ class TestParseFilter(unittest.TestCase):
ast.filter.Any(ns.bse.filesize, ast.filter.Equals(8765)),
ast.filter.Any(ns.bse.comment, ast.filter.Equals('foobar')),
))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set())
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)}, set())
q = self.parser(self.schema.node(ns.bsfs.Entity),
ast.filter.Or(
ast.filter.Is('http://example.com/entity#1234'),
ast.filter.Any(ns.bse.filesize, ast.filter.Equals(8765)),
ast.filter.Any(ns.bse.comment, ast.filter.Equals('foobar')),
))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234'})
q = self.parser(self.schema.node(ns.bsfs.Entity),
ast.filter.Or(
@@ -411,7 +411,7 @@ class TestParseFilter(unittest.TestCase):
ast.filter.Any(ns.bse.filesize, ast.filter.Equals(4321)),
ast.filter.Any(ns.bse.comment, ast.filter.Equals('foobar')),
))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#4321', 'http://example.com/image#4321'})
q = self.parser(self.schema.node(ns.bsfs.Entity),
ast.filter.Or(
@@ -419,7 +419,7 @@ class TestParseFilter(unittest.TestCase):
ast.filter.Any(ns.bse.filesize, ast.filter.Equals(8765)),
ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')),
))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/image#1234'})
# Or can be nested
q = self.parser(self.schema.node(ns.bsfs.Entity),
@@ -430,7 +430,7 @@ class TestParseFilter(unittest.TestCase):
ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')),
),
))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/image#1234', 'http://example.com/entity#4321', 'http://example.com/image#4321'})
@@ -451,14 +451,14 @@ class TestParseFilter(unittest.TestCase):
# _any returns a valid query
q = self.parser(self.schema.node(ns.bsfs.Entity),
ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/image#1234'})
# _any can be nested
q = self.parser(self.schema.node(ns.bsfs.Entity),
ast.filter.Any(ns.bse.tag,
ast.filter.Any(ns.bse.representative,
ast.filter.Is('http://example.com/image#1234'))))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/image#1234'})
@@ -468,12 +468,12 @@ class TestParseFilter(unittest.TestCase):
# All Nodes
q = self.parser(self.schema.node(ns.bsfs.Entity),
ast.filter.All(ns.bse.tag, ast.filter.Is('http://example.com/tag#1234')))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/image#1234'})
# All values
q = self.parser(self.schema.node(ns.bsfs.Entity),
ast.filter.All(ns.bse.comment, ast.filter.Equals('hello world')))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#4321'})
# All on value within Or branch
# entity#1234 is selected because all of its comments are in ("hello world", "Me, Myself, and I")
@@ -481,12 +481,12 @@ class TestParseFilter(unittest.TestCase):
ast.filter.All(ns.bse.comment, ast.filter.Or(
ast.filter.Equals('hello world'),
ast.filter.Equals('Me, Myself, and I'))))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234'})
# All requires at least one predicate/value
q = self.parser(self.schema.node(ns.bsfs.Entity),
ast.filter.All(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/image#1234'})
# All within a statement
q = self.parser(self.schema.node(ns.bsfs.Entity),
@@ -498,18 +498,18 @@ class TestParseFilter(unittest.TestCase):
))
)
)
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/image#1234'})
# All with reversed Predicate
q = self.parser(self.schema.node(ns.bsfs.Tag),
ast.filter.All(ast.filter.Predicate(ns.bse.tag, reverse=True), ast.filter.Is('http://example.com/entity#4321')))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/tag#4321'})
# All with multiple predicates
q = self.parser(self.schema.node(ns.bsfs.Entity),
ast.filter.All(ast.filter.OneOf(ns.bse.tag, ns.bse.buddy), # entity#1234 (tag:tag#1234), entity#1234 (buddy:image#1234), image#1234(tag:tag#1234)
ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')))) # entity#1234, image#1234, tag#1234
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/image#1234'})
@@ -518,22 +518,22 @@ class TestParseFilter(unittest.TestCase):
# Not applies on conditions
q = self.parser(self.schema.node(ns.bsfs.Entity),
ast.filter.Not(ast.filter.Is('http://example.com/entity#1234')))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/image#1234', 'http://example.com/entity#4321', 'http://example.com/image#4321'})
# Not applies on conditions within branches
q = self.parser(self.schema.node(ns.bsfs.Entity),
ast.filter.Any(ns.bse.comment, ast.filter.Not(ast.filter.Equals('Me, Myself, and I'))))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/entity#4321'})
# Not applies on branches
q = self.parser(self.schema.node(ns.bsfs.Entity),
ast.filter.Not(ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I'))))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#4321', 'http://example.com/image#4321'})
# Double Not cancel each other
q = self.parser(self.schema.node(ns.bsfs.Entity),
ast.filter.Not(ast.filter.Not(ast.filter.Is('http://example.com/entity#1234'))))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234'})
# Not works within aggregation (and)
q = self.parser(self.schema.node(ns.bsfs.Entity),
@@ -541,7 +541,7 @@ class TestParseFilter(unittest.TestCase):
ast.filter.Not(ast.filter.Is('http://example.com/entity#1234')),
ast.filter.Any(ns.bse.comment, ast.filter.Equals('hello world')),
))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#4321'})
# Not works within aggregation (or)
q = self.parser(self.schema.node(ns.bsfs.Entity),
@@ -549,7 +549,7 @@ class TestParseFilter(unittest.TestCase):
ast.filter.Not(ast.filter.Is('http://example.com/entity#1234')),
ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')),
))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234', 'http://example.com/image#4321'})
# Not works outside aggregation (and)
q = self.parser(self.schema.node(ns.bsfs.Entity),
@@ -558,7 +558,7 @@ class TestParseFilter(unittest.TestCase):
ast.filter.Is('http://example.com/entity#1234'),
ast.filter.Any(ns.bse.comment, ast.filter.Equals('hello world')),
)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#4321', 'http://example.com/image#1234', 'http://example.com/image#4321'})
# Not works outside aggregation (or)
q = self.parser(self.schema.node(ns.bsfs.Entity),
@@ -567,7 +567,7 @@ class TestParseFilter(unittest.TestCase):
ast.filter.Is('http://example.com/entity#4321'),
ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')),
)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/image#4321'})
# Not mixed with branch, aggregation, id, and value
q = self.parser(self.schema.node(ns.bsfs.Entity),
@@ -580,7 +580,7 @@ class TestParseFilter(unittest.TestCase):
),
ast.filter.Any(ns.bse.comment, ast.filter.Not(ast.filter.Equals('foobar'))), # entity#1234, entity#4321, image#1234
))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/image#1234'})
@@ -590,21 +590,21 @@ class TestParseFilter(unittest.TestCase):
# Has with GreaterThan constraint
q = self.parser(self.schema.node(ns.bsfs.Entity),
ast.filter.Has(ns.bse.comment, ast.filter.GreaterThan(0)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234'})
q = self.parser(self.schema.node(ns.bsfs.Entity),
ast.filter.Has(ns.bse.comment, ast.filter.GreaterThan(1)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234'})
# Has with Equals constraint
q = self.parser(self.schema.node(ns.bsfs.Entity),
ast.filter.Has(ns.bse.comment, 1))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#4321', 'http://example.com/image#1234'})
# Has with LessThan constraint
q = self.parser(self.schema.node(ns.bsfs.Entity),
ast.filter.Has(ns.bse.comment, ast.filter.LessThan(2)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#4321', 'http://example.com/image#1234', 'http://example.com/image#4321'})
# Has with multiple constraints
self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('extra1', datatype=rdflib.XSD.string)))
@@ -616,17 +616,17 @@ class TestParseFilter(unittest.TestCase):
self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('extra2', datatype=rdflib.XSD.string)))
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Has(ns.bse.comment,
ast.filter.And(ast.filter.GreaterThan(1), ast.filter.LessThan(5))))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#4321'})
# Has with OneOf predicate
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Has(ast.filter.OneOf(ns.bse.tag, ns.bse.buddy),
ast.filter.GreaterThan(1)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/entity#4321'})
# Has with reversed predicate
q = self.parser(self.schema.node(ns.bsfs.Tag), ast.filter.Has(ast.filter.Predicate(ns.bse.tag, reverse=True),
ast.filter.GreaterThan(1)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/tag#1234'})
@@ -643,23 +643,23 @@ class TestParseFilter(unittest.TestCase):
self.assertRaises(errors.ConsistencyError, self.parser._distance, self.schema.literal(ns.bsfs.Colors), ast.filter.Distance([1,2,3,4,5], 1), '')
# _distance respects threshold
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.colors, ast.filter.Distance([2,4,3,1], 4)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234'})
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.colors, ast.filter.Distance([2,4,3,1], 3)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#4321', 'http://example.com/image#1234'})
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.colors, ast.filter.Distance([2,4,3,1], 2)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/image#1234'})
# result set can be empty
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.colors, ast.filter.Distance([2,4,3,1], 1)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set())
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)}, set())
# _distance respects strict
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.colors, ast.filter.Distance([1,2,3,4], 0, False)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234'})
q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.colors, ast.filter.Distance([1,2,3,4], 0, True)))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set())
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)}, set())
def test_one_of(self):
# _one_of expects a node
@@ -725,7 +725,7 @@ class TestParseFilter(unittest.TestCase):
ast.filter.Any(ast.filter.OneOf(ns.bse.tag, ns.bse.buddy),
ast.filter.Any(ast.filter.OneOf(ns.bse.comment),
ast.filter.Equals('Me, Myself, and I'))))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/image#1234'})
@@ -757,13 +757,13 @@ class TestParseFilter(unittest.TestCase):
ast.filter.Any(ns.bse.representative,
ast.filter.Any(ns.bse.filesize,
ast.filter.Equals(1234)))))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/entity#1234', 'http://example.com/image#1234'})
q = self.parser(self.schema.node(ns.bsfs.Tag),
ast.filter.Any(ast.filter.Predicate(ns.bse.tag, reverse=True),
ast.filter.Any(ns.bse.filesize,
ast.filter.LessThan(2000))))
- self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ self.assertSetEqual({str(guid) for guid, in q(self.graph)},
{'http://example.com/tag#1234'})
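Note: a minimal sketch of the calling convention these tests switch to. The filter parser is assumed to return a utils.Query object that is executed by calling it on the graph (q(self.graph)) instead of passing a SPARQL string to graph.query(); only Query is used directly here, and the graph contents are illustrative.

    import rdflib
    from bsfs.namespace import ns
    from bsfs.triple_store.sparql.utils import Query

    graph = rdflib.Graph()
    graph.add((rdflib.URIRef('http://example.com/entity#1234'),
               rdflib.URIRef(ns.rdf.type),
               rdflib.URIRef('http://bsfs.ai/schema/Entity')))
    graph.add((rdflib.URIRef('http://example.com/entity#1234'),
               rdflib.URIRef(ns.bse.comment),
               rdflib.Literal('Me, Myself, and I', datatype=rdflib.XSD.string)))

    # the parser is assumed to produce a Query along these lines; calling it
    # runs the composed SPARQL query against the given graph and yields rows.
    q = Query('http://bsfs.ai/schema/Entity', '?root', None,
              f'?root <{ns.bse.comment}> "Me, Myself, and I"^^<{rdflib.XSD.string}>')
    assert {str(guid) for guid, in q(graph)} == {'http://example.com/entity#1234'}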
diff --git a/test/triple_store/sparql/test_sparql.py b/test/triple_store/sparql/test_sparql.py
index 7fbfb65..c58fae3 100644
--- a/test/triple_store/sparql/test_sparql.py
+++ b/test/triple_store/sparql/test_sparql.py
@@ -556,6 +556,76 @@ class TestSparqlStore(unittest.TestCase):
self.assertSetEqual(set(q), tag_ids)
+ def test_fetch(self):
+ # store setup
+ store = SparqlStore.Open()
+ store.schema = self.schema
+ # add instances
+ ent_type = self.schema.node(ns.bsfs.Entity)
+ tag_type = self.schema.node(ns.bsfs.Tag)
+ ent_ids = {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')}
+ tag_ids = {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')}
+ store.create(ent_type, ent_ids)
+ store.create(tag_type, tag_ids)
+ store.set(ent_type, ent_ids, self.schema.predicate(ns.bse.tag), tag_ids)
+ store.set(ent_type, {URI('http://example.com/me/entity#1234')}, self.schema.predicate(ns.bse.filesize), {1234})
+ store.set(ent_type, {URI('http://example.com/me/entity#4321')}, self.schema.predicate(ns.bse.filesize), {4321})
+ store.set(ent_type, {URI('http://example.com/me/entity#1234')}, self.schema.predicate(ns.bse.comment), {'hello world'})
+ # node_type must be a node from the schema
+ self.assertRaises(errors.ConsistencyError, list, store.fetch(self.schema.literal(ns.bsfs.Literal),
+ ast.filter.FilterExpression(), ast.fetch.FetchExpression()))
+ self.assertRaises(errors.ConsistencyError, list, store.fetch(self.schema.node(ns.bsfs.Node).child(ns.bsfs.Invalid),
+ ast.filter.FilterExpression(), ast.fetch.FetchExpression()))
+ # requires a filter and a fetch query
+ self.assertRaises(TypeError, list, store.fetch(self.schema.node(ns.bsfs.Entity), None, ast.fetch.FetchExpression()))
+ self.assertRaises(TypeError, list, store.fetch(self.schema.node(ns.bsfs.Entity), 1234, ast.fetch.FetchExpression()))
+ self.assertRaises(TypeError, list, store.fetch(self.schema.node(ns.bsfs.Entity), 'hello', ast.fetch.FetchExpression()))
+ self.assertRaises(TypeError, list, store.fetch(self.schema.node(ns.bsfs.Entity), ast.filter.FilterExpression(), None))
+ self.assertRaises(TypeError, list, store.fetch(self.schema.node(ns.bsfs.Entity), ast.filter.FilterExpression(), 1234))
+ self.assertRaises(TypeError, list, store.fetch(self.schema.node(ns.bsfs.Entity), ast.filter.FilterExpression(), 'hello'))
+ # fetch emits triples
+ self.assertSetEqual(set(store.fetch(self.schema.node(ns.bsfs.Entity),
+ ast.filter.Is('http://example.com/me/entity#1234'),
+ ast.fetch.Value(ns.bse.filesize, 'filesize'),
+ )), {
+ (URI('http://example.com/me/entity#1234'), 'filesize', 1234),
+ })
+ # fetch respects filter query
+ self.assertSetEqual(set(store.fetch(self.schema.node(ns.bsfs.Entity),
+ ast.filter.IsIn('http://example.com/me/entity#1234', 'http://example.com/me/entity#4321'),
+ ast.fetch.Value(ns.bse.filesize, 'filesize'),
+ )), {
+ (URI('http://example.com/me/entity#1234'), 'filesize', 1234),
+ (URI('http://example.com/me/entity#4321'), 'filesize', 4321),
+ })
+ # fetch ignores missing data
+ self.assertSetEqual(set(store.fetch(self.schema.node(ns.bsfs.Entity),
+ ast.filter.IsIn('http://example.com/me/entity#1234', 'http://example.com/me/entity#4321'),
+ ast.fetch.Value(ns.bse.comment, 'comment'),
+ )), {
+ (URI('http://example.com/me/entity#1234'), 'comment', 'hello world'),
+ })
+ # fetch emits all triples
+ self.assertSetEqual(set(store.fetch(self.schema.node(ns.bsfs.Entity),
+ ast.filter.Is('http://example.com/me/entity#1234'),
+ ast.fetch.All(
+ ast.fetch.Value(ns.bse.filesize, 'filesize'),
+ ast.fetch.Node(ns.bse.tag, 'tag'),
+ )
+ )), {
+ (URI('http://example.com/me/entity#1234'), 'filesize', 1234),
+ (URI('http://example.com/me/entity#1234'), 'tag', URI('http://example.com/me/tag#1234')),
+ (URI('http://example.com/me/entity#1234'), 'tag', URI('http://example.com/me/tag#4321')),
+ })
+ # triples do not repeat
+ triples = list(store.fetch(self.schema.node(ns.bsfs.Entity), ast.filter.Is('http://example.com/me/entity#1234'),
+ ast.fetch.All(
+ ast.fetch.Value(ns.bse.filesize, 'filesize'),
+ ast.fetch.Node(ns.bse.tag, 'tag'),
+ )
+ ))
+ self.assertEqual(len(triples), 3)
+
def test_exists(self):
# store setup
store = SparqlStore.Open()
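Note: for orientation, a usage sketch of the new fetch interface mirroring test_fetch above. SparqlStore, ns, and schema are assumed to be the objects already imported and constructed in this test module; their setup is not repeated here.

    from bsfs.query import ast
    from bsfs.utils import URI

    store = SparqlStore.Open()
    store.schema = schema                       # the test schema (assumed available)
    ent = schema.node(ns.bsfs.Entity)
    store.create(ent, {URI('http://example.com/me/entity#1234')})
    store.set(ent, {URI('http://example.com/me/entity#1234')},
              schema.predicate(ns.bse.filesize), {1234})

    # fetch() yields (guid, name, value) triples: the filter selects the nodes,
    # the fetch expression names the values returned for each of them.
    triples = set(store.fetch(
        ent,
        ast.filter.Is('http://example.com/me/entity#1234'),
        ast.fetch.Value(ns.bse.filesize, 'filesize'),
    ))
    # -> {(URI('http://example.com/me/entity#1234'), 'filesize', 1234)}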
diff --git a/test/triple_store/sparql/test_utils.py b/test/triple_store/sparql/test_utils.py
new file mode 100644
index 0000000..073b8f8
--- /dev/null
+++ b/test/triple_store/sparql/test_utils.py
@@ -0,0 +1,155 @@
+"""
+
+Part of the bsfs test suite.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# standard imports
+import operator
+import re
+import unittest
+
+# external imports
+import rdflib
+
+# bsfs imports
+from bsfs.namespace import ns
+
+# objects to test
+from bsfs.triple_store.sparql.utils import GenHopName, Query
+
+
+## code ##
+
+class TestGenHopName(unittest.TestCase):
+ def test_next(self):
+ # baseline
+ self.assertEqual(next(GenHopName(prefix='?foo', start=123)), '?foo123')
+ # respects prefix
+ self.assertEqual(next(GenHopName(prefix='?bar', start=123)), '?bar123')
+ # respects start
+ self.assertEqual(next(GenHopName(prefix='?foo', start=321)), '?foo321')
+ # counts up
+ cnt = GenHopName(prefix='?foo', start=998)
+ self.assertEqual(next(cnt), '?foo998')
+ self.assertEqual(next(cnt), '?foo999')
+ self.assertEqual(next(cnt), '?foo1000')
+ self.assertEqual(next(cnt), '?foo1001')
+
+ def test_essentials(self):
+ # can get the prefix
+ self.assertEqual(GenHopName(prefix='?foo', start=123).prefix, '?foo')
+ # can get the counter
+ self.assertEqual(GenHopName(prefix='?foo', start=123).curr, 122)
+
+
+class TestQuery(unittest.TestCase):
+ def setUp(self):
+ self.root_type = 'http://bsfs.ai/schema/Entity'
+ self.root_head = '?root'
+ self.select = (('?head', 'name'), )
+ self.where = f'?root <{ns.bse.tag}> ?head'
+
+ def test_essentials(self):
+ # can access members
+ q = Query(self.root_type, self.root_head, self.select, self.where)
+ self.assertEqual(q.root_type, self.root_type)
+ self.assertEqual(q.root_head, self.root_head)
+ self.assertEqual(q.select, self.select)
+ self.assertEqual(q.where, self.where)
+ # comparison
+ self.assertEqual(q, Query(self.root_type, self.root_head, self.select, self.where))
+ self.assertEqual(hash(q), hash(Query(self.root_type, self.root_head, self.select, self.where)))
+ # comparison respects root_type
+ self.assertNotEqual(q, Query('http://bsfs.ai/schema/Tag', self.root_head, self.select, self.where))
+ self.assertNotEqual(hash(q), hash(Query('http://bsfs.ai/schema/Tag', self.root_head, self.select, self.where)))
+ # comparison respects root_head
+ self.assertNotEqual(q, Query(self.root_type, '?foo', self.select, self.where))
+ self.assertNotEqual(hash(q), hash(Query(self.root_type, '?foo', self.select, self.where)))
+ # comparison respects select
+ self.assertNotEqual(q, Query(self.root_type, self.root_head, (('?head', 'foo'), ), self.where))
+ self.assertNotEqual(hash(q), hash(Query(self.root_type, self.root_head, (('?head', 'foo'), ), self.where)))
+ # comparison respects where
+ self.assertNotEqual(q, Query(self.root_type, self.root_head, self.select, '?root bse:filename ?head'))
+ self.assertNotEqual(hash(q), hash(Query(self.root_type, self.root_head, self.select, '?root bse:filename ?head')))
+ # string conversion
+ self.assertEqual(str(q), q.query)
+ self.assertEqual(repr(q), "Query(http://bsfs.ai/schema/Entity, ?root, (('?head', 'name'),), ?root <http://bsfs.ai/schema/Entity#tag> ?head)")
+
+ def test_add(self):
+ q = Query(self.root_type, self.root_head, self.select, self.where)
+ # can only add a query
+ self.assertRaises(TypeError, operator.add, q, 1234)
+ self.assertRaises(TypeError, operator.add, q, 'foobar')
+ # root type and head must match
+ self.assertRaises(ValueError, operator.add, q, Query('http://bsfs.ai/schema/Tag', self.root_head))
+ self.assertRaises(ValueError, operator.add, q, Query(self.root_type, '?foobar'))
+ # select and where are combined
+ combo = q + Query(self.root_type, self.root_head, (('?foo', 'bar'), ), f'?root <{ns.bse.filename}> ?foo')
+ self.assertEqual(combo.select, (('?head', 'name'), ('?foo', 'bar')))
+ self.assertEqual(combo.where, f'?root <{ns.bse.tag}> ?head . ?root <{ns.bse.filename}> ?foo')
+ # select can be empty
+ combo = q + Query(self.root_type, self.root_head, None, f'?root <{ns.bse.filename}> ?foo')
+ self.assertEqual(combo.select, (('?head', 'name'), ))
+ combo = Query(self.root_type, self.root_head, None, f'?root <{ns.bse.filename}> ?foo') + q
+ self.assertEqual(combo.select, (('?head', 'name'), ))
+ combo = Query(self.root_type, self.root_head, None, self.where) + Query(self.root_type, self.root_head, None, f'?root <{ns.bse.filename}> ?foo')
+ self.assertEqual(combo.select, tuple())
+ # where can be empty
+ combo = q + Query(self.root_type, self.root_head, (('?foo', 'bar'), ))
+ self.assertEqual(combo.where, self.where)
+ combo = Query(self.root_type, self.root_head, (('?foo', 'bar'), )) + q
+ self.assertEqual(combo.where, self.where)
+ combo = Query(self.root_type, self.root_head, self.select) + Query(self.root_type, self.root_head, (('?foo', 'bar'), ))
+ self.assertEqual(combo.where, '')
+
+ def test_names(self):
+ self.assertEqual(Query(self.root_type, self.root_head, (('?head', 'name'), ), self.where).names,
+ ('name', ))
+ self.assertEqual(Query(self.root_type, self.root_head, (('?head', 'name'), ('?foo', 'bar')), self.where).names,
+ ('name', 'bar'))
+
+ def test_query(self):
+ def normalize(value):
+ value = value.strip()
+ value = value.lower()
+ value = value.replace('\n', ' ')
+ value, _ = re.subn(r'\s\s+', ' ', value)
+ return value
+ # query composes a valid query
+ q = Query(self.root_type, self.root_head, self.select, self.where)
+ self.assertEqual(normalize(q.query), normalize(f'select ?root (?head as ?name) where {{ ?root <{ns.rdf.type}>/<{ns.rdfs.subClassOf}>* <http://bsfs.ai/schema/Entity> . ?root <{ns.bse.tag}> ?head }}'))
+ # select and where are optional
+ q = Query(self.root_type, self.root_head)
+ self.assertEqual(normalize(q.query), normalize(f'select ?root where {{ ?root <{ns.rdf.type}>/<{ns.rdfs.subClassOf}>* <http://bsfs.ai/schema/Entity> . }}'))
+ # select and where need not correspond
+ q = Query(self.root_type, self.root_head, (('?head', 'name'), ))
+ self.assertEqual(normalize(q.query), normalize(f'select ?root (?head as ?name) where {{ ?root <{ns.rdf.type}>/<{ns.rdfs.subClassOf}>* <http://bsfs.ai/schema/Entity> . }}'))
+ # query is used for string representation
+ self.assertEqual(str(q), q.query)
+
+ def test_call(self):
+ graph = rdflib.Graph()
+ # schema
+ graph.add((rdflib.URIRef('http://bsfs.ai/schema/Document'), rdflib.URIRef(ns.rdfs.subClassOf), rdflib.URIRef('http://bsfs.ai/schema/Entity')))
+ # nodes
+ graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.rdf.type), rdflib.URIRef('http://bsfs.ai/schema/Entity')))
+ graph.add((rdflib.URIRef('http://example.com/doc#1234'), rdflib.URIRef(ns.rdf.type), rdflib.URIRef('http://bsfs.ai/schema/Document')))
+ # links
+ graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.tag), rdflib.Literal('tag#1234', datatype=rdflib.XSD.string)))
+ graph.add((rdflib.URIRef('http://example.com/doc#1234'), rdflib.URIRef(ns.bse.tag), rdflib.Literal('tag#1234', datatype=rdflib.XSD.string)))
+ # run query on a given graph
+ query = Query(self.root_type, self.root_head, self.select, self.where)
+ self.assertSetEqual(set(query(graph)), {
+ (rdflib.URIRef('http://example.com/entity#1234'), rdflib.Literal('tag#1234', datatype=rdflib.XSD.string)),
+ (rdflib.URIRef('http://example.com/doc#1234'), rdflib.Literal('tag#1234', datatype=rdflib.XSD.string)),
+ })
+ # query actually considers the passed graph
+ self.assertSetEqual(set(query(rdflib.Graph())), set())
+
+## main ##
+
+if __name__ == '__main__':
+ unittest.main()
+
+## EOF ##
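Note: a self-contained sketch of the two helpers covered above, using only behaviour that these tests assert; the variable names and predicates are illustrative.

    import rdflib
    from bsfs.namespace import ns
    from bsfs.triple_store.sparql.utils import GenHopName, Query

    # GenHopName yields fresh SPARQL variable names: ?hop0, ?hop1, ...
    hops = GenHopName(prefix='?hop', start=0)
    assert next(hops) == '?hop0' and next(hops) == '?hop1'

    # Query fragments with the same root type and head can be merged with +;
    # their select and where parts are combined.
    tags = Query('http://bsfs.ai/schema/Entity', '?root',
                 (('?tag', 'tag'), ), f'?root <{ns.bse.tag}> ?tag')
    sizes = Query('http://bsfs.ai/schema/Entity', '?root',
                  (('?size', 'filesize'), ), f'?root <{ns.bse.filesize}> ?size')
    combined = tags + sizes
    print(combined.names)                  # ('tag', 'filesize')
    print(combined.query)                  # the composed SELECT statement
    print(set(combined(rdflib.Graph())))   # empty graph, hence an empty result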
diff --git a/test/triple_store/test_base.py b/test/triple_store/test_base.py
index a0c3260..56a2539 100644
--- a/test/triple_store/test_base.py
+++ b/test/triple_store/test_base.py
@@ -38,6 +38,9 @@ class DummyBase(TripleStoreBase):
def get(self, node_type, query):
pass
+ def fetch(self, node_type, filter, fetch):
+ pass
+
def exists(self, node_type, guids):
pass