From 791918039979d0743fd2ea4b9a5e74593ff96fd0 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Mon, 19 Dec 2022 13:32:34 +0100 Subject: query ast file structures and essential interfaces --- test/triple_store/sparql/__init__.py | 0 test/triple_store/sparql/test_sparql.py | 771 ++++++++++++++++++++++++++++++++ test/triple_store/test_sparql.py | 769 ------------------------------- 3 files changed, 771 insertions(+), 769 deletions(-) create mode 100644 test/triple_store/sparql/__init__.py create mode 100644 test/triple_store/sparql/test_sparql.py delete mode 100644 test/triple_store/test_sparql.py (limited to 'test/triple_store') diff --git a/test/triple_store/sparql/__init__.py b/test/triple_store/sparql/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/test/triple_store/sparql/test_sparql.py b/test/triple_store/sparql/test_sparql.py new file mode 100644 index 0000000..0bf664a --- /dev/null +++ b/test/triple_store/sparql/test_sparql.py @@ -0,0 +1,771 @@ +""" + +Part of the bsfs test suite. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import rdflib +import unittest + +# bsie imports +from bsfs import schema as _schema +from bsfs.namespace import ns +from bsfs.utils import errors, URI + +# objects to test +from bsfs.triple_store.sparql.sparql import SparqlStore + + +## code ## + +class TestSparqlStore(unittest.TestCase): + def setUp(self): + self.schema = _schema.Schema.from_string(''' + prefix rdfs: + prefix xsd: + + prefix bsfs: + prefix bse: + + bsfs:Entity rdfs:subClassOf bsfs:Node . + bsfs:Tag rdfs:subClassOf bsfs:Node . + bsfs:User rdfs:subClassOf bsfs:Node . + xsd:string rdfs:subClassOf bsfs:Literal . + xsd:integer rdfs:subClassOf bsfs:Literal . + + # non-unique literal + bse:comment rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range xsd:string ; + bsfs:unique "false"^^xsd:boolean . + + # unique literal + bse:filesize rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range xsd:integer ; + bsfs:unique "true"^^xsd:boolean . + + # non-unique node + bse:tag rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range bsfs:Tag ; + bsfs:unique "false"^^xsd:boolean . + + # unique node + bse:author rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range bsfs:User ; + bsfs:unique "true"^^xsd:boolean . + + ''') + + def test_essentials(self): + store = SparqlStore.Open() + # equality + self.assertEqual(store, store) + self.assertEqual(hash(store), hash(store)) + self.assertNotEqual(store, SparqlStore.Open()) + self.assertNotEqual(hash(store), hash(SparqlStore.Open())) + # string conversion + self.assertEqual(str(store), 'SparqlStore(uri=None)') + self.assertEqual(repr(store), 'SparqlStore(uri=None)') + # open + self.assertIsInstance(SparqlStore.Open(), SparqlStore) + + + def test__has_type(self): + # setup store + store = SparqlStore.Open() + store.schema = _schema.Schema.from_string(''' + prefix rdfs: + prefix xsd: + prefix bsfs: + + bsfs:Entity rdfs:subClassOf bsfs:Node . + bsfs:Document rdfs:subClassOf bsfs:Entity . + bsfs:Image rdfs:subClassOf bsfs:Entity . + bsfs:PDF rdfs:subClassOf bsfs:Document . 
+ + ''') + # add some instances + store.create(store.schema.node(ns.bsfs.Entity), {URI('http://example.com/me/entity#1234')}) + store.create(store.schema.node(ns.bsfs.Document), {URI('http://example.com/me/document#1234')}) + store.create(store.schema.node(ns.bsfs.Image), {URI('http://example.com/me/image#1234')}) + store.create(store.schema.node(ns.bsfs.PDF), {URI('http://example.com/me/pdf#1234')}) + + # node_type must be in the schema + self.assertRaises(errors.ConsistencyError, store._has_type, URI('http://example.com/me/entity#1234'), store.schema.node(ns.bsfs.Node).get_child(ns.bsfs.invalid)) + + # returns False on inexistent nodes + self.assertFalse(store._has_type(URI('http://example.com/me/entity#4321'), store.schema.node(ns.bsfs.Entity))) + self.assertFalse(store._has_type(URI('http://example.com/me/document#4321'), store.schema.node(ns.bsfs.Document))) + self.assertFalse(store._has_type(URI('http://example.com/me/image#4321'), store.schema.node(ns.bsfs.Image))) + self.assertFalse(store._has_type(URI('http://example.com/me/pdf#4321'), store.schema.node(ns.bsfs.PDF))) + + # _has_type checks direct types + self.assertTrue(store._has_type(URI('http://example.com/me/entity#1234'), store.schema.node(ns.bsfs.Entity))) + self.assertTrue(store._has_type(URI('http://example.com/me/document#1234'), store.schema.node(ns.bsfs.Document))) + self.assertTrue(store._has_type(URI('http://example.com/me/image#1234'), store.schema.node(ns.bsfs.Image))) + self.assertTrue(store._has_type(URI('http://example.com/me/pdf#1234'), store.schema.node(ns.bsfs.PDF))) + + # _has_type checks type hierarchy + self.assertFalse(store._has_type(URI('http://example.com/me/entity#1234'), store.schema.node(ns.bsfs.Document))) + self.assertFalse(store._has_type(URI('http://example.com/me/entity#1234'), store.schema.node(ns.bsfs.Image))) + self.assertFalse(store._has_type(URI('http://example.com/me/entity#1234'), store.schema.node(ns.bsfs.PDF))) + + self.assertTrue(store._has_type(URI('http://example.com/me/document#1234'), store.schema.node(ns.bsfs.Entity))) + self.assertFalse(store._has_type(URI('http://example.com/me/document#1234'), store.schema.node(ns.bsfs.Image))) + self.assertFalse(store._has_type(URI('http://example.com/me/document#1234'), store.schema.node(ns.bsfs.PDF))) + + self.assertTrue(store._has_type(URI('http://example.com/me/image#1234'), store.schema.node(ns.bsfs.Entity))) + self.assertFalse(store._has_type(URI('http://example.com/me/image#1234'), store.schema.node(ns.bsfs.Document))) + self.assertFalse(store._has_type(URI('http://example.com/me/image#1234'), store.schema.node(ns.bsfs.PDF))) + + self.assertTrue(store._has_type(URI('http://example.com/me/pdf#1234'), store.schema.node(ns.bsfs.Entity))) + self.assertTrue(store._has_type(URI('http://example.com/me/pdf#1234'), store.schema.node(ns.bsfs.Document))) + self.assertFalse(store._has_type(URI('http://example.com/me/pdf#1234'), store.schema.node(ns.bsfs.Image))) + + + def test_schema(self): + # setup + store = SparqlStore.Open() + curr = self.schema + p_comment = curr.predicate(ns.bse.comment) + p_filesize = curr.predicate(ns.bse.filesize) + p_tag = curr.predicate(ns.bse.tag) + p_author = curr.predicate(ns.bse.author) + + # migrate to an initial schema + store.schema = curr + # store has migrated + self.assertEqual(store.schema, curr) + + # add some instances + ent_ids = {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')} + tag_ids = {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')} + 
store.create(curr.node(ns.bsfs.Entity), ent_ids) + store.create(curr.node(ns.bsfs.Tag), tag_ids) + store.create(curr.node(ns.bsfs.User), {URI('http://example.com/me')}) + # add some triples + store.set(curr.node(ns.bsfs.Entity), ent_ids, p_comment, {'foo', 'bar'}) + store.set(curr.node(ns.bsfs.Entity), ent_ids, p_filesize, {1234}) + store.set(curr.node(ns.bsfs.Entity), ent_ids, p_tag, + {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')}) + store.set(curr.node(ns.bsfs.Entity), ent_ids, p_author, + {URI('http://example.com/me')}) + # check instances + instances = { + # node instances + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), + (rdflib.URIRef('http://example.com/me/tag#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), + (rdflib.URIRef('http://example.com/me'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.User)), + # comments + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), + # filesize + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), + # tags + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), + # author + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me')), + } + self.assertSetEqual(set(store._graph), instances) + + # add some classes to the schema + curr = curr + _schema.Schema.from_string(''' + prefix rdfs: + prefix xsd: + prefix bsfs: + prefix bse: + prefix bst: + prefix bsc: + + bsfs:Entity rdfs:subClassOf bsfs:Node . + bsfs:Tag rdfs:subClassOf bsfs:Node . + bsfs:Collection rdfs:subClassOf bsfs:Node . + xsd:boolean rdfs:subClassOf bsfs:Literal . + + # literal + bse:shared rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range xsd:boolean ; + bsfs:unique "true"^^xsd:boolean . + + # node + bse:partOf rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range bsfs:Collection ; + bsfs:unique "false"^^xsd:boolean . 
+ + # predicates across auxiliary node classes + bst:usedIn rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Tag ; + rdfs:range bsfs:Collection ; + bsfs:unique "false"^^xsd:boolean . + + bsc:tag rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Collection ; + rdfs:range bsfs:Tag ; + bsfs:unique "false"^^xsd:boolean . + + bst:principal rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Tag ; + rdfs:range bsfs:Node ; + bsfs:unique "true"^^xsd:boolean . + + ''') + # store migrated to the new schema + store.schema = curr + self.assertEqual(store.schema, curr) + # instances have not changed + self.assertSetEqual(set(store._graph), instances) + # add some instances of the new classes + p_partOf = curr.predicate(ns.bse.partOf) + p_shared = curr.predicate(ns.bse.shared) + p_usedIn = curr.predicate('http://bsfs.ai/schema/Tag#usedIn') + p_ctag = curr.predicate('http://bsfs.ai/schema/Collection#tag') + p_principal = curr.predicate('http://bsfs.ai/schema/Tag#principal') + store.create(curr.node(ns.bsfs.Collection), {URI('http://example.com/me/collection#1234'), URI('http://example.com/me/collection#4321')}) + # add some more triples + store.set(curr.node(ns.bsfs.Entity), ent_ids, p_shared, {True}) + store.set(curr.node(ns.bsfs.Entity), ent_ids, p_partOf, + {URI('http://example.com/me/collection#1234'), URI('http://example.com/me/collection#4321')}) + store.set(curr.node(ns.bsfs.Tag), {URI('http://example.com/me/tag#1234')}, p_usedIn, + {URI('http://example.com/me/collection#1234')}) + store.set(curr.node(ns.bsfs.Collection), {URI('http://example.com/me/collection#4321')}, p_ctag, + {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')}) + store.set(curr.node(ns.bsfs.Tag), {URI('http://example.com/me/tag#1234')}, p_principal, + {URI('http://example.com/me/collection#1234')}) + # new instances are now in the graph + self.assertSetEqual(set(store._graph), instances | { + # collections + (rdflib.URIRef('http://example.com/me/collection#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Collection)), + (rdflib.URIRef('http://example.com/me/collection#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Collection)), + # partOf + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_partOf.uri), rdflib.URIRef('http://example.com/me/collection#1234')), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_partOf.uri), rdflib.URIRef('http://example.com/me/collection#4321')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_partOf.uri), rdflib.URIRef('http://example.com/me/collection#1234')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_partOf.uri), rdflib.URIRef('http://example.com/me/collection#4321')), + # shared + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_shared.uri), rdflib.Literal('true', datatype=rdflib.XSD.boolean)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_shared.uri), rdflib.Literal('true', datatype=rdflib.XSD.boolean)), + # auxiliary node connections + (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.URIRef(p_usedIn.uri), rdflib.URIRef('http://example.com/me/collection#1234')), + (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.URIRef(p_principal.uri), rdflib.URIRef('http://example.com/me/collection#1234')), + (rdflib.URIRef('http://example.com/me/collection#4321'), rdflib.URIRef(p_ctag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/collection#4321'), rdflib.URIRef(p_ctag.uri), 
rdflib.URIRef('http://example.com/me/tag#4321')), + }) + + + # remove some classes from the schema + curr = _schema.Schema.from_string(''' + prefix rdfs: + prefix xsd: + prefix bsfs: + prefix bse: + prefix bst: + + bsfs:Entity rdfs:subClassOf bsfs:Node . + bsfs:Tag rdfs:subClassOf bsfs:Node . + bsfs:User rdfs:subClassOf bsfs:Node . + + xsd:boolean rdfs:subClassOf bsfs:Literal . + xsd:integer rdfs:subClassOf bsfs:Literal . + + bse:filesize rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range xsd:integer ; + bsfs:unique "true"^^xsd:boolean . + + bse:tag rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range bsfs:Tag ; + bsfs:unique "false"^^xsd:boolean . + + bse:shared rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range xsd:boolean ; + bsfs:unique "true"^^xsd:boolean . + + bst:principal rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Tag ; + rdfs:range bsfs:Node ; + bsfs:unique "true"^^xsd:boolean . + + # removed: bsfs:Collection + # removed: xsd:string + # removed: bse:comment (bsfs:Entity -> xsd:string) + # removed: bse:partOf (bsfs:Entity -> bsfs:Collection) + # removed: bse:author (bsfs:entity -> bsfs:User) + # removed: bst:usedIn (bsfs:Tag -> bsfs:Collection) + # removed: bsc:tag (bsfs:Collection -> bsfs:Tag) + + ''') + # store migrated to the new schema + store.schema = curr + self.assertEqual(store.schema, curr) + # instances of old classes were removed + self.assertSetEqual(set(store._graph), { + # node instances + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), + (rdflib.URIRef('http://example.com/me/tag#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), + (rdflib.URIRef('http://example.com/me'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.User)), + # filesize + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), + # tags + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), + # shared + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_shared.uri), rdflib.Literal('true', datatype=rdflib.XSD.boolean)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_shared.uri), rdflib.Literal('true', datatype=rdflib.XSD.boolean)), + }) + + # can only assign schema instances + self.assertRaises(TypeError, setattr, store, 'schema', None) + self.assertRaises(TypeError, setattr, store, 'schema', 1234) + self.assertRaises(TypeError, setattr, store, 'schema', 'foo') + class Foo(): pass + self.assertRaises(TypeError, setattr, store, 'schema', Foo()) + + # cannot migrate to incompatible schema + invalid = _schema.Schema.from_string(''' + prefix rdfs: + prefix xsd: + 
prefix bsfs: + prefix bse: + + bsfs:Entity rdfs:subClassOf bsfs:Node . + bsfs:Tag rdfs:subClassOf bsfs:Entity . # inconsistent with previous tag definition + + bse:tag rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range bsfs:Tag ; + bsfs:unique "false"^^xsd:boolean . + + ''') + self.assertRaises(errors.ConsistencyError, setattr, store, 'schema', invalid) + invalid = _schema.Schema.from_string(''' + prefix rdfs: + prefix xsd: + prefix bsfs: + prefix bse: + + bsfs:Entity rdfs:subClassOf bsfs:Node . + bsfs:User rdfs:subClassOf bsfs:Node . + + # inconsistent predicate + bse:tag rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range bsfs:User; + bsfs:unique "false"^^xsd:boolean . + + ''') + self.assertRaises(errors.ConsistencyError, setattr, store, 'schema', invalid) + + + def test_transaction(self): + # store setup + store = SparqlStore.Open() + store.schema = self.schema + p_tag = store.schema.predicate(ns.bse.tag) + p_filesize = store.schema.predicate(ns.bse.filesize) + # prepare node types + ent_type = store.schema.node(ns.bsfs.Entity) + tag_type = store.schema.node(ns.bsfs.Tag) + ent_ids = {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')} + tag_ids = {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')} + # target instances + instances = { + # node instances + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), + (rdflib.URIRef('http://example.com/me/tag#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), + # links + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), + } + + # add some data + store.create(ent_type, ent_ids) + store.create(tag_type, tag_ids) + store.set(ent_type, ent_ids, p_tag, tag_ids) + store.set(ent_type, ent_ids, p_filesize, {1234}) + # current transaction is visible + self.assertSetEqual(set(store._graph), instances | { + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), + }) + + # rollback undoes previous changes + store.rollback() + self.assertSetEqual(set(store._graph), set()) + + # add some data once more + store.create(ent_type, ent_ids) + store.create(tag_type, tag_ids) + store.set(ent_type, ent_ids, p_tag, tag_ids) + store.set(ent_type, ent_ids, p_filesize, {1234}) + # current transaction is visible + self.assertSetEqual(set(store._graph), instances | { + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), + }) + 
+ # commit saves changes + store.commit() + self.assertSetEqual(set(store._graph), instances | { + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), + }) + + # add additional data + store.create(ent_type, {URI('http://example.com/me/entity#hello')}) + store.set(ent_type, {URI('http://example.com/me/entity#hello')}, p_tag, tag_ids) + store.set(ent_type, ent_ids, p_filesize, {4321}) + self.assertSetEqual(set(store._graph), instances | { + (rdflib.URIRef('http://example.com/me/entity#hello'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/entity#hello'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/entity#hello'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(4321, datatype=rdflib.XSD.integer)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(4321, datatype=rdflib.XSD.integer)), + }) + + # rollback undoes only changes since last commit + store.rollback() + self.assertSetEqual(set(store._graph), instances | { + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), + }) + + def test_get(self): + raise NotImplementedError() + + def test_exists(self): + # store setup + store = SparqlStore.Open() + store.schema = self.schema + # prepare node types + ent_type = store.schema.node(ns.bsfs.Entity) + tag_type = store.schema.node(ns.bsfs.Tag) + # create node instances + ent_ids = { + URI('http://example.com/me/entity#1234'), + URI('http://example.com/me/entity#4321'), + } + tag_ids = { + URI('http://example.com/me/tag#1234'), + URI('http://example.com/me/tag#4321'), + } + store.create(ent_type, ent_ids) + store.create(tag_type, tag_ids) + + # exists returns all existing nodes of the correct type + self.assertSetEqual(ent_ids, set(store.exists(ent_type, ent_ids))) + self.assertSetEqual(tag_ids, set(store.exists(tag_type, tag_ids))) + # exists returns only nodes that match the type + self.assertSetEqual(set(), set(store.exists(ent_type, tag_ids))) + self.assertSetEqual({URI('http://example.com/me/entity#1234')}, set(store.exists(ent_type, { + URI('http://example.com/me/tag#1234'), + URI('http://example.com/me/entity#1234'), + }))) + # exists returns only nodes that exist + self.assertSetEqual(set(), set(store.exists(ent_type, { + URI('http://example.com/me/entity#foo'), + URI('http://example.com/me/entity#bar'), + }))) + self.assertSetEqual({URI('http://example.com/me/entity#1234')}, set(store.exists(ent_type, { + URI('http://example.com/me/entity#foo'), + URI('http://example.com/me/entity#1234'), + }))) + + + def test_create(self): + # setup + store = SparqlStore.Open() + store.schema = self.schema + + # node type must be valid + self.assertRaises(errors.ConsistencyError, store.create, self.schema.node(ns.bsfs.Entity).get_child(ns.bsfs.invalid), { + URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')}) + + # can create some nodes + ent_type = 
store.schema.node(ns.bsfs.Entity) + store.create(ent_type, {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')}) + self.assertSetEqual(set(store._graph), { + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + }) + + # existing nodes are skipped + store.create(ent_type, {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#5678')}) + self.assertSetEqual(set(store._graph), { + # previous triples + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + # new triples + (rdflib.URIRef('http://example.com/me/entity#5678'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + }) + + # can create nodes of a different type + tag_type = store.schema.node(ns.bsfs.Tag) + store.create(tag_type, {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')}) + self.assertSetEqual(set(store._graph), { + # previous triples + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/entity#5678'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + # new triples + (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), + (rdflib.URIRef('http://example.com/me/tag#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), + }) + + # creation does not change types of existing nodes + tag_type = store.schema.node(ns.bsfs.Tag) + store.create(tag_type, {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')}) + self.assertSetEqual(set(store._graph), { + # previous triples + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/entity#5678'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + # new triples + (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), + (rdflib.URIRef('http://example.com/me/tag#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), + }) + + + def test_set(self): + # store setup + store = SparqlStore.Open() + store.schema = self.schema + # prepare node types + ent_type = store.schema.node(ns.bsfs.Entity) + user_type = store.schema.node(ns.bsfs.User) + tag_type = store.schema.node(ns.bsfs.Tag) + # prepare predicates + p_filesize = store.schema.predicate(ns.bse.filesize) + p_comment = store.schema.predicate(ns.bse.comment) + p_author = store.schema.predicate(ns.bse.author) + p_tag = store.schema.predicate(ns.bse.tag) + p_invalid = store.schema.predicate(ns.bsfs.Predicate).get_child(ns.bsfs.foo, range=store.schema.node(ns.bsfs.Tag)) + # create node instances + ent_ids = { + URI('http://example.com/me/entity#1234'), + URI('http://example.com/me/entity#4321'), + } + tag_ids = { + URI('http://example.com/me/tag#1234'), + URI('http://example.com/me/tag#4321'), + URI('http://example.com/me/tag#foo'), + URI('http://example.com/me/tag#bar'), + URI('http://example.com/me/tag#foobar'), + URI('http://example.com/me/tag#xyz'), + } + user_ids = { + 
URI('http://example.com/me/user#1234'), + URI('http://example.com/me/user#4321'), + } + store.create(ent_type, ent_ids) + store.create(tag_type, tag_ids) + store.create(user_type, user_ids) + + # invalid node_type is not permitted + self.assertRaises(errors.ConsistencyError, store.set, self.schema.node(ns.bsfs.Node).get_child(ns.bse.foo), + ent_ids, p_comment, {'hello world'}) + + # invalid predicate is not permitted + self.assertRaises(errors.ConsistencyError, store.set, ent_type, ent_ids, p_invalid, {'http://example.com/me/tag#1234'}) + + # predicate must match node_type + self.assertRaises(errors.ConsistencyError, store.set, tag_type, tag_ids, p_filesize, {1234}) + + # empty value does not change the graph + plen = len(store._graph) + store.set(ent_type, ent_ids, p_filesize, []) + store.set(ent_type, ent_ids, p_comment, []) + store.set(ent_type, ent_ids, p_author, []) + store.set(ent_type, ent_ids, p_tag, []) + self.assertEqual(plen, len(store._graph)) + + # cannot set multiple values on unique predicates + self.assertRaises(ValueError, store.set, ent_type, ent_ids, p_filesize, {1234, 4321}) + self.assertRaises(ValueError, store.set, ent_type, ent_ids, p_author, {URI('http://example.com/me/user#1234'), URI('http://example.com/me/user#4321')}) + + # value nodes must exist + self.assertRaises(errors.InstanceError, store.set, ent_type, ent_ids, p_author, {URI('http://example.com/me/user#invalid')}) + self.assertRaises(errors.InstanceError, store.set, ent_type, ent_ids, p_tag, {URI('http://example.com/me/tag#invalid')}) + + # value node types must be consistent with the predicate + self.assertRaises(errors.InstanceError, store.set, ent_type, ent_ids, p_author, {URI('http://example.com/me/entity#1234')}) + self.assertRaises(errors.InstanceError, store.set, ent_type, ent_ids, p_tag, {URI('http://example.com/me/entity#1234')}) + + # all value nodes must exist and be consistent + self.assertRaises(errors.InstanceError, store.set, ent_type, ent_ids, p_tag, { + URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#invalid'), URI('http://example.com/me/entity#1234')}) + + + # set unique literal + store.set(ent_type, ent_ids, p_filesize, {1234}) + self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), + set(store._graph)) + self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), + set(store._graph)) + # re-assigning the same node changes nothing + store.set(ent_type, ent_ids, p_filesize, {1234}) + self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), + set(store._graph)) + self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), + set(store._graph)) + # cannot set multiple unique literals + self.assertRaises(ValueError, store.set, ent_type, ent_ids, p_filesize, {1234, 4321}) # same test as above + # unique literals are overwritten by set + store.set(ent_type, ent_ids, p_filesize, {4321}) + self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('4321', datatype=rdflib.XSD.integer)), + set(store._graph)) + self.assertNotIn( + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', 
datatype=rdflib.XSD.integer)), + set(store._graph)) + self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('4321', datatype=rdflib.XSD.integer)), + set(store._graph)) + self.assertNotIn( + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), + set(store._graph)) + + # set non-unique literal + store.set(ent_type, ent_ids, p_comment, {'foobar'}) + self.assertTrue(set(store._graph).issuperset({ + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foobar', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foobar', datatype=rdflib.XSD.string)), + })) + # re-assigning the same node changes nothing + store.set(ent_type, ent_ids, p_comment, {'foobar'}) + self.assertTrue(set(store._graph).issuperset({ + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foobar', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foobar', datatype=rdflib.XSD.string)), + })) + # can set multiple non-unique literals at once + store.set(ent_type, ent_ids, p_comment, {'foo', 'bar'}) + self.assertTrue(set(store._graph).issuperset({ + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), + })) + # non-unique literals are appended by set + store.set(ent_type, ent_ids, p_comment, {'hello world'}) + self.assertTrue(set(store._graph).issuperset({ + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('hello world', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('hello world', datatype=rdflib.XSD.string)), + })) + + # set unique node + store.set(ent_type, ent_ids, p_author, {URI('http://example.com/me/user#1234')}) + self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#1234')), + set(store._graph)) + self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#1234')), + set(store._graph)) + # re-assigning the same node changes nothing + store.set(ent_type, ent_ids, p_author, {URI('http://example.com/me/user#1234')}) + 
self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#1234')), + set(store._graph)) + self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#1234')), + set(store._graph)) + # cannot set multiple unique nodes + self.assertRaises(ValueError, store.set, ent_type, ent_ids, p_author, {URI('http://example.com/me/user#1234'), URI('http://example.com/me/user#4321')}) + # unique nodes are overwritten by set + store.set(ent_type, ent_ids, p_author, {URI('http://example.com/me/user#4321')}) + self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#4321')), + set(store._graph)) + self.assertNotIn( + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#1234')), + set(store._graph)) + self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#4321')), + set(store._graph)) + self.assertNotIn( + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#1234')), + set(store._graph)) + + # set non-unique node + store.set(ent_type, ent_ids, p_tag, {'http://example.com/me/tag#foobar'}) + self.assertTrue(set(store._graph).issuperset({ + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foobar')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foobar')), + })) + # re-assigning the same node changes nothing + store.set(ent_type, ent_ids, p_tag, {'http://example.com/me/tag#foobar'}) + self.assertTrue(set(store._graph).issuperset({ + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foobar')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foobar')), + })) + # can set multiple non-unique literals at once + store.set(ent_type, ent_ids, p_tag, {'http://example.com/me/tag#1234', 'http://example.com/me/tag#4321'}) + self.assertTrue(set(store._graph).issuperset({ + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), + })) + # non-unique nodes are appended by set + store.set(ent_type, ent_ids, p_tag, {'http://example.com/me/tag#foo', 'http://example.com/me/tag#bar'}) + self.assertTrue(set(store._graph).issuperset({ + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foo')), + 
(rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#bar')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foo')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#bar')), + })) + + # nothing happens when no guids are given + plen = len(store._graph) + store.set(ent_type, set(), p_comment, {'xyz'}) + store.set(ent_type, set(), p_tag, {URI('http://example.com/me/tag#xyz')}) + self.assertEqual(plen, len(store._graph)) + + # guids must be instances of node_type + self.assertRaises(errors.InstanceError, store.set, ent_type, tag_ids, p_comment, {'xyz'}) + # inexistent guids + self.assertRaises(errors.InstanceError, store.set, ent_type, {URI('http://example.com/me/entity#foobar')}, p_comment, {'xyz'}) + + +## main ## + +if __name__ == '__main__': + unittest.main() + +## EOF ## diff --git a/test/triple_store/test_sparql.py b/test/triple_store/test_sparql.py deleted file mode 100644 index 8d98749..0000000 --- a/test/triple_store/test_sparql.py +++ /dev/null @@ -1,769 +0,0 @@ -""" - -Part of the bsfs test suite. -A copy of the license is provided with the project. -Author: Matthias Baumgartner, 2022 -""" -# imports -import rdflib -import unittest - -# bsie imports -from bsfs import schema as _schema -from bsfs.namespace import ns -from bsfs.utils import errors, URI - -# objects to test -from bsfs.triple_store.sparql import SparqlStore - - -## code ## - -class TestSparqlStore(unittest.TestCase): - def setUp(self): - self.schema = _schema.Schema.from_string(''' - prefix rdfs: - prefix xsd: - - prefix bsfs: - prefix bse: - - bsfs:Entity rdfs:subClassOf bsfs:Node . - bsfs:Tag rdfs:subClassOf bsfs:Node . - bsfs:User rdfs:subClassOf bsfs:Node . - xsd:string rdfs:subClassOf bsfs:Literal . - xsd:integer rdfs:subClassOf bsfs:Literal . - - # non-unique literal - bse:comment rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; - rdfs:range xsd:string ; - bsfs:unique "false"^^xsd:boolean . - - # unique literal - bse:filesize rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; - rdfs:range xsd:integer ; - bsfs:unique "true"^^xsd:boolean . - - # non-unique node - bse:tag rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; - rdfs:range bsfs:Tag ; - bsfs:unique "false"^^xsd:boolean . - - # unique node - bse:author rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; - rdfs:range bsfs:User ; - bsfs:unique "true"^^xsd:boolean . - - ''') - - def test_essentials(self): - store = SparqlStore.Open() - # equality - self.assertEqual(store, store) - self.assertEqual(hash(store), hash(store)) - self.assertNotEqual(store, SparqlStore.Open()) - self.assertNotEqual(hash(store), hash(SparqlStore.Open())) - # string conversion - self.assertEqual(str(store), 'SparqlStore(uri=None)') - self.assertEqual(repr(store), 'SparqlStore(uri=None)') - # open - self.assertIsInstance(SparqlStore.Open(), SparqlStore) - - - def test__has_type(self): - # setup store - store = SparqlStore.Open() - store.schema = _schema.Schema.from_string(''' - prefix rdfs: - prefix xsd: - prefix bsfs: - - bsfs:Entity rdfs:subClassOf bsfs:Node . 
- bsfs:Document rdfs:subClassOf bsfs:Entity . - bsfs:Image rdfs:subClassOf bsfs:Entity . - bsfs:PDF rdfs:subClassOf bsfs:Document . - - ''') - # add some instances - store.create(store.schema.node(ns.bsfs.Entity), {URI('http://example.com/me/entity#1234')}) - store.create(store.schema.node(ns.bsfs.Document), {URI('http://example.com/me/document#1234')}) - store.create(store.schema.node(ns.bsfs.Image), {URI('http://example.com/me/image#1234')}) - store.create(store.schema.node(ns.bsfs.PDF), {URI('http://example.com/me/pdf#1234')}) - - # node_type must be in the schema - self.assertRaises(errors.ConsistencyError, store._has_type, URI('http://example.com/me/entity#1234'), store.schema.node(ns.bsfs.Node).get_child(ns.bsfs.invalid)) - - # returns False on inexistent nodes - self.assertFalse(store._has_type(URI('http://example.com/me/entity#4321'), store.schema.node(ns.bsfs.Entity))) - self.assertFalse(store._has_type(URI('http://example.com/me/document#4321'), store.schema.node(ns.bsfs.Document))) - self.assertFalse(store._has_type(URI('http://example.com/me/image#4321'), store.schema.node(ns.bsfs.Image))) - self.assertFalse(store._has_type(URI('http://example.com/me/pdf#4321'), store.schema.node(ns.bsfs.PDF))) - - # _has_type checks direct types - self.assertTrue(store._has_type(URI('http://example.com/me/entity#1234'), store.schema.node(ns.bsfs.Entity))) - self.assertTrue(store._has_type(URI('http://example.com/me/document#1234'), store.schema.node(ns.bsfs.Document))) - self.assertTrue(store._has_type(URI('http://example.com/me/image#1234'), store.schema.node(ns.bsfs.Image))) - self.assertTrue(store._has_type(URI('http://example.com/me/pdf#1234'), store.schema.node(ns.bsfs.PDF))) - - # _has_type checks type hierarchy - self.assertFalse(store._has_type(URI('http://example.com/me/entity#1234'), store.schema.node(ns.bsfs.Document))) - self.assertFalse(store._has_type(URI('http://example.com/me/entity#1234'), store.schema.node(ns.bsfs.Image))) - self.assertFalse(store._has_type(URI('http://example.com/me/entity#1234'), store.schema.node(ns.bsfs.PDF))) - - self.assertTrue(store._has_type(URI('http://example.com/me/document#1234'), store.schema.node(ns.bsfs.Entity))) - self.assertFalse(store._has_type(URI('http://example.com/me/document#1234'), store.schema.node(ns.bsfs.Image))) - self.assertFalse(store._has_type(URI('http://example.com/me/document#1234'), store.schema.node(ns.bsfs.PDF))) - - self.assertTrue(store._has_type(URI('http://example.com/me/image#1234'), store.schema.node(ns.bsfs.Entity))) - self.assertFalse(store._has_type(URI('http://example.com/me/image#1234'), store.schema.node(ns.bsfs.Document))) - self.assertFalse(store._has_type(URI('http://example.com/me/image#1234'), store.schema.node(ns.bsfs.PDF))) - - self.assertTrue(store._has_type(URI('http://example.com/me/pdf#1234'), store.schema.node(ns.bsfs.Entity))) - self.assertTrue(store._has_type(URI('http://example.com/me/pdf#1234'), store.schema.node(ns.bsfs.Document))) - self.assertFalse(store._has_type(URI('http://example.com/me/pdf#1234'), store.schema.node(ns.bsfs.Image))) - - - def test_schema(self): - # setup - store = SparqlStore.Open() - curr = self.schema - p_comment = curr.predicate(ns.bse.comment) - p_filesize = curr.predicate(ns.bse.filesize) - p_tag = curr.predicate(ns.bse.tag) - p_author = curr.predicate(ns.bse.author) - - # migrate to an initial schema - store.schema = curr - # store has migrated - self.assertEqual(store.schema, curr) - - # add some instances - ent_ids = {URI('http://example.com/me/entity#1234'), 
URI('http://example.com/me/entity#4321')} - tag_ids = {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')} - store.create(curr.node(ns.bsfs.Entity), ent_ids) - store.create(curr.node(ns.bsfs.Tag), tag_ids) - store.create(curr.node(ns.bsfs.User), {URI('http://example.com/me')}) - # add some triples - store.set(curr.node(ns.bsfs.Entity), ent_ids, p_comment, {'foo', 'bar'}) - store.set(curr.node(ns.bsfs.Entity), ent_ids, p_filesize, {1234}) - store.set(curr.node(ns.bsfs.Entity), ent_ids, p_tag, - {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')}) - store.set(curr.node(ns.bsfs.Entity), ent_ids, p_author, - {URI('http://example.com/me')}) - # check instances - instances = { - # node instances - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), - (rdflib.URIRef('http://example.com/me/tag#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), - (rdflib.URIRef('http://example.com/me'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.User)), - # comments - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), - # filesize - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), - # tags - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - # author - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me')), - } - self.assertSetEqual(set(store._graph), instances) - - # add some classes to the schema - curr = curr + _schema.Schema.from_string(''' - prefix rdfs: - prefix xsd: - prefix bsfs: - prefix bse: - prefix bst: - prefix bsc: - - bsfs:Entity rdfs:subClassOf bsfs:Node . - bsfs:Tag rdfs:subClassOf bsfs:Node . - bsfs:Collection rdfs:subClassOf bsfs:Node . - xsd:boolean rdfs:subClassOf bsfs:Literal . - - # literal - bse:shared rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; - rdfs:range xsd:boolean ; - bsfs:unique "true"^^xsd:boolean . 
- - # node - bse:partOf rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; - rdfs:range bsfs:Collection ; - bsfs:unique "false"^^xsd:boolean . - - # predicates across auxiliary node classes - bst:usedIn rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Tag ; - rdfs:range bsfs:Collection ; - bsfs:unique "false"^^xsd:boolean . - - bsc:tag rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Collection ; - rdfs:range bsfs:Tag ; - bsfs:unique "false"^^xsd:boolean . - - bst:principal rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Tag ; - rdfs:range bsfs:Node ; - bsfs:unique "true"^^xsd:boolean . - - ''') - # store migrated to the new schema - store.schema = curr - self.assertEqual(store.schema, curr) - # instances have not changed - self.assertSetEqual(set(store._graph), instances) - # add some instances of the new classes - p_partOf = curr.predicate(ns.bse.partOf) - p_shared = curr.predicate(ns.bse.shared) - p_usedIn = curr.predicate('http://bsfs.ai/schema/Tag#usedIn') - p_ctag = curr.predicate('http://bsfs.ai/schema/Collection#tag') - p_principal = curr.predicate('http://bsfs.ai/schema/Tag#principal') - store.create(curr.node(ns.bsfs.Collection), {URI('http://example.com/me/collection#1234'), URI('http://example.com/me/collection#4321')}) - # add some more triples - store.set(curr.node(ns.bsfs.Entity), ent_ids, p_shared, {True}) - store.set(curr.node(ns.bsfs.Entity), ent_ids, p_partOf, - {URI('http://example.com/me/collection#1234'), URI('http://example.com/me/collection#4321')}) - store.set(curr.node(ns.bsfs.Tag), {URI('http://example.com/me/tag#1234')}, p_usedIn, - {URI('http://example.com/me/collection#1234')}) - store.set(curr.node(ns.bsfs.Collection), {URI('http://example.com/me/collection#4321')}, p_ctag, - {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')}) - store.set(curr.node(ns.bsfs.Tag), {URI('http://example.com/me/tag#1234')}, p_principal, - {URI('http://example.com/me/collection#1234')}) - # new instances are now in the graph - self.assertSetEqual(set(store._graph), instances | { - # collections - (rdflib.URIRef('http://example.com/me/collection#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Collection)), - (rdflib.URIRef('http://example.com/me/collection#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Collection)), - # partOf - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_partOf.uri), rdflib.URIRef('http://example.com/me/collection#1234')), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_partOf.uri), rdflib.URIRef('http://example.com/me/collection#4321')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_partOf.uri), rdflib.URIRef('http://example.com/me/collection#1234')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_partOf.uri), rdflib.URIRef('http://example.com/me/collection#4321')), - # shared - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_shared.uri), rdflib.Literal('true', datatype=rdflib.XSD.boolean)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_shared.uri), rdflib.Literal('true', datatype=rdflib.XSD.boolean)), - # auxiliary node connections - (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.URIRef(p_usedIn.uri), rdflib.URIRef('http://example.com/me/collection#1234')), - (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.URIRef(p_principal.uri), rdflib.URIRef('http://example.com/me/collection#1234')), - (rdflib.URIRef('http://example.com/me/collection#4321'), 
rdflib.URIRef(p_ctag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/collection#4321'), rdflib.URIRef(p_ctag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - }) - - - # remove some classes from the schema - curr = _schema.Schema.from_string(''' - prefix rdfs: - prefix xsd: - prefix bsfs: - prefix bse: - prefix bst: - - bsfs:Entity rdfs:subClassOf bsfs:Node . - bsfs:Tag rdfs:subClassOf bsfs:Node . - bsfs:User rdfs:subClassOf bsfs:Node . - - xsd:boolean rdfs:subClassOf bsfs:Literal . - xsd:integer rdfs:subClassOf bsfs:Literal . - - bse:filesize rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; - rdfs:range xsd:integer ; - bsfs:unique "true"^^xsd:boolean . - - bse:tag rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; - rdfs:range bsfs:Tag ; - bsfs:unique "false"^^xsd:boolean . - - bse:shared rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; - rdfs:range xsd:boolean ; - bsfs:unique "true"^^xsd:boolean . - - bst:principal rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Tag ; - rdfs:range bsfs:Node ; - bsfs:unique "true"^^xsd:boolean . - - # removed: bsfs:Collection - # removed: xsd:string - # removed: bse:comment (bsfs:Entity -> xsd:string) - # removed: bse:partOf (bsfs:Entity -> bsfs:Collection) - # removed: bse:author (bsfs:entity -> bsfs:User) - # removed: bst:usedIn (bsfs:Tag -> bsfs:Collection) - # removed: bsc:tag (bsfs:Collection -> bsfs:Tag) - - ''') - # store migrated to the new schema - store.schema = curr - self.assertEqual(store.schema, curr) - # instances of old classes were removed - self.assertSetEqual(set(store._graph), { - # node instances - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), - (rdflib.URIRef('http://example.com/me/tag#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), - (rdflib.URIRef('http://example.com/me'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.User)), - # filesize - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), - # tags - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - # shared - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_shared.uri), rdflib.Literal('true', datatype=rdflib.XSD.boolean)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_shared.uri), rdflib.Literal('true', datatype=rdflib.XSD.boolean)), - }) - - # can only assign schema instances - self.assertRaises(TypeError, setattr, store, 'schema', None) - self.assertRaises(TypeError, setattr, store, 'schema', 1234) - self.assertRaises(TypeError, setattr, store, 'schema', 'foo') - class Foo(): pass - 
self.assertRaises(TypeError, setattr, store, 'schema', Foo()) - - # cannot migrate to incompatible schema - invalid = _schema.Schema.from_string(''' - prefix rdfs: - prefix xsd: - prefix bsfs: - prefix bse: - - bsfs:Entity rdfs:subClassOf bsfs:Node . - bsfs:Tag rdfs:subClassOf bsfs:Entity . # inconsistent with previous tag definition - - bse:tag rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; - rdfs:range bsfs:Tag ; - bsfs:unique "false"^^xsd:boolean . - - ''') - self.assertRaises(errors.ConsistencyError, setattr, store, 'schema', invalid) - invalid = _schema.Schema.from_string(''' - prefix rdfs: - prefix xsd: - prefix bsfs: - prefix bse: - - bsfs:Entity rdfs:subClassOf bsfs:Node . - bsfs:User rdfs:subClassOf bsfs:Node . - - # inconsistent predicate - bse:tag rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; - rdfs:range bsfs:User; - bsfs:unique "false"^^xsd:boolean . - - ''') - self.assertRaises(errors.ConsistencyError, setattr, store, 'schema', invalid) - - - def test_transaction(self): - # store setup - store = SparqlStore.Open() - store.schema = self.schema - p_tag = store.schema.predicate(ns.bse.tag) - p_filesize = store.schema.predicate(ns.bse.filesize) - # prepare node types - ent_type = store.schema.node(ns.bsfs.Entity) - tag_type = store.schema.node(ns.bsfs.Tag) - ent_ids = {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')} - tag_ids = {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')} - # target instances - instances = { - # node instances - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), - (rdflib.URIRef('http://example.com/me/tag#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), - # links - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - } - - # add some data - store.create(ent_type, ent_ids) - store.create(tag_type, tag_ids) - store.set(ent_type, ent_ids, p_tag, tag_ids) - store.set(ent_type, ent_ids, p_filesize, {1234}) - # current transaction is visible - self.assertSetEqual(set(store._graph), instances | { - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), - }) - - # rollback undoes previous changes - store.rollback() - self.assertSetEqual(set(store._graph), set()) - - # add some data once more - store.create(ent_type, ent_ids) - store.create(tag_type, tag_ids) - store.set(ent_type, ent_ids, p_tag, tag_ids) - store.set(ent_type, ent_ids, p_filesize, {1234}) - # current transaction is visible - self.assertSetEqual(set(store._graph), instances | { - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), 
rdflib.Literal(1234, datatype=rdflib.XSD.integer)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), - }) - - # commit saves changes - store.commit() - self.assertSetEqual(set(store._graph), instances | { - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), - }) - - # add additional data - store.create(ent_type, {URI('http://example.com/me/entity#hello')}) - store.set(ent_type, {URI('http://example.com/me/entity#hello')}, p_tag, tag_ids) - store.set(ent_type, ent_ids, p_filesize, {4321}) - self.assertSetEqual(set(store._graph), instances | { - (rdflib.URIRef('http://example.com/me/entity#hello'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/entity#hello'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/entity#hello'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(4321, datatype=rdflib.XSD.integer)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(4321, datatype=rdflib.XSD.integer)), - }) - - # rollback undoes only changes since last commit - store.rollback() - self.assertSetEqual(set(store._graph), instances | { - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), - }) - - - def test_exists(self): - # store setup - store = SparqlStore.Open() - store.schema = self.schema - # prepare node types - ent_type = store.schema.node(ns.bsfs.Entity) - tag_type = store.schema.node(ns.bsfs.Tag) - # create node instances - ent_ids = { - URI('http://example.com/me/entity#1234'), - URI('http://example.com/me/entity#4321'), - } - tag_ids = { - URI('http://example.com/me/tag#1234'), - URI('http://example.com/me/tag#4321'), - } - store.create(ent_type, ent_ids) - store.create(tag_type, tag_ids) - - # exists returns all existing nodes of the correct type - self.assertSetEqual(ent_ids, set(store.exists(ent_type, ent_ids))) - self.assertSetEqual(tag_ids, set(store.exists(tag_type, tag_ids))) - # exists returns only nodes that match the type - self.assertSetEqual(set(), set(store.exists(ent_type, tag_ids))) - self.assertSetEqual({URI('http://example.com/me/entity#1234')}, set(store.exists(ent_type, { - URI('http://example.com/me/tag#1234'), - URI('http://example.com/me/entity#1234'), - }))) - # exists returns only nodes that exist - self.assertSetEqual(set(), set(store.exists(ent_type, { - URI('http://example.com/me/entity#foo'), - URI('http://example.com/me/entity#bar'), - }))) - self.assertSetEqual({URI('http://example.com/me/entity#1234')}, set(store.exists(ent_type, { - URI('http://example.com/me/entity#foo'), - URI('http://example.com/me/entity#1234'), - }))) - - - def test_create(self): - # setup - store = SparqlStore.Open() - store.schema = self.schema - - # node type must be valid - self.assertRaises(errors.ConsistencyError, store.create, 
self.schema.node(ns.bsfs.Entity).get_child(ns.bsfs.invalid), { - URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')}) - - # can create some nodes - ent_type = store.schema.node(ns.bsfs.Entity) - store.create(ent_type, {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')}) - self.assertSetEqual(set(store._graph), { - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - }) - - # existing nodes are skipped - store.create(ent_type, {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#5678')}) - self.assertSetEqual(set(store._graph), { - # previous triples - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - # new triples - (rdflib.URIRef('http://example.com/me/entity#5678'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - }) - - # can create nodes of a different type - tag_type = store.schema.node(ns.bsfs.Tag) - store.create(tag_type, {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')}) - self.assertSetEqual(set(store._graph), { - # previous triples - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/entity#5678'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - # new triples - (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), - (rdflib.URIRef('http://example.com/me/tag#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), - }) - - # creation does not change types of existing nodes - tag_type = store.schema.node(ns.bsfs.Tag) - store.create(tag_type, {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')}) - self.assertSetEqual(set(store._graph), { - # previous triples - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/entity#5678'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - # new triples - (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), - (rdflib.URIRef('http://example.com/me/tag#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), - }) - - - def test_set(self): - # store setup - store = SparqlStore.Open() - store.schema = self.schema - # prepare node types - ent_type = store.schema.node(ns.bsfs.Entity) - user_type = store.schema.node(ns.bsfs.User) - tag_type = store.schema.node(ns.bsfs.Tag) - # prepare predicates - p_filesize = store.schema.predicate(ns.bse.filesize) - p_comment = store.schema.predicate(ns.bse.comment) - p_author = store.schema.predicate(ns.bse.author) - p_tag = store.schema.predicate(ns.bse.tag) - p_invalid = store.schema.predicate(ns.bsfs.Predicate).get_child(ns.bsfs.foo, range=store.schema.node(ns.bsfs.Tag)) - # create node instances - ent_ids = { - URI('http://example.com/me/entity#1234'), - URI('http://example.com/me/entity#4321'), - } - tag_ids = { - URI('http://example.com/me/tag#1234'), - URI('http://example.com/me/tag#4321'), - 
URI('http://example.com/me/tag#foo'), - URI('http://example.com/me/tag#bar'), - URI('http://example.com/me/tag#foobar'), - URI('http://example.com/me/tag#xyz'), - } - user_ids = { - URI('http://example.com/me/user#1234'), - URI('http://example.com/me/user#4321'), - } - store.create(ent_type, ent_ids) - store.create(tag_type, tag_ids) - store.create(user_type, user_ids) - - # invalid node_type is not permitted - self.assertRaises(errors.ConsistencyError, store.set, self.schema.node(ns.bsfs.Node).get_child(ns.bse.foo), - ent_ids, p_comment, {'hello world'}) - - # invalid predicate is not permitted - self.assertRaises(errors.ConsistencyError, store.set, ent_type, ent_ids, p_invalid, {'http://example.com/me/tag#1234'}) - - # predicate must match node_type - self.assertRaises(errors.ConsistencyError, store.set, tag_type, tag_ids, p_filesize, {1234}) - - # empty value does not change the graph - plen = len(store._graph) - store.set(ent_type, ent_ids, p_filesize, []) - store.set(ent_type, ent_ids, p_comment, []) - store.set(ent_type, ent_ids, p_author, []) - store.set(ent_type, ent_ids, p_tag, []) - self.assertEqual(plen, len(store._graph)) - - # cannot set multiple values on unique predicates - self.assertRaises(ValueError, store.set, ent_type, ent_ids, p_filesize, {1234, 4321}) - self.assertRaises(ValueError, store.set, ent_type, ent_ids, p_author, {URI('http://example.com/me/user#1234'), URI('http://example.com/me/user#4321')}) - - # value nodes must exist - self.assertRaises(errors.InstanceError, store.set, ent_type, ent_ids, p_author, {URI('http://example.com/me/user#invalid')}) - self.assertRaises(errors.InstanceError, store.set, ent_type, ent_ids, p_tag, {URI('http://example.com/me/tag#invalid')}) - - # value node types must be consistent with the predicate - self.assertRaises(errors.InstanceError, store.set, ent_type, ent_ids, p_author, {URI('http://example.com/me/entity#1234')}) - self.assertRaises(errors.InstanceError, store.set, ent_type, ent_ids, p_tag, {URI('http://example.com/me/entity#1234')}) - - # all value nodes must exist and be consistent - self.assertRaises(errors.InstanceError, store.set, ent_type, ent_ids, p_tag, { - URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#invalid'), URI('http://example.com/me/entity#1234')}) - - - # set unique literal - store.set(ent_type, ent_ids, p_filesize, {1234}) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), - set(store._graph)) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), - set(store._graph)) - # re-assigning the same node changes nothing - store.set(ent_type, ent_ids, p_filesize, {1234}) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), - set(store._graph)) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), - set(store._graph)) - # cannot set multiple unique literals - self.assertRaises(ValueError, store.set, ent_type, ent_ids, p_filesize, {1234, 4321}) # same test as above - # unique literals are overwritten by set - store.set(ent_type, ent_ids, p_filesize, {4321}) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('4321', 
datatype=rdflib.XSD.integer)), - set(store._graph)) - self.assertNotIn( - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), - set(store._graph)) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('4321', datatype=rdflib.XSD.integer)), - set(store._graph)) - self.assertNotIn( - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), - set(store._graph)) - - # set non-unique literal - store.set(ent_type, ent_ids, p_comment, {'foobar'}) - self.assertTrue(set(store._graph).issuperset({ - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foobar', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foobar', datatype=rdflib.XSD.string)), - })) - # re-assigning the same node changes nothing - store.set(ent_type, ent_ids, p_comment, {'foobar'}) - self.assertTrue(set(store._graph).issuperset({ - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foobar', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foobar', datatype=rdflib.XSD.string)), - })) - # can set multiple non-unique literals at once - store.set(ent_type, ent_ids, p_comment, {'foo', 'bar'}) - self.assertTrue(set(store._graph).issuperset({ - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), - })) - # non-unique literals are appended by set - store.set(ent_type, ent_ids, p_comment, {'hello world'}) - self.assertTrue(set(store._graph).issuperset({ - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('hello world', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('hello world', datatype=rdflib.XSD.string)), - })) - - # set unique node - store.set(ent_type, ent_ids, p_author, {URI('http://example.com/me/user#1234')}) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#1234')), - set(store._graph)) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_author.uri), 
rdflib.URIRef('http://example.com/me/user#1234')), - set(store._graph)) - # re-assigning the same node changes nothing - store.set(ent_type, ent_ids, p_author, {URI('http://example.com/me/user#1234')}) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#1234')), - set(store._graph)) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#1234')), - set(store._graph)) - # cannot set multiple unique nodes - self.assertRaises(ValueError, store.set, ent_type, ent_ids, p_author, {URI('http://example.com/me/user#1234'), URI('http://example.com/me/user#4321')}) - # unique nodes are overwritten by set - store.set(ent_type, ent_ids, p_author, {URI('http://example.com/me/user#4321')}) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#4321')), - set(store._graph)) - self.assertNotIn( - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#1234')), - set(store._graph)) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#4321')), - set(store._graph)) - self.assertNotIn( - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#1234')), - set(store._graph)) - - # set non-unique node - store.set(ent_type, ent_ids, p_tag, {'http://example.com/me/tag#foobar'}) - self.assertTrue(set(store._graph).issuperset({ - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foobar')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foobar')), - })) - # re-assigning the same node changes nothing - store.set(ent_type, ent_ids, p_tag, {'http://example.com/me/tag#foobar'}) - self.assertTrue(set(store._graph).issuperset({ - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foobar')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foobar')), - })) - # can set multiple non-unique literals at once - store.set(ent_type, ent_ids, p_tag, {'http://example.com/me/tag#1234', 'http://example.com/me/tag#4321'}) - self.assertTrue(set(store._graph).issuperset({ - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - })) - # non-unique nodes are appended by set - store.set(ent_type, ent_ids, p_tag, {'http://example.com/me/tag#foo', 'http://example.com/me/tag#bar'}) - self.assertTrue(set(store._graph).issuperset({ - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/entity#1234'), 
rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foo')), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#bar')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foo')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#bar')), - })) - - # nothing happens when no guids are given - plen = len(store._graph) - store.set(ent_type, set(), p_comment, {'xyz'}) - store.set(ent_type, set(), p_tag, {URI('http://example.com/me/tag#xyz')}) - self.assertEqual(plen, len(store._graph)) - - # guids must be instances of node_type - self.assertRaises(errors.InstanceError, store.set, ent_type, tag_ids, p_comment, {'xyz'}) - # inexistent guids - self.assertRaises(errors.InstanceError, store.set, ent_type, {URI('http://example.com/me/entity#foobar')}, p_comment, {'xyz'}) - - -## main ## - -if __name__ == '__main__': - unittest.main() - -## EOF ## -- cgit v1.2.3 From a0f2308adcb226d28de3355bc7115a6d9b669462 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Mon, 19 Dec 2022 13:40:02 +0100 Subject: import fixes --- test/triple_store/test_base.py | 3 +++ 1 file changed, 3 insertions(+) (limited to 'test/triple_store') diff --git a/test/triple_store/test_base.py b/test/triple_store/test_base.py index a4b0559..a0c3260 100644 --- a/test/triple_store/test_base.py +++ b/test/triple_store/test_base.py @@ -35,6 +35,9 @@ class DummyBase(TripleStoreBase): def schema(self, schema): pass + def get(self, node_type, query): + pass + def exists(self, node_type, guids): pass -- cgit v1.2.3 From 73e39cb4967949025aefe874f401e27b0abb772c Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Thu, 22 Dec 2022 20:29:57 +0100 Subject: filter ast parser and get method in sparql store --- test/triple_store/sparql/test_parse_filter.py | 727 ++++++++++++++++++++++++++ test/triple_store/sparql/test_sparql.py | 90 +++- 2 files changed, 808 insertions(+), 9 deletions(-) create mode 100644 test/triple_store/sparql/test_parse_filter.py (limited to 'test/triple_store') diff --git a/test/triple_store/sparql/test_parse_filter.py b/test/triple_store/sparql/test_parse_filter.py new file mode 100644 index 0000000..bd19803 --- /dev/null +++ b/test/triple_store/sparql/test_parse_filter.py @@ -0,0 +1,727 @@ +""" + +Part of the bsfs test suite. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import rdflib +import unittest + +# bsie imports +from bsfs import schema as _schema +from bsfs.namespace import ns +from bsfs.query import ast +from bsfs.utils import errors + +# objects to test +from bsfs.triple_store.sparql.parse_filter import Filter + + +## code ## + +class TestParseFilter(unittest.TestCase): + def setUp(self): + # schema + self.schema = _schema.Schema.from_string(''' + prefix rdfs: + prefix xsd: + + prefix bsfs: + prefix bse: + + bsfs:Entity rdfs:subClassOf bsfs:Node . + bsfs:Image rdfs:subClassOf bsfs:Entity . + bsfs:Tag rdfs:subClassOf bsfs:Node . 
+ + xsd:string rdfs:subClassOf bsfs:Literal . + xsd:integer rdfs:subClassOf bsfs:Literal . + bsfs:URI rdfs:subClassOf bsfs:Literal . + + bse:comment rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Node ; + rdfs:range xsd:string ; + bsfs:unique "false"^^xsd:boolean . + + bse:filesize rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range xsd:integer ; + bsfs:unique "true"^^xsd:boolean . + + bse:buddy rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range bsfs:Node ; + bsfs:unique "false"^^xsd:boolean . + + bse:tag rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range bsfs:Tag ; + bsfs:unique "false"^^xsd:boolean . + + bse:representative rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Tag ; + rdfs:range bsfs:Image ; + bsfs:unique "false"^^xsd:boolean . + + bse:iso rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Image ; + rdfs:range xsd:integer ; + bsfs:unique "true"^^xsd:boolean . + + ''') + + # parser instance + self.parser = Filter(self.schema) + + # graph to test queries + self.graph = rdflib.Graph() + # schema hierarchies + self.graph.add((rdflib.URIRef('http://bsfs.ai/schema/Entity'), rdflib.RDFS.subClassOf, rdflib.URIRef('http://bsfs.ai/schema/Node'))) + self.graph.add((rdflib.URIRef('http://bsfs.ai/schema/Image'), rdflib.RDFS.subClassOf, rdflib.URIRef('http://bsfs.ai/schema/Entity'))) + self.graph.add((rdflib.URIRef('http://bsfs.ai/schema/Tag'), rdflib.RDFS.subClassOf, rdflib.URIRef('http://bsfs.ai/schema/Node'))) + # entities + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Entity'))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Entity'))) + # tags + self.graph.add((rdflib.URIRef('http://example.com/tag#1234'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Tag'))) + self.graph.add((rdflib.URIRef('http://example.com/tag#4321'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Tag'))) + # images + self.graph.add((rdflib.URIRef('http://example.com/image#1234'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Image'))) + self.graph.add((rdflib.URIRef('http://example.com/image#4321'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Image'))) + # node comments + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('Me, Myself, and I', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('hello world', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('hello world', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/image#1234'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('Me, Myself, and I', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/tag#1234'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('Me, Myself, and I', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/tag#4321'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('4321', datatype=rdflib.XSD.string))) + # entity filesizes + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.filesize), rdflib.Literal(1234, datatype=rdflib.XSD.integer))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), 
rdflib.URIRef(ns.bse.filesize), rdflib.Literal(4321, datatype=rdflib.XSD.integer))) + self.graph.add((rdflib.URIRef('http://example.com/image#1234'), rdflib.URIRef(ns.bse.filesize), rdflib.Literal(1234, datatype=rdflib.XSD.integer))) + self.graph.add((rdflib.URIRef('http://example.com/image#4321'), rdflib.URIRef(ns.bse.filesize), rdflib.Literal(4321, datatype=rdflib.XSD.integer))) + # entity tags + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.tag), rdflib.URIRef('http://example.com/tag#1234'))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(ns.bse.tag), rdflib.URIRef('http://example.com/tag#4321'))) + self.graph.add((rdflib.URIRef('http://example.com/image#1234'), rdflib.URIRef(ns.bse.tag), rdflib.URIRef('http://example.com/tag#1234'))) + # tag representatives + self.graph.add((rdflib.URIRef('http://example.com/tag#1234'), rdflib.URIRef(ns.bse.representative), rdflib.URIRef('http://example.com/image#1234'))) + self.graph.add((rdflib.URIRef('http://example.com/tag#4321'), rdflib.URIRef(ns.bse.representative), rdflib.URIRef('http://example.com/image#4321'))) + # entity buddies + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.buddy), rdflib.URIRef('http://example.com/image#1234'))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(ns.bse.buddy), rdflib.URIRef('http://example.com/image#4321'))) + # image iso + self.graph.add((rdflib.URIRef('http://example.com/image#1234'), rdflib.URIRef(ns.bse.iso), rdflib.Literal(1234, datatype=rdflib.XSD.integer))) + self.graph.add((rdflib.URIRef('http://example.com/image#4321'), rdflib.URIRef(ns.bse.iso), rdflib.Literal(4321, datatype=rdflib.XSD.integer))) + + + def test_routing(self): + self.assertRaises(errors.BackendError, self.parser._parse_filter_expression, '1234', None, '') + self.assertRaises(errors.BackendError, self.parser._parse_predicate_expression, '1234', None) + + def test_call(self): + # NOTE: The individual ast components are considered in the respective tests. Here, we test __call__ specifics. 
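# For orientation, an illustrative sketch (not part of the patch) of the call
# pattern exercised below, reusing the setUp fixtures; only the behaviour, not
# the exact SPARQL text emitted by Filter, is assumed:
root = self.schema.node(ns.bsfs.Entity)
query = self.parser(root, ast.filter.Any(ns.bse.comment, ast.filter.Equals('hello world')))
matches = {str(guid) for guid, in self.graph.query(query)}
# the filter argument is optional: without it, every instance of the root type
# (including subtypes such as bsfs:Image) is returned
everything = {str(guid) for guid, in self.graph.query(self.parser(root))}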
+ + # __call__ requires a valid root type + self.assertRaises(errors.BackendError, self.parser, self.schema.literal(ns.bsfs.Literal), None) + self.assertRaises(errors.ConsistencyError, self.parser, self.schema.node(ns.bsfs.Node).get_child(ns.bsfs.Invalid), None) + # __call__ requires a parseable root + self.assertRaises(errors.BackendError, self.parser, self.schema.node(ns.bsfs.Entity), ast.filter.FilterExpression()) + # __call__ returns an executable query + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Or( + ast.filter.Is('http://example.com/entity#1234'), + ast.filter.Is('http://example.com/entity#5678'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, {'http://example.com/entity#1234'}) + # root is optional + q = self.parser(self.schema.node(ns.bsfs.Entity)) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234', 'http://example.com/image#4321'}) + q = self.parser(self.schema.node(ns.bsfs.Tag)) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/tag#1234', 'http://example.com/tag#4321'}) + + + def test_is(self): + # _is requires a node + self.assertRaises(errors.BackendError, self.parser._is, self.schema.literal(ns.bsfs.Literal), ast.filter.Is('http://example.com/entity#1234'), '?ent') + # a single Is statement + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Is('http://example.com/entity#1234')) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234'}) + # an aggregate of Is statements + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Or( + ast.filter.Is('http://example.com/entity#1234'), + ast.filter.Is('http://example.com/entity#4321'), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/entity#4321'}) + # combined with other filters + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.And( + ast.filter.Or( + ast.filter.Is('http://example.com/entity#1234'), + ast.filter.Is('http://example.com/entity#4321'), + ), + ast.filter.Any(ns.bse.comment, + ast.filter.Equals('Me, Myself, and I') + ), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234'}) + # as argument of Any/All + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Any(ns.bse.tag, ast.filter.Is('http://example.com/tag#1234'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + + + def test_equals(self): + # _equals requires a literal + self.assertRaises(errors.BackendError, self.parser._equals, self.schema.node(ns.bsfs.Entity), ast.filter.Equals('hello world'), '?ent') + # a single Equals statement + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.Equals('hello world'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/entity#4321'}) + # a single Equals statement that includes subtypes + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + # an Equals statement on an integer + q = 
self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.Equals(4321)))
+        self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+            {'http://example.com/entity#4321', 'http://example.com/image#4321'})
+
+
+    def test_substring(self):
+        # _substring requires a literal
+        self.assertRaises(errors.BackendError, self.parser._substring, self.schema.node(ns.bsfs.Entity), ast.filter.Substring('hello world'), '?ent')
+        # a single Substring statement
+        q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.Substring('hello')))
+        self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+            {'http://example.com/entity#1234', 'http://example.com/entity#4321'})
+        q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.Substring('lo wo')))
+        self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+            {'http://example.com/entity#1234', 'http://example.com/entity#4321'})
+        # a single Substring statement that includes subtypes
+        q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.Substring('Myself')))
+        self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+            {'http://example.com/entity#1234', 'http://example.com/image#1234'})
+        # a Substring statement on an integer
+        q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.Substring('32')))
+        self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+            {'http://example.com/entity#4321', 'http://example.com/image#4321'})
+
+
+    def test_starts_with(self):
+        # _starts_with requires a literal
+        self.assertRaises(errors.BackendError, self.parser._starts_with, self.schema.node(ns.bsfs.Entity), ast.filter.StartsWith('hello world'), '?ent')
+        # a single StartsWith statement
+        q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.StartsWith('hello')))
+        self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+            {'http://example.com/entity#1234', 'http://example.com/entity#4321'})
+        # a single StartsWith statement that includes subtypes
+        q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.StartsWith('Me, Mys')))
+        self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+            {'http://example.com/entity#1234', 'http://example.com/image#1234'})
+        # a StartsWith statement on an integer
+        q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.StartsWith(432)))
+        self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+            {'http://example.com/entity#4321', 'http://example.com/image#4321'})
+
+
+    def test_ends_with(self):
+        # _ends_with requires a literal
+        self.assertRaises(errors.BackendError, self.parser._ends_with, self.schema.node(ns.bsfs.Entity), ast.filter.EndsWith('hello world'), '?ent')
+        # a single EndsWith statement
+        q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.EndsWith('orld')))
+        self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+            {'http://example.com/entity#1234', 'http://example.com/entity#4321'})
+        # a single EndsWith statement that includes subtypes
+        q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.EndsWith('and I')))
+        self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+            {'http://example.com/entity#1234', 'http://example.com/image#1234'})
+        # an EndsWith statement on an integer
+        q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.EndsWith(321)))
+        self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+            {'http://example.com/entity#4321', 'http://example.com/image#4321'})
+
+
+    def test_less_than(self):
+        # _less_than requires a literal
+        self.assertRaises(errors.BackendError, self.parser._less_than, self.schema.node(ns.bsfs.Entity), ast.filter.LessThan(2000), '?ent')
+        # a single LessThan statement
+        q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.LessThan(2000)))
+        self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+            {'http://example.com/image#1234'})
+        # _less_than respects boundary
+        q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.LessThan(1234, strict=True)))
+        self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set())
+        q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.LessThan(1234, strict=False)))
+        self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+            {'http://example.com/image#1234'})
+        # a single LessThan statement that includes subtypes
+        q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.LessThan(2000)))
+        self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+            {'http://example.com/entity#1234', 'http://example.com/image#1234'})
+        # a LessThan statement on a string
+        # always negative; note that http://example.com/tag#4321 is also not returned although its comment is a pure number
+        q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.LessThan(10_000)))
+        self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set())
+
+
+    def test_greater_than(self):
+        # _greater_than requires a literal
+        self.assertRaises(errors.BackendError, self.parser._greater_than, self.schema.node(ns.bsfs.Entity), ast.filter.GreaterThan(2000), '?ent')
+        # a single GreaterThan statement
+        q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.GreaterThan(2000)))
+        self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+            {'http://example.com/image#4321'})
+        # _greater_than respects boundary
+        q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.GreaterThan(4321, strict=True)))
+        self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set())
+        q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.GreaterThan(4321, strict=False)))
+        self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+            {'http://example.com/image#4321'})
+        # a single GreaterThan statement that includes subtypes
+        q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.GreaterThan(2000)))
+        self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+            {'http://example.com/entity#4321', 'http://example.com/image#4321'})
+        # a GreaterThan statement on a string
+        # always positive
+        q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.GreaterThan(0)))
+        self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+            {'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234'})
+
+
+    def test_and(self):
+        # And children have to match the node type
+        self.assertRaises(errors.BackendError, self.parser,
+            self.schema.node(ns.bsfs.Entity),
+
ast.filter.And( + ast.filter.StartsWith('hello'), + ast.filter.EndsWith('world'), + )) + # no child produces an empty query + self.assertEqual(self.parser._and( + self.schema.node(ns.bsfs.Entity), + ast.filter.And(), '?ent'), '') + # And can mix different conditions + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.And( + ast.filter.Is('http://example.com/entity#1234'), + ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234)), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234'}) + # all conditions have to match + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.And( + ast.filter.Is('http://example.com/entity#4321'), + ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234)), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set()) + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.And( + ast.filter.Is('http://example.com/entity#1234'), + ast.filter.Any(ns.bse.filesize, ast.filter.Equals(4321)), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set()) + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.And( + ast.filter.Is('http://example.com/entity#1234'), + ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234)), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('foobar')), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set()) + # And can be nested + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.And( + ast.filter.Is('http://example.com/entity#1234'), + ast.filter.And( + ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234)), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')), + ), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234'}) + + + def test_or(self): + # Or childs have to match the node type + self.assertRaises(errors.BackendError, self.parser, + self.schema.node(ns.bsfs.Entity), + ast.filter.Or( + ast.filter.StartsWith('hello'), + ast.filter.EndsWith('world'), + )) + # no child produces an empty query + self.assertEqual(self.parser._and( + self.schema.node(ns.bsfs.Entity), + ast.filter.Or(), '?ent'), '') + # Or can mix different conditions + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Or( + ast.filter.Is('http://example.com/entity#1234'), + ast.filter.Any(ns.bse.filesize, ast.filter.Equals(4321)), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234', 'http://example.com/entity#4321', 'http://example.com/image#4321'}) + # at least one condition has to match + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Or( + ast.filter.Is('http://example.com/entity#5678'), + ast.filter.Any(ns.bse.filesize, ast.filter.Equals(8765)), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('foobar')), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set()) + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Or( + ast.filter.Is('http://example.com/entity#1234'), + ast.filter.Any(ns.bse.filesize, ast.filter.Equals(8765)), + ast.filter.Any(ns.bse.comment, 
ast.filter.Equals('foobar')), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234'}) + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Or( + ast.filter.Is('http://example.com/entity#5678'), + ast.filter.Any(ns.bse.filesize, ast.filter.Equals(4321)), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('foobar')), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#4321', 'http://example.com/image#4321'}) + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Or( + ast.filter.Is('http://example.com/entity#5678'), + ast.filter.Any(ns.bse.filesize, ast.filter.Equals(8765)), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + # Or can be nested + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Or( + ast.filter.Is('http://example.com/entity#1234'), + ast.filter.Or( + ast.filter.Any(ns.bse.filesize, ast.filter.Equals(4321)), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')), + ), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234', 'http://example.com/entity#4321', 'http://example.com/image#4321'}) + + + + def test_any(self): + # _any requires a node + self.assertRaises(errors.BackendError, self.parser._any, + self.schema.literal(ns.bsfs.Literal), + ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234)), '?ent') + # node type must match predicate's domain + self.assertRaises(errors.ConsistencyError, self.parser._any, + self.schema.node(ns.bsfs.Tag), + ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234)), '?ent') + # predicate must be valid + self.assertRaises(errors.ConsistencyError, self.parser._any, + self.schema.node(ns.bsfs.Entity), + ast.filter.Any(ns.bse.invalid, ast.filter.Equals(1234)), '?ent') + # _any returns a valid query + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + # _any can be nested + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Any(ns.bse.tag, + ast.filter.Any(ns.bse.representative, + ast.filter.Is('http://example.com/image#1234')))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + + + def test_all(self): + # All requires a Node + self.assertRaises(errors.BackendError, self.parser._all, self.schema.literal(ns.bsfs.Literal), None, '') + # All Nodes + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.All(ns.bse.tag, ast.filter.Is('http://example.com/tag#1234'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + # All values + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.All(ns.bse.comment, ast.filter.Equals('hello world'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#4321'}) + # All on value within Or branch + # entity#1234 is selected because all of its comments are in ("hello world", "Me, Myself, and I") + q = self.parser(self.schema.node(ns.bsfs.Entity), + 
ast.filter.All(ns.bse.comment, ast.filter.Or( + ast.filter.Equals('hello world'), + ast.filter.Equals('Me, Myself, and I')))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234'}) + # All requires at least one predicate/value + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.All(ns.bse.comment, ast.filter.Equals('Me, Myself, and I'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/image#1234'}) + # All within a statement + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.And( + ast.filter.All(ns.bse.tag, ast.filter.Is('http://example.com/tag#1234')), # entity#1234, image#1234 + ast.filter.All(ns.bse.comment, ast.filter.Or( # entity#1234, entity#4321, image#1234 + ast.filter.Equals('hello world'), + ast.filter.Equals('Me, Myself, and I'), + )) + ) + ) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + # All with reversed Predicate + q = self.parser(self.schema.node(ns.bsfs.Tag), + ast.filter.All(ast.filter.Predicate(ns.bse.tag, reverse=True), ast.filter.Is('http://example.com/entity#4321'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/tag#4321'}) + # All with multiple predicates + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.All(ast.filter.OneOf(ns.bse.tag, ns.bse.buddy), # entity#1234 (tag:tag#1234), entity#1234 (buddy:image#1234), image#1234(tag:tag#1234) + ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')))) # entity#1234, image#1234, tag#1234 + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + + + + def test_not(self): + # Not applies on conditions + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Not(ast.filter.Is('http://example.com/entity#1234'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/image#1234', 'http://example.com/entity#4321', 'http://example.com/image#4321'}) + # Not applies on conditions within branches + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Any(ns.bse.comment, ast.filter.Not(ast.filter.Equals('Me, Myself, and I')))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/entity#4321'}) + # Not applies on branches + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Not(ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#4321', 'http://example.com/image#4321'}) + # Double Not cancel each other + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Not(ast.filter.Not(ast.filter.Is('http://example.com/entity#1234')))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234'}) + # Not works within aggregation (and) + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.And( + ast.filter.Not(ast.filter.Is('http://example.com/entity#1234')), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('hello world')), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#4321'}) + # Not works within aggregation (or) + q = 
self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Or( + ast.filter.Not(ast.filter.Is('http://example.com/entity#1234')), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234', 'http://example.com/image#4321'}) + # Not works outside aggregation (and) + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Not( + ast.filter.And( + ast.filter.Is('http://example.com/entity#1234'), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('hello world')), + ))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#4321', 'http://example.com/image#1234', 'http://example.com/image#4321'}) + # Not works outside aggregation (or) + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Not( + ast.filter.Or( + ast.filter.Is('http://example.com/entity#4321'), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')), + ))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/image#4321'}) + # Not mixed with branch, aggregation, id, and value + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.And( + ast.filter.Not( # image#1234, image#4321 + ast.filter.Or( # entity#4321, entity#1234 + ast.filter.Is('http://example.com/entity#4321'), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('hello world')), + ) + ), + ast.filter.Any(ns.bse.comment, ast.filter.Not(ast.filter.Equals('foobar'))), # entity#1234, entity#4321, image#1234 + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/image#1234'}) + + + def test_has(self): + # Has requires Node + self.assertRaises(errors.BackendError, self.parser._has, self.schema.literal(ns.bsfs.Literal), None, '') + # Has with GreaterThan constraint + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Has(ns.bse.comment, ast.filter.GreaterThan(0))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234'}) + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Has(ns.bse.comment, ast.filter.GreaterThan(1))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234'}) + # Has with Equals constraint + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Has(ns.bse.comment, 1)) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#4321', 'http://example.com/image#1234'}) + # Has with LessThan constraint + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Has(ns.bse.comment, ast.filter.LessThan(2))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#4321', 'http://example.com/image#1234', 'http://example.com/image#4321'}) + # Has with multiple constraints + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('extra1', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('extra2', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('extra3', datatype=rdflib.XSD.string))) + 
self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('extra4', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('extra5', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('extra1', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('extra2', datatype=rdflib.XSD.string))) + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Has(ns.bse.comment, + ast.filter.And(ast.filter.GreaterThan(1), ast.filter.LessThan(5)))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#4321'}) + # Has with OneOf predicate + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Has(ast.filter.OneOf(ns.bse.tag, ns.bse.buddy), + ast.filter.GreaterThan(1))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/entity#4321'}) + # Has with reversed predicate + q = self.parser(self.schema.node(ns.bsfs.Tag), ast.filter.Has(ast.filter.Predicate(ns.bse.tag, reverse=True), + ast.filter.GreaterThan(1))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/tag#1234'}) + + + def test_one_of(self): + # _one_of expects a node + self.assertRaises(errors.BackendError, self.parser._one_of, + self.schema.literal(ns.bsfs.Literal), + ast.filter.OneOf(ast.filter.Predicate(ns.bse.filesize))) + # invalid predicate for node type raises an error + self.assertRaises(errors.ConsistencyError, self.parser._one_of, + self.schema.node(ns.bsfs.Node), + ast.filter.OneOf(ast.filter.Predicate(ns.bse.filesize))) + self.assertRaises(errors.ConsistencyError, self.parser, + self.schema.node(ns.bsfs.Tag), + ast.filter.Any(ast.filter.OneOf(ast.filter.Predicate(ns.bse.filesize)), ast.filter.Equals(1234))) + self.assertRaises(errors.BackendError, self.parser._one_of, + self.schema.node(ns.bsfs.Node), + ast.filter.OneOf(ast.filter.Predicate(ns.bsfs.Predicate))) + # invalid predicate combinations raise an error + self.assertRaises(errors.ConsistencyError, self.parser._one_of, + self.schema.node(ns.bsfs.Node), + ast.filter.OneOf( + ast.filter.Predicate(ns.bse.filesize), + ast.filter.Predicate(ns.bse.representative))) + # _one_of returns the URI and range + q = self.parser._one_of(self.schema.node(ns.bsfs.Image), + ast.filter.OneOf( + ast.filter.Predicate(ns.bse.iso), + ast.filter.Predicate(ns.bse.filesize))) + self.assertTrue(q[0] == f'<{ns.bse.iso}>|<{ns.bse.filesize}>' or q[0] == f'<{ns.bse.filesize}>|<{ns.bse.iso}>') + self.assertEqual(q[1], self.schema.literal(ns.xsd.integer)) + # OneOf can be nested + q = self.parser._one_of(self.schema.node(ns.bsfs.Image), + ast.filter.OneOf( + ast.filter.Predicate(ns.bse.iso), + ast.filter.OneOf( + ast.filter.Predicate(ns.bse.filesize)))) + self.assertTrue(q[0] == f'<{ns.bse.iso}>|<{ns.bse.filesize}>' or q[0] == f'<{ns.bse.filesize}>|<{ns.bse.iso}>') + self.assertEqual(q[1], self.schema.literal(ns.xsd.integer)) + # _one_of returns the most generic range + q = self.parser._one_of(self.schema.node(ns.bsfs.Entity), + ast.filter.OneOf( + ast.filter.Predicate(ns.bse.tag), + ast.filter.Predicate(ns.bse.buddy))) + self.assertTrue(q[0] == f'<{ns.bse.tag}>|<{ns.bse.buddy}>' or q[0] == f'<{ns.bse.buddy}>|<{ns.bse.tag}>') + 
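# Context for the assertion above: '<p>|<q>' is a SPARQL 1.1 alternative
# property path, i.e. a pattern built from it matches through either predicate
# (and '^<p>', asserted further below for reverse=True, is the inverse path).
# Illustrative use of the returned pair, assuming q = (path, range) as checked here:
pattern = f'?node {q[0]} ?value .'  # e.g. '?node <.../iso>|<.../filesize> ?value .'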
self.assertEqual(q[1], self.schema.node(ns.bsfs.Node)) + # domains must match the given type + self.assertRaises(errors.ConsistencyError, self.parser, + self.schema.node(ns.bsfs.Entity), + ast.filter.Any(ast.filter.OneOf(ns.bse.tag, ns.bse.buddy), + ast.filter.Any(ast.filter.OneOf(ns.bse.filesize), + ast.filter.Equals(1234)))) + # ranges must have the same type (Node/Literal) + self.assertRaises(errors.ConsistencyError, self.parser, + self.schema.node(ns.bsfs.Entity), + ast.filter.Any(ast.filter.OneOf(ns.bse.tag, ns.bse.filesize), + ast.filter.Equals(1234))) + # ranges must be related + self.assertRaises(errors.ConsistencyError, self.parser, + self.schema.node(ns.bsfs.Entity), + ast.filter.Any(ast.filter.OneOf(ns.bse.comment, ns.bse.filesize), + ast.filter.Equals(1234))) + # integration: _one_of returns a valid sparql query + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Any(ast.filter.OneOf(ns.bse.tag, ns.bse.buddy), + ast.filter.Any(ast.filter.OneOf(ns.bse.comment), + ast.filter.Equals('Me, Myself, and I')))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + + + def test_predicate(self): + # predicate cannot be the root predicate (ns.bsfs.Predicate) + self.assertRaises(errors.BackendError, self.parser._predicate, self.schema.node(ns.bsfs.Node), ast.filter.Predicate(ns.bsfs.Predicate)) + # _predicate expects a node + self.assertRaises(errors.BackendError, self.parser._predicate, + self.schema.literal(ns.bsfs.Literal), + ast.filter.Predicate(ns.bse.filesize)) + # invalid predicate for node type raises an error + self.assertRaises(errors.ConsistencyError, self.parser._predicate, + self.schema.node(ns.bsfs.Node), + ast.filter.Predicate(ns.bse.filesize)) + self.assertRaises(errors.ConsistencyError, self.parser, + self.schema.node(ns.bsfs.Tag), + ast.filter.Any(ast.filter.Predicate(ns.bse.filesize), ast.filter.Equals(1234))) + # _predicate returns the URI and range + self.assertEqual(self.parser._predicate(self.schema.node(ns.bsfs.Entity), ast.filter.Predicate(ns.bse.filesize)), + (f'<{ns.bse.filesize}>', self.schema.literal(ns.xsd.integer))) + self.assertEqual(self.parser._predicate(self.schema.node(ns.bsfs.Entity), ast.filter.Predicate(ns.bse.tag)), + (f'<{ns.bse.tag}>', self.schema.node(ns.bsfs.Tag))) + # _predicate respects reverse flag + self.assertEqual(self.parser._predicate(self.schema.node(ns.bsfs.Tag), ast.filter.Predicate(ns.bse.tag, reverse=True)), + ('^<' + ns.bse.tag + '>', self.schema.node(ns.bsfs.Entity))) + # integration: _predicate returns a valid sparql query + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Any(ns.bse.tag, + ast.filter.Any(ns.bse.representative, + ast.filter.Any(ns.bse.filesize, + ast.filter.Equals(1234))))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + q = self.parser(self.schema.node(ns.bsfs.Tag), + ast.filter.Any(ast.filter.Predicate(ns.bse.tag, reverse=True), + ast.filter.Any(ns.bse.filesize, + ast.filter.LessThan(2000)))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/tag#1234'}) + + +## main ## + +if __name__ == '__main__': + unittest.main() + +## EOF ## diff --git a/test/triple_store/sparql/test_sparql.py b/test/triple_store/sparql/test_sparql.py index 0bf664a..3d81de1 100644 --- a/test/triple_store/sparql/test_sparql.py +++ b/test/triple_store/sparql/test_sparql.py @@ -11,6 +11,7 @@ import 
unittest # bsie imports from bsfs import schema as _schema from bsfs.namespace import ns +from bsfs.query import ast from bsfs.utils import errors, URI # objects to test @@ -59,6 +60,18 @@ class TestSparqlStore(unittest.TestCase): bsfs:unique "true"^^xsd:boolean . ''') + self.schema_triples = { + # schema hierarchy + (rdflib.URIRef(ns.bsfs.Entity), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), + (rdflib.URIRef(ns.bsfs.Tag), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), + (rdflib.URIRef(ns.bsfs.User), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), + (rdflib.URIRef(ns.xsd.string), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.xsd.integer), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.bse.comment), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + (rdflib.URIRef(ns.bse.filesize), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + (rdflib.URIRef(ns.bse.tag), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + (rdflib.URIRef(ns.bse.author), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + } def test_essentials(self): store = SparqlStore.Open() @@ -155,7 +168,7 @@ class TestSparqlStore(unittest.TestCase): store.set(curr.node(ns.bsfs.Entity), ent_ids, p_author, {URI('http://example.com/me')}) # check instances - instances = { + instances = self.schema_triples | { # node instances (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), @@ -228,7 +241,16 @@ class TestSparqlStore(unittest.TestCase): store.schema = curr self.assertEqual(store.schema, curr) # instances have not changed - self.assertSetEqual(set(store._graph), instances) + self.assertSetEqual(set(store._graph), instances | { + # schema hierarchy + (rdflib.URIRef(ns.bsfs.Collection), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), + (rdflib.URIRef(ns.xsd.boolean), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.bse.shared), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + (rdflib.URIRef(ns.bse.partOf), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + (rdflib.URIRef('http://bsfs.ai/schema/Tag#usedIn'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + (rdflib.URIRef('http://bsfs.ai/schema/Collection#tag'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + (rdflib.URIRef('http://bsfs.ai/schema/Tag#principal'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + }) # add some instances of the new classes p_partOf = curr.predicate(ns.bse.partOf) p_shared = curr.predicate(ns.bse.shared) @@ -248,6 +270,14 @@ class TestSparqlStore(unittest.TestCase): {URI('http://example.com/me/collection#1234')}) # new instances are now in the graph self.assertSetEqual(set(store._graph), instances | { + # same old schema hierarchy + (rdflib.URIRef(ns.bsfs.Collection), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), + (rdflib.URIRef(ns.xsd.boolean), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.bse.shared), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + (rdflib.URIRef(ns.bse.partOf), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + (rdflib.URIRef('http://bsfs.ai/schema/Tag#usedIn'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + (rdflib.URIRef('http://bsfs.ai/schema/Collection#tag'), rdflib.RDFS.subClassOf, 
rdflib.URIRef(ns.bsfs.Predicate)), + (rdflib.URIRef('http://bsfs.ai/schema/Tag#principal'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), # collections (rdflib.URIRef('http://example.com/me/collection#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Collection)), (rdflib.URIRef('http://example.com/me/collection#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Collection)), @@ -316,6 +346,16 @@ class TestSparqlStore(unittest.TestCase): self.assertEqual(store.schema, curr) # instances of old classes were removed self.assertSetEqual(set(store._graph), { + # schema hierarchy + (rdflib.URIRef(ns.bsfs.Entity), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), + (rdflib.URIRef(ns.bsfs.Tag), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), + (rdflib.URIRef(ns.bsfs.User), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), + (rdflib.URIRef(ns.xsd.boolean), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.xsd.integer), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.bse.shared), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + (rdflib.URIRef(ns.bse.tag), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + (rdflib.URIRef(ns.bse.filesize), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + (rdflib.URIRef('http://bsfs.ai/schema/Tag#principal'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), # node instances (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), @@ -390,7 +430,7 @@ class TestSparqlStore(unittest.TestCase): ent_ids = {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')} tag_ids = {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')} # target instances - instances = { + instances = self.schema_triples | { # node instances (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), @@ -416,7 +456,7 @@ class TestSparqlStore(unittest.TestCase): # rollback undoes previous changes store.rollback() - self.assertSetEqual(set(store._graph), set()) + self.assertSetEqual(set(store._graph), self.schema_triples) # add some data once more store.create(ent_type, ent_ids) @@ -456,7 +496,38 @@ class TestSparqlStore(unittest.TestCase): }) def test_get(self): - raise NotImplementedError() + # store setup + store = SparqlStore.Open() + store.schema = self.schema + ent_type = self.schema.node(ns.bsfs.Entity) + tag_type = self.schema.node(ns.bsfs.Tag) + ent_ids = {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')} + tag_ids = {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')} + store.create(ent_type, ent_ids) + store.create(tag_type, tag_ids) + store.set(ent_type, ent_ids, self.schema.predicate(ns.bse.tag), tag_ids) + store.set(ent_type, {URI('http://example.com/me/entity#1234')}, self.schema.predicate(ns.bse.filesize), {1234}) + store.set(ent_type, {URI('http://example.com/me/entity#4321')}, self.schema.predicate(ns.bse.filesize), {4321}) + # node_type must be in the schema + self.assertRaises(errors.ConsistencyError, set, store.get(self.schema.node(ns.bsfs.Node).get_child(ns.bsfs.Invalid), ast.filter.IsIn(ent_ids))) + # query must be a filter expression + class Foo(): pass + 
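# store.get presumably evaluates lazily, so set() consumes the result inside assertRaises to surface the error +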
self.assertRaises(TypeError, set, store.get(ent_type, 1234)) + self.assertRaises(TypeError, set, store.get(ent_type, '1234')) + self.assertRaises(TypeError, set, store.get(ent_type, Foo())) + # run some queries + self.assertSetEqual(set(store.get(tag_type, ast.filter.IsIn(tag_ids))), tag_ids) + self.assertSetEqual(set(store.get(ent_type, ast.filter.Any(ns.bse.tag, ast.filter.IsIn(tag_ids)))), ent_ids) + self.assertSetEqual(set(store.get(ent_type, ast.filter.IsIn(tag_ids))), set()) + # invalid queries raise error + self.assertRaises(errors.ConsistencyError, set, store.get(tag_type, ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234)))) + self.assertRaises(errors.BackendError, set, store.get(ent_type, ast.filter.Equals('http://example.com/me/entity#1234'))) + # run some more complex query + q = store.get(tag_type, ast.filter.Any(ast.filter.Predicate(ns.bse.tag, reverse=True), + ast.filter.Any(ns.bse.filesize, + ast.filter.LessThan(2000)))) + self.assertSetEqual(set(q), tag_ids) + def test_exists(self): # store setup @@ -509,14 +580,15 @@ class TestSparqlStore(unittest.TestCase): # can create some nodes ent_type = store.schema.node(ns.bsfs.Entity) store.create(ent_type, {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')}) - self.assertSetEqual(set(store._graph), { + self.assertSetEqual(set(store._graph), self.schema_triples | { + # instances (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), }) # existing nodes are skipped store.create(ent_type, {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#5678')}) - self.assertSetEqual(set(store._graph), { + self.assertSetEqual(set(store._graph), self.schema_triples | { # previous triples (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), @@ -527,7 +599,7 @@ class TestSparqlStore(unittest.TestCase): # can create nodes of a different type tag_type = store.schema.node(ns.bsfs.Tag) store.create(tag_type, {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')}) - self.assertSetEqual(set(store._graph), { + self.assertSetEqual(set(store._graph), self.schema_triples | { # previous triples (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), @@ -540,7 +612,7 @@ class TestSparqlStore(unittest.TestCase): # creation does not change types of existing nodes tag_type = store.schema.node(ns.bsfs.Tag) store.create(tag_type, {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')}) - self.assertSetEqual(set(store._graph), { + self.assertSetEqual(set(store._graph), self.schema_triples | { # previous triples (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), -- cgit v1.2.3 From 6fd984e694b0a7b749ab947211d792f5b011ee6f Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Thu, 12 Jan 2023 08:44:25 +0100 Subject: renamed get_child to child in schema.types._Type and _Vertex to Vertex in schema.types --- test/triple_store/sparql/test_parse_filter.py | 2 +- 
test/triple_store/sparql/test_sparql.py | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) (limited to 'test/triple_store') diff --git a/test/triple_store/sparql/test_parse_filter.py b/test/triple_store/sparql/test_parse_filter.py index bd19803..bd967e5 100644 --- a/test/triple_store/sparql/test_parse_filter.py +++ b/test/triple_store/sparql/test_parse_filter.py @@ -124,7 +124,7 @@ class TestParseFilter(unittest.TestCase): # __call__ requires a valid root type self.assertRaises(errors.BackendError, self.parser, self.schema.literal(ns.bsfs.Literal), None) - self.assertRaises(errors.ConsistencyError, self.parser, self.schema.node(ns.bsfs.Node).get_child(ns.bsfs.Invalid), None) + self.assertRaises(errors.ConsistencyError, self.parser, self.schema.node(ns.bsfs.Node).child(ns.bsfs.Invalid), None) # __call__ requires a parseable root self.assertRaises(errors.BackendError, self.parser, self.schema.node(ns.bsfs.Entity), ast.filter.FilterExpression()) # __call__ returns an executable query diff --git a/test/triple_store/sparql/test_sparql.py b/test/triple_store/sparql/test_sparql.py index 3d81de1..25a0b15 100644 --- a/test/triple_store/sparql/test_sparql.py +++ b/test/triple_store/sparql/test_sparql.py @@ -108,7 +108,7 @@ class TestSparqlStore(unittest.TestCase): store.create(store.schema.node(ns.bsfs.PDF), {URI('http://example.com/me/pdf#1234')}) # node_type must be in the schema - self.assertRaises(errors.ConsistencyError, store._has_type, URI('http://example.com/me/entity#1234'), store.schema.node(ns.bsfs.Node).get_child(ns.bsfs.invalid)) + self.assertRaises(errors.ConsistencyError, store._has_type, URI('http://example.com/me/entity#1234'), store.schema.node(ns.bsfs.Node).child(ns.bsfs.invalid)) # returns False on inexistent nodes self.assertFalse(store._has_type(URI('http://example.com/me/entity#4321'), store.schema.node(ns.bsfs.Entity))) @@ -509,7 +509,7 @@ class TestSparqlStore(unittest.TestCase): store.set(ent_type, {URI('http://example.com/me/entity#1234')}, self.schema.predicate(ns.bse.filesize), {1234}) store.set(ent_type, {URI('http://example.com/me/entity#4321')}, self.schema.predicate(ns.bse.filesize), {4321}) # node_type must be in the schema - self.assertRaises(errors.ConsistencyError, set, store.get(self.schema.node(ns.bsfs.Node).get_child(ns.bsfs.Invalid), ast.filter.IsIn(ent_ids))) + self.assertRaises(errors.ConsistencyError, set, store.get(self.schema.node(ns.bsfs.Node).child(ns.bsfs.Invalid), ast.filter.IsIn(ent_ids))) # query must be a filter expression class Foo(): pass self.assertRaises(TypeError, set, store.get(ent_type, 1234)) @@ -574,7 +574,7 @@ class TestSparqlStore(unittest.TestCase): store.schema = self.schema # node type must be valid - self.assertRaises(errors.ConsistencyError, store.create, self.schema.node(ns.bsfs.Entity).get_child(ns.bsfs.invalid), { + self.assertRaises(errors.ConsistencyError, store.create, self.schema.node(ns.bsfs.Entity).child(ns.bsfs.invalid), { URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')}) # can create some nodes @@ -636,7 +636,7 @@ class TestSparqlStore(unittest.TestCase): p_comment = store.schema.predicate(ns.bse.comment) p_author = store.schema.predicate(ns.bse.author) p_tag = store.schema.predicate(ns.bse.tag) - p_invalid = store.schema.predicate(ns.bsfs.Predicate).get_child(ns.bsfs.foo, range=store.schema.node(ns.bsfs.Tag)) + p_invalid = store.schema.predicate(ns.bsfs.Predicate).child(ns.bsfs.foo, range=store.schema.node(ns.bsfs.Tag)) # create node instances ent_ids = { 
URI('http://example.com/me/entity#1234'), @@ -659,7 +659,7 @@ class TestSparqlStore(unittest.TestCase): store.create(user_type, user_ids) # invalid node_type is not permitted - self.assertRaises(errors.ConsistencyError, store.set, self.schema.node(ns.bsfs.Node).get_child(ns.bse.foo), + self.assertRaises(errors.ConsistencyError, store.set, self.schema.node(ns.bsfs.Node).child(ns.bse.foo), ent_ids, p_comment, {'hello world'}) # invalid predicate is not permitted -- cgit v1.2.3 From 6b3e32b29799a8143e8ce9d20c5f27e3e166b9bb Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Thu, 12 Jan 2023 10:17:07 +0100 Subject: changed path to from_string in clients --- test/triple_store/sparql/test_parse_filter.py | 4 ++-- test/triple_store/sparql/test_sparql.py | 14 +++++++------- 2 files changed, 9 insertions(+), 9 deletions(-) (limited to 'test/triple_store') diff --git a/test/triple_store/sparql/test_parse_filter.py b/test/triple_store/sparql/test_parse_filter.py index bd967e5..1d96994 100644 --- a/test/triple_store/sparql/test_parse_filter.py +++ b/test/triple_store/sparql/test_parse_filter.py @@ -9,7 +9,7 @@ import rdflib import unittest # bsie imports -from bsfs import schema as _schema +from bsfs import schema as bsc from bsfs.namespace import ns from bsfs.query import ast from bsfs.utils import errors @@ -23,7 +23,7 @@ from bsfs.triple_store.sparql.parse_filter import Filter class TestParseFilter(unittest.TestCase): def setUp(self): # schema - self.schema = _schema.Schema.from_string(''' + self.schema = bsc.from_string(''' prefix rdfs: prefix xsd: diff --git a/test/triple_store/sparql/test_sparql.py b/test/triple_store/sparql/test_sparql.py index 25a0b15..5342925 100644 --- a/test/triple_store/sparql/test_sparql.py +++ b/test/triple_store/sparql/test_sparql.py @@ -9,7 +9,7 @@ import rdflib import unittest # bsie imports -from bsfs import schema as _schema +from bsfs import schema as bsc from bsfs.namespace import ns from bsfs.query import ast from bsfs.utils import errors, URI @@ -22,7 +22,7 @@ from bsfs.triple_store.sparql.sparql import SparqlStore class TestSparqlStore(unittest.TestCase): def setUp(self): - self.schema = _schema.Schema.from_string(''' + self.schema = bsc.from_string(''' prefix rdfs: prefix xsd: @@ -90,7 +90,7 @@ class TestSparqlStore(unittest.TestCase): def test__has_type(self): # setup store store = SparqlStore.Open() - store.schema = _schema.Schema.from_string(''' + store.schema = bsc.from_string(''' prefix rdfs: prefix xsd: prefix bsfs: @@ -195,7 +195,7 @@ class TestSparqlStore(unittest.TestCase): self.assertSetEqual(set(store._graph), instances) # add some classes to the schema - curr = curr + _schema.Schema.from_string(''' + curr = curr + bsc.from_string(''' prefix rdfs: prefix xsd: prefix bsfs: @@ -298,7 +298,7 @@ class TestSparqlStore(unittest.TestCase): # remove some classes from the schema - curr = _schema.Schema.from_string(''' + curr = bsc.from_string(''' prefix rdfs: prefix xsd: prefix bsfs: @@ -383,7 +383,7 @@ class TestSparqlStore(unittest.TestCase): self.assertRaises(TypeError, setattr, store, 'schema', Foo()) # cannot migrate to incompatible schema - invalid = _schema.Schema.from_string(''' + invalid = bsc.from_string(''' prefix rdfs: prefix xsd: prefix bsfs: @@ -399,7 +399,7 @@ class TestSparqlStore(unittest.TestCase): ''') self.assertRaises(errors.ConsistencyError, setattr, store, 'schema', invalid) - invalid = _schema.Schema.from_string(''' + invalid = bsc.from_string(''' prefix rdfs: prefix xsd: prefix bsfs: -- cgit v1.2.3 From 
b0ff4ed674ad78bf113c3cc0c2ccd187ccb91048 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Thu, 12 Jan 2023 10:26:30 +0100 Subject: number literal adaptions --- test/triple_store/sparql/test_parse_filter.py | 3 ++- test/triple_store/sparql/test_sparql.py | 12 ++++++++---- 2 files changed, 10 insertions(+), 5 deletions(-) (limited to 'test/triple_store') diff --git a/test/triple_store/sparql/test_parse_filter.py b/test/triple_store/sparql/test_parse_filter.py index 1d96994..f6842c5 100644 --- a/test/triple_store/sparql/test_parse_filter.py +++ b/test/triple_store/sparql/test_parse_filter.py @@ -35,7 +35,8 @@ class TestParseFilter(unittest.TestCase): bsfs:Tag rdfs:subClassOf bsfs:Node . xsd:string rdfs:subClassOf bsfs:Literal . - xsd:integer rdfs:subClassOf bsfs:Literal . + bsfs:Number rdfs:subClassOf bsfs:Literal . + xsd:integer rdfs:subClassOf bsfs:Number . bsfs:URI rdfs:subClassOf bsfs:Literal . bse:comment rdfs:subClassOf bsfs:Predicate ; diff --git a/test/triple_store/sparql/test_sparql.py b/test/triple_store/sparql/test_sparql.py index 5342925..5b71016 100644 --- a/test/triple_store/sparql/test_sparql.py +++ b/test/triple_store/sparql/test_sparql.py @@ -33,7 +33,8 @@ class TestSparqlStore(unittest.TestCase): bsfs:Tag rdfs:subClassOf bsfs:Node . bsfs:User rdfs:subClassOf bsfs:Node . xsd:string rdfs:subClassOf bsfs:Literal . - xsd:integer rdfs:subClassOf bsfs:Literal . + bsfs:Number rdfs:subClassOf bsfs:Literal . + xsd:integer rdfs:subClassOf bsfs:Number . # non-unique literal bse:comment rdfs:subClassOf bsfs:Predicate ; @@ -66,7 +67,8 @@ class TestSparqlStore(unittest.TestCase): (rdflib.URIRef(ns.bsfs.Tag), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), (rdflib.URIRef(ns.bsfs.User), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), (rdflib.URIRef(ns.xsd.string), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), - (rdflib.URIRef(ns.xsd.integer), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.bsfs.Number), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.xsd.integer), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Number)), (rdflib.URIRef(ns.bse.comment), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), (rdflib.URIRef(ns.bse.filesize), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), (rdflib.URIRef(ns.bse.tag), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), @@ -310,7 +312,8 @@ class TestSparqlStore(unittest.TestCase): bsfs:User rdfs:subClassOf bsfs:Node . xsd:boolean rdfs:subClassOf bsfs:Literal . - xsd:integer rdfs:subClassOf bsfs:Literal . + bsfs:Number rdfs:subClassOf bsfs:Literal . + xsd:integer rdfs:subClassOf bsfs:Number . 
bse:filesize rdfs:subClassOf bsfs:Predicate ; rdfs:domain bsfs:Entity ; @@ -351,7 +354,8 @@ class TestSparqlStore(unittest.TestCase): (rdflib.URIRef(ns.bsfs.Tag), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), (rdflib.URIRef(ns.bsfs.User), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), (rdflib.URIRef(ns.xsd.boolean), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), - (rdflib.URIRef(ns.xsd.integer), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.bsfs.Number), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.xsd.integer), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Number)), (rdflib.URIRef(ns.bse.shared), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), (rdflib.URIRef(ns.bse.tag), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), (rdflib.URIRef(ns.bse.filesize), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), -- cgit v1.2.3 From e708016ae366e96051281f3a744af35a8c06d98b Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Thu, 12 Jan 2023 10:28:16 +0100 Subject: cleanup and cosmetic changes --- test/triple_store/sparql/test_sparql.py | 2 ++ 1 file changed, 2 insertions(+) (limited to 'test/triple_store') diff --git a/test/triple_store/sparql/test_sparql.py b/test/triple_store/sparql/test_sparql.py index 5b71016..aa5dfc7 100644 --- a/test/triple_store/sparql/test_sparql.py +++ b/test/triple_store/sparql/test_sparql.py @@ -63,6 +63,7 @@ class TestSparqlStore(unittest.TestCase): ''') self.schema_triples = { # schema hierarchy + (rdflib.URIRef(ns.bsfs.Feature), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), (rdflib.URIRef(ns.bsfs.Entity), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), (rdflib.URIRef(ns.bsfs.Tag), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), (rdflib.URIRef(ns.bsfs.User), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), @@ -350,6 +351,7 @@ class TestSparqlStore(unittest.TestCase): # instances of old classes were removed self.assertSetEqual(set(store._graph), { # schema hierarchy + (rdflib.URIRef(ns.bsfs.Feature), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), (rdflib.URIRef(ns.bsfs.Entity), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), (rdflib.URIRef(ns.bsfs.Tag), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), (rdflib.URIRef(ns.bsfs.User), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), -- cgit v1.2.3 From ccaee71e2b6135d3b324fe551c8652940b67aab3 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Sun, 15 Jan 2023 20:57:42 +0100 Subject: Feature as Literal instead of Predicate subtype --- test/triple_store/sparql/test_parse_filter.py | 5 ++++- test/triple_store/sparql/test_sparql.py | 8 ++++++-- 2 files changed, 10 insertions(+), 3 deletions(-) (limited to 'test/triple_store') diff --git a/test/triple_store/sparql/test_parse_filter.py b/test/triple_store/sparql/test_parse_filter.py index f6842c5..5c16f11 100644 --- a/test/triple_store/sparql/test_parse_filter.py +++ b/test/triple_store/sparql/test_parse_filter.py @@ -30,12 +30,15 @@ class TestParseFilter(unittest.TestCase): prefix bsfs: prefix bse: + bsfs:Array rdfs:subClassOf bsfs:Literal . + bsfs:Feature rdfs:subClassOf bsfs:Array . + bsfs:Number rdfs:subClassOf bsfs:Literal . + bsfs:Entity rdfs:subClassOf bsfs:Node . bsfs:Image rdfs:subClassOf bsfs:Entity . bsfs:Tag rdfs:subClassOf bsfs:Node . xsd:string rdfs:subClassOf bsfs:Literal . - bsfs:Number rdfs:subClassOf bsfs:Literal . xsd:integer rdfs:subClassOf bsfs:Number . 
bsfs:URI rdfs:subClassOf bsfs:Literal . diff --git a/test/triple_store/sparql/test_sparql.py b/test/triple_store/sparql/test_sparql.py index aa5dfc7..1f56a7e 100644 --- a/test/triple_store/sparql/test_sparql.py +++ b/test/triple_store/sparql/test_sparql.py @@ -63,12 +63,14 @@ class TestSparqlStore(unittest.TestCase): ''') self.schema_triples = { # schema hierarchy - (rdflib.URIRef(ns.bsfs.Feature), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), (rdflib.URIRef(ns.bsfs.Entity), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), (rdflib.URIRef(ns.bsfs.Tag), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), (rdflib.URIRef(ns.bsfs.User), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), (rdflib.URIRef(ns.xsd.string), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.bsfs.Array), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.bsfs.Feature), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Array)), (rdflib.URIRef(ns.bsfs.Number), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.bsfs.Time), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), (rdflib.URIRef(ns.xsd.integer), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Number)), (rdflib.URIRef(ns.bse.comment), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), (rdflib.URIRef(ns.bse.filesize), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), @@ -351,12 +353,14 @@ class TestSparqlStore(unittest.TestCase): # instances of old classes were removed self.assertSetEqual(set(store._graph), { # schema hierarchy - (rdflib.URIRef(ns.bsfs.Feature), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), (rdflib.URIRef(ns.bsfs.Entity), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), (rdflib.URIRef(ns.bsfs.Tag), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), (rdflib.URIRef(ns.bsfs.User), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), (rdflib.URIRef(ns.xsd.boolean), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.bsfs.Array), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.bsfs.Feature), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Array)), (rdflib.URIRef(ns.bsfs.Number), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.bsfs.Time), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), (rdflib.URIRef(ns.xsd.integer), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Number)), (rdflib.URIRef(ns.bse.shared), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), (rdflib.URIRef(ns.bse.tag), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), -- cgit v1.2.3 From 80a97bfa9f22d0d6dd25928fe1754a3a0d1de78a Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Sun, 15 Jan 2023 21:00:12 +0100 Subject: Distance filter ast node --- test/triple_store/sparql/test_distance.py | 61 +++++++++++++++++++++++++++ test/triple_store/sparql/test_parse_filter.py | 50 ++++++++++++++++++++-- test/triple_store/sparql/test_sparql.py | 17 ++++++++ 3 files changed, 125 insertions(+), 3 deletions(-) create mode 100644 test/triple_store/sparql/test_distance.py (limited to 'test/triple_store') diff --git a/test/triple_store/sparql/test_distance.py b/test/triple_store/sparql/test_distance.py new file mode 100644 index 0000000..0659459 --- /dev/null +++ b/test/triple_store/sparql/test_distance.py @@ -0,0 +1,61 @@ +""" + +Part of the bsfs test suite. +A copy of the license is provided with the project. 
+Author: Matthias Baumgartner, 2022 +""" +# imports +import numpy as np +import unittest + +# objects to test +from bsfs.triple_store.sparql import distance + + +## code ## + +class TestDistance(unittest.TestCase): + + def test_euclid(self): + # self-distance is zero + self.assertEqual(distance.euclid([1,2,3,4], [1,2,3,4]), 0.0) + # accepts list-like arguments + self.assertAlmostEqual(distance.euclid([1,2,3,4], [2,3,4,5]), 2.0, 3) + self.assertAlmostEqual(distance.euclid((1,2,3,4), (2,3,4,5)), 2.0, 3) + # dimension can vary + self.assertAlmostEqual(distance.euclid([1,2,3], [2,3,4]), 1.732, 3) + self.assertAlmostEqual(distance.euclid([1,2,3,4,5], [2,3,4,5,6]), 2.236, 3) + # vector can be zero + self.assertAlmostEqual(distance.euclid([0,0,0], [1,2,3]), 3.742, 3) + + def test_cosine(self): + # self-distance is zero + self.assertEqual(distance.cosine([1,2,3,4], [1,2,3,4]), 0.0) + # accepts list-like arguments + self.assertAlmostEqual(distance.cosine([1,2,3,4], [4,3,2,1]), 0.333, 3) + self.assertAlmostEqual(distance.cosine((1,2,3,4), (4,3,2,1)), 0.333, 3) + # dimension can vary + self.assertAlmostEqual(distance.cosine([1,2,3], [3,2,1]), 0.286, 3) + self.assertAlmostEqual(distance.cosine([1,2,3,4,5], [5,4,3,2,1]), 0.364, 3) + # vector can be zero + self.assertAlmostEqual(distance.cosine([0,0,0], [1,2,3]), 1.0, 3) + + def test_manhatten(self): + # self-distance is zero + self.assertEqual(distance.manhatten([1,2,3,4], [1,2,3,4]), 0.0) + # accepts list-like arguments + self.assertAlmostEqual(distance.manhatten([1,2,3,4], [2,3,4,5]), 4.0, 3) + self.assertAlmostEqual(distance.manhatten((1,2,3,4), (2,3,4,5)), 4.0, 3) + # dimension can vary + self.assertAlmostEqual(distance.manhatten([1,2,3], [2,3,4]), 3.0, 3) + self.assertAlmostEqual(distance.manhatten([1,2,3,4,5], [2,3,4,5,6]), 5.0, 3) + # vector can be zero + self.assertAlmostEqual(distance.manhatten([0,0,0], [1,2,3]), 6.0, 3) + + +## main ## + +if __name__ == '__main__': + unittest.main() + +## EOF ## diff --git a/test/triple_store/sparql/test_parse_filter.py b/test/triple_store/sparql/test_parse_filter.py index 5c16f11..8764535 100644 --- a/test/triple_store/sparql/test_parse_filter.py +++ b/test/triple_store/sparql/test_parse_filter.py @@ -42,6 +42,15 @@ class TestParseFilter(unittest.TestCase): xsd:integer rdfs:subClassOf bsfs:Number . bsfs:URI rdfs:subClassOf bsfs:Literal . + bsfs:Colors rdfs:subClassOf bsfs:Feature ; + bsfs:dimension "4"^^xsd:integer ; + bsfs:dtype xsd:integer ; + bsfs:distance bsfs:euclidean . + + bse:colors rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range bsfs:Colors . 
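+ # bse:colors attaches a 4-dimensional integer feature vector (bsfs:Colors) to entities; comparisons use the euclidean distance declared above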
+ bse:comment rdfs:subClassOf bsfs:Predicate ; rdfs:domain bsfs:Node ; rdfs:range xsd:string ; @@ -74,9 +83,6 @@ class TestParseFilter(unittest.TestCase): ''') - # parser instance - self.parser = Filter(self.schema) - # graph to test queries self.graph = rdflib.Graph() # schema hierarchies @@ -117,6 +123,13 @@ class TestParseFilter(unittest.TestCase): # image iso self.graph.add((rdflib.URIRef('http://example.com/image#1234'), rdflib.URIRef(ns.bse.iso), rdflib.Literal(1234, datatype=rdflib.XSD.integer))) self.graph.add((rdflib.URIRef('http://example.com/image#4321'), rdflib.URIRef(ns.bse.iso), rdflib.Literal(4321, datatype=rdflib.XSD.integer))) + # color features + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.colors), rdflib.Literal([1,2,3,4], datatype=rdflib.URIRef(ns.bsfs.Colors)))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(ns.bse.colors), rdflib.Literal([4,3,2,1], datatype=rdflib.URIRef(ns.bsfs.Colors)))) + self.graph.add((rdflib.URIRef('http://example.com/image#1234'), rdflib.URIRef(ns.bse.colors), rdflib.Literal([3,4,2,1], datatype=rdflib.URIRef(ns.bsfs.Colors)))) + + # parser instance + self.parser = Filter(self.graph, self.schema) def test_routing(self): @@ -617,6 +630,37 @@ class TestParseFilter(unittest.TestCase): {'http://example.com/tag#1234'}) + def test_distance(self): + # node colors distance to [2,4,3,1] + # entity#1234 [1,2,3,4] 3.742 + # entity#4321 [4,3,2,1] 2.449 + # image#1234 [3,4,2,1] 1.414 + + # _distance expects a feature + self.assertRaises(errors.BackendError, self.parser._distance, self.schema.node(ns.bsfs.Entity), ast.filter.Distance([1,2,3,4], 1), '') + # reference must have the correct dimension + self.assertRaises(errors.ConsistencyError, self.parser._distance, self.schema.literal(ns.bsfs.Colors), ast.filter.Distance([1,2,3], 1), '') + self.assertRaises(errors.ConsistencyError, self.parser._distance, self.schema.literal(ns.bsfs.Colors), ast.filter.Distance([1,2,3,4,5], 1), '') + # _distance respects threshold + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.colors, ast.filter.Distance([2,4,3,1], 4))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234'}) + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.colors, ast.filter.Distance([2,4,3,1], 3))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#4321', 'http://example.com/image#1234'}) + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.colors, ast.filter.Distance([2,4,3,1], 2))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/image#1234'}) + # result set can be empty + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.colors, ast.filter.Distance([2,4,3,1], 1))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set()) + # _distance respects strict + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.colors, ast.filter.Distance([1,2,3,4], 0, False))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234'}) + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.colors, ast.filter.Distance([1,2,3,4], 0, True))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set()) + def test_one_of(self): # _one_of expects a node 
self.assertRaises(errors.BackendError, self.parser._one_of, diff --git a/test/triple_store/sparql/test_sparql.py b/test/triple_store/sparql/test_sparql.py index 1f56a7e..435ca28 100644 --- a/test/triple_store/sparql/test_sparql.py +++ b/test/triple_store/sparql/test_sparql.py @@ -392,6 +392,23 @@ class TestSparqlStore(unittest.TestCase): class Foo(): pass self.assertRaises(TypeError, setattr, store, 'schema', Foo()) + # cannot define features w/o known distance function + invalid = bsc.from_string(''' + prefix rdfs: + prefix xsd: + prefix bsfs: + prefix bse: + + bsfs:Array rdfs:subClassOf bsfs:Literal . + bsfs:Feature rdfs:subClassOf bsfs:Array . + + bsfs:Colors rdfs:subClassOf bsfs:Feature ; + bsfs:dimension "4"^^xsd:integer ; + bsfs:distance bsfs:foobar . + + ''') + self.assertRaises(ValueError, setattr, store, 'schema', invalid) + # cannot migrate to incompatible schema invalid = bsc.from_string(''' prefix rdfs: -- cgit v1.2.3 From 3504609e1ba1f7f653fa79910474bebd3ec24d8a Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Mon, 16 Jan 2023 21:41:20 +0100 Subject: various minor fixes --- test/triple_store/sparql/test_sparql.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'test/triple_store') diff --git a/test/triple_store/sparql/test_sparql.py b/test/triple_store/sparql/test_sparql.py index 435ca28..7fbfb65 100644 --- a/test/triple_store/sparql/test_sparql.py +++ b/test/triple_store/sparql/test_sparql.py @@ -407,7 +407,7 @@ class TestSparqlStore(unittest.TestCase): bsfs:distance bsfs:foobar . ''') - self.assertRaises(ValueError, setattr, store, 'schema', invalid) + self.assertRaises(errors.UnsupportedError, setattr, store, 'schema', invalid) # cannot migrate to incompatible schema invalid = bsc.from_string(''' -- cgit v1.2.3 From 965f4dfe41afd552ed6477c75e1286c14e3580f6 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Sat, 21 Jan 2023 16:31:08 +0100 Subject: Fetch in triple stores: * fetch interface * sparql fetch ast parser * sparql fetch implementation --- test/triple_store/sparql/test_parse_fetch.py | 263 ++++++++++++++++++++++++++ test/triple_store/sparql/test_parse_filter.py | 150 +++++++-------- test/triple_store/sparql/test_sparql.py | 70 +++++++ test/triple_store/sparql/test_utils.py | 155 +++++++++++++++ test/triple_store/test_base.py | 3 + 5 files changed, 566 insertions(+), 75 deletions(-) create mode 100644 test/triple_store/sparql/test_parse_fetch.py create mode 100644 test/triple_store/sparql/test_utils.py (limited to 'test/triple_store') diff --git a/test/triple_store/sparql/test_parse_fetch.py b/test/triple_store/sparql/test_parse_fetch.py new file mode 100644 index 0000000..0961789 --- /dev/null +++ b/test/triple_store/sparql/test_parse_fetch.py @@ -0,0 +1,263 @@ +""" + +Part of the bsfs test suite. +A copy of the license is provided with the project. 
+Author: Matthias Baumgartner, 2022 +""" +# imports +import rdflib +import unittest + +# bsie imports +from bsfs import schema +from bsfs.namespace import Namespace, ns +from bsfs.query import ast +from bsfs.utils import errors, URI + +# objects to test +from bsfs.triple_store.sparql.parse_fetch import Fetch + + +## code ## + +bsfs = Namespace('http://bsfs.ai/schema', fsep='/') +bse = Namespace('http://bsfs.ai/schema/Entity') +bst = Namespace('http://bsfs.ai/schema/Tag') +bsc = Namespace('http://bsfs.ai/schema/Collection') + +class TestParseFetch(unittest.TestCase): + + def setUp(self): + self.schema = schema.from_string(''' + prefix rdfs: + prefix xsd: + + prefix bsfs: + prefix bse: + prefix bst: + prefix bsc: + + # nodes + bsfs:Entity rdfs:subClassOf bsfs:Node . + bsfs:Tag rdfs:subClassOf bsfs:Node . + bsfs:Collection rdfs:subClassOf bsfs:Node . + + # literals + xsd:integer rdfs:subClassOf bsfs:Literal . + xsd:string rdfs:subClassOf bsfs:Literal . + + # predicates + bse:tag rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range bsfs:Tag . + + bse:collection rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range bsfs:Collection . + + bse:filename rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range xsd:string . + + bse:rank rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range xsd:integer . + + bst:main rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Tag ; + rdfs:range bsfs:Entity . + + bst:label rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Tag ; + rdfs:range xsd:string . + + bsc:tag rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Collection ; + rdfs:range bsfs:Tag . + + bsc:label rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Collection ; + rdfs:range xsd:string . + + bsc:rating rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Collection ; + rdfs:range xsd:integer . 
+ + ''') + + # graph to test queries + self.graph = rdflib.Graph() + # schema hierarchies + self.graph.add((rdflib.URIRef('http://bsfs.ai/schema/Entity'), rdflib.RDFS.subClassOf, rdflib.URIRef('http://bsfs.ai/schema/Node'))) + self.graph.add((rdflib.URIRef('http://bsfs.ai/schema/Collection'), rdflib.RDFS.subClassOf, rdflib.URIRef('http://bsfs.ai/schema/Node'))) + self.graph.add((rdflib.URIRef('http://bsfs.ai/schema/Tag'), rdflib.RDFS.subClassOf, rdflib.URIRef('http://bsfs.ai/schema/Node'))) + # entities + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Entity'))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Entity'))) + # tags + self.graph.add((rdflib.URIRef('http://example.com/tag#1234'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Tag'))) + self.graph.add((rdflib.URIRef('http://example.com/tag#4321'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Tag'))) + # collections + self.graph.add((rdflib.URIRef('http://example.com/collection#1234'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Collection'))) + self.graph.add((rdflib.URIRef('http://example.com/collection#4321'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Collection'))) + # entity literals + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(bse.rank), rdflib.Literal('1234', datatype=rdflib.XSD.integer))) + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(bse.filename), rdflib.Literal('filename_1234', datatype=rdflib.XSD.string))) + #self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(bse.rank), rdflib.Literal('4321', datatype=rdflib.XSD.integer))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(bse.filename), rdflib.Literal('filename_4321', datatype=rdflib.XSD.string))) + # tag literals + self.graph.add((rdflib.URIRef('http://example.com/tag#1234'), rdflib.URIRef(bst.label), rdflib.Literal('tag_label_1234', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/tag#4321'), rdflib.URIRef(bst.label), rdflib.Literal('tag_label_4321', datatype=rdflib.XSD.string))) + # collection literals + self.graph.add((rdflib.URIRef('http://example.com/collection#1234'), rdflib.URIRef(bsc.label), rdflib.Literal('collection_label_1234', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/collection#1234'), rdflib.URIRef(bsc.rating), rdflib.Literal('1234', datatype=rdflib.XSD.integer))) + self.graph.add((rdflib.URIRef('http://example.com/collection#4321'), rdflib.URIRef(bsc.label), rdflib.Literal('collection_label_4321', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/collection#4321'), rdflib.URIRef(bsc.rating), rdflib.Literal('4321', datatype=rdflib.XSD.integer))) + # entity-tag links + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(bse.tag), rdflib.URIRef('http://example.com/tag#1234'))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(bse.tag), rdflib.URIRef('http://example.com/tag#4321'))) + # entity-collection links + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(bse.collection), rdflib.URIRef('http://example.com/collection#1234'))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(bse.collection), 
rdflib.URIRef('http://example.com/collection#4321'))) + # collection-tag links + self.graph.add((rdflib.URIRef('http://example.com/collection#1234'), rdflib.URIRef(bsc.tag), rdflib.URIRef('http://example.com/tag#1234'))) + self.graph.add((rdflib.URIRef('http://example.com/collection#4321'), rdflib.URIRef(bsc.tag), rdflib.URIRef('http://example.com/tag#4321'))) + # tag-entity links # NOTE: cross-over + self.graph.add((rdflib.URIRef('http://example.com/tag#1234'), rdflib.URIRef(bst.main), rdflib.URIRef('http://example.com/entity#4321'))) + self.graph.add((rdflib.URIRef('http://example.com/tag#4321'), rdflib.URIRef(bst.main), rdflib.URIRef('http://example.com/entity#1234'))) + + # default parser + self.parser = Fetch(self.schema) + self.ent = self.schema.node(ns.bsfs.Entity) + + + def test_call(self): + # NOTE: The individual ast components are considered in the respective tests. Here, we test __call__ specifics. + + # __call__ requires a valid root type + self.assertRaises(errors.BackendError, self.parser, self.schema.literal(ns.bsfs.Literal), ast.fetch.This('this')) + self.assertRaises(errors.ConsistencyError, self.parser, self.schema.node(ns.bsfs.Node).child(ns.bsfs.Invalid), ast.fetch.This('this')) + # __call__ requires a parseable root + self.assertRaises(errors.BackendError, self.parser, self.ent, ast.filter.FilterExpression()) + # __call__ returns an executable query + q = self.parser(self.ent, ast.fetch.Fetch(bse.tag, ast.fetch.Value(bst.label, 'label'))) + self.assertSetEqual(set(q(self.graph)), { + (rdflib.URIRef('http://example.com/entity#1234'), rdflib.Literal('tag_label_1234', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/entity#4321'), rdflib.Literal('tag_label_4321', datatype=rdflib.XSD.string)), + }) + + + def test_routing(self): + self.assertRaises(errors.BackendError, self.parser._parse_fetch_expression, self.ent, ast.fetch.FetchExpression(), '?head') + + + def test_all(self): + # multiple values query + q = self.parser(self.ent, ast.fetch.All( + ast.fetch.Value(bse.filename, name='filename'), + ast.fetch.Value(bse.rank, name='rank')), + ) + self.assertSetEqual(set(q.names), {'filename', 'rank'}) + if q.names == ('filename', 'rank'): + self.assertSetEqual(set(q(self.graph)), { + (rdflib.URIRef('http://example.com/entity#1234'), rdflib.Literal('filename_1234', datatype=rdflib.XSD.string), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), + (rdflib.URIRef('http://example.com/entity#4321'), rdflib.Literal('filename_4321', datatype=rdflib.XSD.string), None), + }) + else: + self.assertSetEqual(set(q(self.graph)), { + (rdflib.URIRef('http://example.com/entity#1234'), rdflib.Literal('1234', datatype=rdflib.XSD.integer), rdflib.Literal('filename_1234', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/entity#4321'), None, rdflib.Literal('filename_4321', datatype=rdflib.XSD.string)), + }) + # mixed values and node query + q = self.parser(self.ent, ast.fetch.All( + ast.fetch.Value(bse.filename, name='filename'), + ast.fetch.Node(bse.tag, name='tag'), + )) + self.assertSetEqual(set(q.names), {'filename', 'tag'}) + if q.names == ('filename', 'tag'): + self.assertSetEqual(set(q(self.graph)), { + (rdflib.URIRef('http://example.com/entity#1234'), rdflib.Literal('filename_1234', datatype=rdflib.XSD.string), rdflib.URIRef('http://example.com/tag#1234')), + (rdflib.URIRef('http://example.com/entity#4321'), rdflib.Literal('filename_4321', datatype=rdflib.XSD.string), rdflib.URIRef('http://example.com/tag#4321')), + }) + else: + 
self.assertSetEqual(set(q(self.graph)), { + (rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef('http://example.com/tag#1234'), rdflib.Literal('filename_1234', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef('http://example.com/tag#4321'), rdflib.Literal('filename_4321', datatype=rdflib.XSD.string)), + }) + # multiple values and second hop + q = self.parser(self.ent, ast.fetch.Fetch(bse.tag, ast.fetch.All( + ast.fetch.This(name='tag'), + ast.fetch.Value(bst.label, name='label'), + ))) + self.assertSetEqual(set(q.names), {'tag', 'label'}) + if q.names == ('tag', 'label'): + self.assertSetEqual(set(q(self.graph)), { + (rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef('http://example.com/tag#1234'), rdflib.Literal('tag_label_1234', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef('http://example.com/tag#4321'), rdflib.Literal('tag_label_4321', datatype=rdflib.XSD.string)), + }) + else: + self.assertSetEqual(set(q(self.graph)), { + (rdflib.URIRef('http://example.com/entity#1234'), rdflib.Literal('tag_label_1234', datatype=rdflib.XSD.string), rdflib.URIRef('http://example.com/tag#1234')), + (rdflib.URIRef('http://example.com/entity#4321'), rdflib.Literal('tag_label_4321', datatype=rdflib.XSD.string), rdflib.URIRef('http://example.com/tag#4321')), + }) + + + + def test_fetch(self): + # two-hop query + q = self.parser(self.ent, ast.fetch.Fetch(bse.tag, ast.fetch.Value(bst.label, 'tag_label'))) + self.assertSetEqual(set(q(self.graph)), { + (rdflib.URIRef('http://example.com/entity#1234'), rdflib.Literal('tag_label_1234', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/entity#4321'), rdflib.Literal('tag_label_4321', datatype=rdflib.XSD.string)), + }) + # three-hop-query + q = self.parser(self.ent, ast.fetch.Fetch(bse.tag, ast.fetch.Fetch(bst.main, ast.fetch.Value(bse.rank, 'entity_rank')))) + self.assertSetEqual(set(q(self.graph)), { + (rdflib.URIRef('http://example.com/entity#1234'), None), + (rdflib.URIRef('http://example.com/entity#4321'), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), + }) + + + def test_node(self): + # cannot use the internal hop name + self.assertRaises(errors.BackendError, self.parser, self.ent, ast.fetch.Node(bse.tag, self.parser.ngen.prefix[1:] + '123')) + # a simple Node statement + q = self.parser(self.ent, ast.fetch.Node(bse.tag, 'tag')) + self.assertSetEqual(set(q.names), {'tag'}) + self.assertSetEqual(set(q(self.graph)), { + (rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef('http://example.com/tag#1234')), + (rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef('http://example.com/tag#4321')), + }) + + + def test_value(self): + # cannot use the internal hop name + self.assertRaises(errors.BackendError, self.parser, self.schema.node(ns.bsfs.Entity), ast.fetch.Value(bse.filename, self.parser.ngen.prefix[1:] + '123')) + # a simple Value statement + q = self.parser(self.ent, ast.fetch.Value(bse.filename, 'filename')) + self.assertSetEqual(set(q.names), {'filename'}) + self.assertSetEqual(set(q(self.graph)), { + (rdflib.URIRef('http://example.com/entity#1234'), rdflib.Literal('filename_1234', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/entity#4321'), rdflib.Literal('filename_4321', datatype=rdflib.XSD.string)), + }) + + + def test_this(self): + # cannot use the internal hop name + self.assertRaises(errors.BackendError, self.parser, self.ent, 
ast.fetch.This(self.parser.ngen.prefix[1:] + '123')) + # a simple This statement + self.assertEqual(self.parser._this(self.ent, ast.fetch.This('this'), '?head'), + ({('?head', 'this')}, '')) + q = self.parser(self.ent, ast.fetch.This('this')) + self.assertSetEqual(set(q(self.graph)), { + (rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef('http://example.com/entity#1234')), + (rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef('http://example.com/entity#4321')), + }) + + +## main ## + +if __name__ == '__main__': + unittest.main() + +## EOF ## diff --git a/test/triple_store/sparql/test_parse_filter.py b/test/triple_store/sparql/test_parse_filter.py index 8764535..6fa0cd3 100644 --- a/test/triple_store/sparql/test_parse_filter.py +++ b/test/triple_store/sparql/test_parse_filter.py @@ -149,13 +149,13 @@ class TestParseFilter(unittest.TestCase): ast.filter.Or( ast.filter.Is('http://example.com/entity#1234'), ast.filter.Is('http://example.com/entity#5678'))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, {'http://example.com/entity#1234'}) + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234'}) # root is optional q = self.parser(self.schema.node(ns.bsfs.Entity)) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234', 'http://example.com/image#4321'}) q = self.parser(self.schema.node(ns.bsfs.Tag)) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/tag#1234', 'http://example.com/tag#4321'}) @@ -164,7 +164,7 @@ class TestParseFilter(unittest.TestCase): self.assertRaises(errors.BackendError, self.parser._is, self.schema.literal(ns.bsfs.Literal), ast.filter.Is('http://example.com/entity#1234'), '?ent') # a single Is statement q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Is('http://example.com/entity#1234')) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234'}) # an aggregate of Is statements q = self.parser(self.schema.node(ns.bsfs.Entity), @@ -172,7 +172,7 @@ class TestParseFilter(unittest.TestCase): ast.filter.Is('http://example.com/entity#1234'), ast.filter.Is('http://example.com/entity#4321'), )) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/entity#4321'}) # combined with other filters q = self.parser(self.schema.node(ns.bsfs.Entity), @@ -185,12 +185,12 @@ class TestParseFilter(unittest.TestCase): ast.filter.Equals('Me, Myself, and I') ), )) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234'}) # as argument of Any/All q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.tag, ast.filter.Is('http://example.com/tag#1234'))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/image#1234'}) @@ -199,15 +199,15 @@ class TestParseFilter(unittest.TestCase): self.assertRaises(errors.BackendError, self.parser._equals, 
self.schema.node(ns.bsfs.Entity), ast.filter.Equals('hello world'), '?ent') # a single Equals statement q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.Equals('hello world'))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/entity#4321'}) # a single Equals statement that includes subtypes q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I'))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/image#1234'}) # an Equals statement on an integer q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.Equals(4321))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#4321', 'http://example.com/image#4321'}) @@ -216,18 +216,18 @@ class TestParseFilter(unittest.TestCase): self.assertRaises(errors.BackendError, self.parser._substring, self.schema.node(ns.bsfs.Entity), ast.filter.Substring('hello world'), '?ent') # a single Substring statement q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.Substring('hello'))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/entity#4321'}) q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.Substring('lo wo'))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/entity#4321'}) # a single Substring statement that includes subtypes q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.Substring('Myself'))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/image#1234'}) # an Substring statement on an integer q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.Substring('32'))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#4321', 'http://example.com/image#4321'}) @@ -236,15 +236,15 @@ class TestParseFilter(unittest.TestCase): self.assertRaises(errors.BackendError, self.parser._starts_with, self.schema.node(ns.bsfs.Entity), ast.filter.StartsWith('hello world'), '?ent') # a single StartsWith statement q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.StartsWith('hello'))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/entity#4321'}) # a single StartsWith statement that includes subtypes q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.StartsWith('Me, Mys'))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in 
q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/image#1234'}) # an StartsWith statement on an integer q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.StartsWith(432))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#4321', 'http://example.com/image#4321'}) @@ -253,15 +253,15 @@ class TestParseFilter(unittest.TestCase): self.assertRaises(errors.BackendError, self.parser._ends_with, self.schema.node(ns.bsfs.Entity), ast.filter.EndsWith('hello world'), '?ent') # a single EndsWith statement q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.EndsWith('orld'))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/entity#4321'}) # a single EndsWith statement that includes subtypes q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.EndsWith('and I'))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/image#1234'}) # an EndsWith statement on an integer q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.EndsWith(321))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#4321', 'http://example.com/image#4321'}) @@ -270,22 +270,22 @@ class TestParseFilter(unittest.TestCase): self.assertRaises(errors.BackendError, self.parser._less_than, self.schema.node(ns.bsfs.Entity), ast.filter.LessThan(2000), '?ent') # a single LessThan statement q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.LessThan(2000))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/image#1234'}) # _less_than respects boundary q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.LessThan(1234, strict=True))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set()) + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, set()) q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.LessThan(1234, strict=False))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/image#1234'}) # a single LessThan statement that includes subtypes q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.LessThan(2000))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/image#1234'}) # an LessThan statement on a string # always negative; note that http://example.com/tag#4321 is also not returned although its comment is a pure number q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.LessThan(10_000))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set()) + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, set()) def 
test_greater_than(self): @@ -293,22 +293,22 @@ class TestParseFilter(unittest.TestCase): self.assertRaises(errors.BackendError, self.parser._greater_than, self.schema.node(ns.bsfs.Entity), ast.filter.GreaterThan(2000), '?ent') # a single GreaterThan statement q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.GreaterThan(2000))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/image#4321'}) # _greater_than respects boundary q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.GreaterThan(4321, strict=True))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set()) + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, set()) q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.GreaterThan(4321, strict=False))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/image#4321'}) # a single GreaterThan statement that includes subtypes q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.GreaterThan(2000))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#4321', 'http://example.com/image#4321'}) # an GreaterThan statement on a string # always positive q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.GreaterThan(0))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234'}) @@ -331,7 +331,7 @@ class TestParseFilter(unittest.TestCase): ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234)), ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')), )) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234'}) # all conditions have to match q = self.parser(self.schema.node(ns.bsfs.Entity), @@ -340,21 +340,21 @@ class TestParseFilter(unittest.TestCase): ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234)), ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')), )) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set()) + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, set()) q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.And( ast.filter.Is('http://example.com/entity#1234'), ast.filter.Any(ns.bse.filesize, ast.filter.Equals(4321)), ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')), )) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set()) + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, set()) q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.And( ast.filter.Is('http://example.com/entity#1234'), ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234)), ast.filter.Any(ns.bse.comment, ast.filter.Equals('foobar')), )) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set()) + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, set()) # And can be nested q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.And( @@ -364,7 +364,7 @@ class 
TestParseFilter(unittest.TestCase): ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')), ), )) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234'}) @@ -387,7 +387,7 @@ class TestParseFilter(unittest.TestCase): ast.filter.Any(ns.bse.filesize, ast.filter.Equals(4321)), ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')), )) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/image#1234', 'http://example.com/entity#4321', 'http://example.com/image#4321'}) # at least one condition has to match q = self.parser(self.schema.node(ns.bsfs.Entity), @@ -396,14 +396,14 @@ class TestParseFilter(unittest.TestCase): ast.filter.Any(ns.bse.filesize, ast.filter.Equals(8765)), ast.filter.Any(ns.bse.comment, ast.filter.Equals('foobar')), )) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set()) + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, set()) q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Or( ast.filter.Is('http://example.com/entity#1234'), ast.filter.Any(ns.bse.filesize, ast.filter.Equals(8765)), ast.filter.Any(ns.bse.comment, ast.filter.Equals('foobar')), )) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234'}) q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Or( @@ -411,7 +411,7 @@ class TestParseFilter(unittest.TestCase): ast.filter.Any(ns.bse.filesize, ast.filter.Equals(4321)), ast.filter.Any(ns.bse.comment, ast.filter.Equals('foobar')), )) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#4321', 'http://example.com/image#4321'}) q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Or( @@ -419,7 +419,7 @@ class TestParseFilter(unittest.TestCase): ast.filter.Any(ns.bse.filesize, ast.filter.Equals(8765)), ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')), )) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/image#1234'}) # Or can be nested q = self.parser(self.schema.node(ns.bsfs.Entity), @@ -430,7 +430,7 @@ class TestParseFilter(unittest.TestCase): ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')), ), )) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/image#1234', 'http://example.com/entity#4321', 'http://example.com/image#4321'}) @@ -451,14 +451,14 @@ class TestParseFilter(unittest.TestCase): # _any returns a valid query q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/image#1234'}) # _any can be nested q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.tag, ast.filter.Any(ns.bse.representative, ast.filter.Is('http://example.com/image#1234')))) - 
self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/image#1234'}) @@ -468,12 +468,12 @@ class TestParseFilter(unittest.TestCase): # All Nodes q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.All(ns.bse.tag, ast.filter.Is('http://example.com/tag#1234'))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/image#1234'}) # All values q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.All(ns.bse.comment, ast.filter.Equals('hello world'))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#4321'}) # All on value within Or branch # entity#1234 is selected because all of its comments are in ("hello world", "Me, Myself, and I") @@ -481,12 +481,12 @@ class TestParseFilter(unittest.TestCase): ast.filter.All(ns.bse.comment, ast.filter.Or( ast.filter.Equals('hello world'), ast.filter.Equals('Me, Myself, and I')))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234'}) # All requires at least one predicate/value q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.All(ns.bse.comment, ast.filter.Equals('Me, Myself, and I'))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/image#1234'}) # All within a statement q = self.parser(self.schema.node(ns.bsfs.Entity), @@ -498,18 +498,18 @@ class TestParseFilter(unittest.TestCase): )) ) ) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/image#1234'}) # All with reversed Predicate q = self.parser(self.schema.node(ns.bsfs.Tag), ast.filter.All(ast.filter.Predicate(ns.bse.tag, reverse=True), ast.filter.Is('http://example.com/entity#4321'))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/tag#4321'}) # All with multiple predicates q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.All(ast.filter.OneOf(ns.bse.tag, ns.bse.buddy), # entity#1234 (tag:tag#1234), entity#1234 (buddy:image#1234), image#1234(tag:tag#1234) ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')))) # entity#1234, image#1234, tag#1234 - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/image#1234'}) @@ -518,22 +518,22 @@ class TestParseFilter(unittest.TestCase): # Not applies on conditions q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Not(ast.filter.Is('http://example.com/entity#1234'))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/image#1234', 'http://example.com/entity#4321', 'http://example.com/image#4321'}) # Not applies on conditions within branches q = self.parser(self.schema.node(ns.bsfs.Entity), 
ast.filter.Any(ns.bse.comment, ast.filter.Not(ast.filter.Equals('Me, Myself, and I')))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/entity#4321'}) # Not applies on branches q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Not(ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#4321', 'http://example.com/image#4321'}) # Double Not cancel each other q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Not(ast.filter.Not(ast.filter.Is('http://example.com/entity#1234')))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234'}) # Not works within aggregation (and) q = self.parser(self.schema.node(ns.bsfs.Entity), @@ -541,7 +541,7 @@ class TestParseFilter(unittest.TestCase): ast.filter.Not(ast.filter.Is('http://example.com/entity#1234')), ast.filter.Any(ns.bse.comment, ast.filter.Equals('hello world')), )) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#4321'}) # Not works within aggregation (or) q = self.parser(self.schema.node(ns.bsfs.Entity), @@ -549,7 +549,7 @@ class TestParseFilter(unittest.TestCase): ast.filter.Not(ast.filter.Is('http://example.com/entity#1234')), ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')), )) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234', 'http://example.com/image#4321'}) # Not works outside aggregation (and) q = self.parser(self.schema.node(ns.bsfs.Entity), @@ -558,7 +558,7 @@ class TestParseFilter(unittest.TestCase): ast.filter.Is('http://example.com/entity#1234'), ast.filter.Any(ns.bse.comment, ast.filter.Equals('hello world')), ))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#4321', 'http://example.com/image#1234', 'http://example.com/image#4321'}) # Not works outside aggregation (or) q = self.parser(self.schema.node(ns.bsfs.Entity), @@ -567,7 +567,7 @@ class TestParseFilter(unittest.TestCase): ast.filter.Is('http://example.com/entity#4321'), ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')), ))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/image#4321'}) # Not mixed with branch, aggregation, id, and value q = self.parser(self.schema.node(ns.bsfs.Entity), @@ -580,7 +580,7 @@ class TestParseFilter(unittest.TestCase): ), ast.filter.Any(ns.bse.comment, ast.filter.Not(ast.filter.Equals('foobar'))), # entity#1234, entity#4321, image#1234 )) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/image#1234'}) @@ -590,21 +590,21 @@ class TestParseFilter(unittest.TestCase): # Has with GreaterThan constraint q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Has(ns.bse.comment, 
ast.filter.GreaterThan(0))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234'}) q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Has(ns.bse.comment, ast.filter.GreaterThan(1))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234'}) # Has with Equals constraint q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Has(ns.bse.comment, 1)) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#4321', 'http://example.com/image#1234'}) # Has with LessThan constraint q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Has(ns.bse.comment, ast.filter.LessThan(2))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#4321', 'http://example.com/image#1234', 'http://example.com/image#4321'}) # Has with multiple constraints self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('extra1', datatype=rdflib.XSD.string))) @@ -616,17 +616,17 @@ class TestParseFilter(unittest.TestCase): self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('extra2', datatype=rdflib.XSD.string))) q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Has(ns.bse.comment, ast.filter.And(ast.filter.GreaterThan(1), ast.filter.LessThan(5)))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#4321'}) # Has with OneOf predicate q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Has(ast.filter.OneOf(ns.bse.tag, ns.bse.buddy), ast.filter.GreaterThan(1))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/entity#4321'}) # Has with reversed predicate q = self.parser(self.schema.node(ns.bsfs.Tag), ast.filter.Has(ast.filter.Predicate(ns.bse.tag, reverse=True), ast.filter.GreaterThan(1))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/tag#1234'}) @@ -643,23 +643,23 @@ class TestParseFilter(unittest.TestCase): self.assertRaises(errors.ConsistencyError, self.parser._distance, self.schema.literal(ns.bsfs.Colors), ast.filter.Distance([1,2,3,4,5], 1), '') # _distance respects threshold q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.colors, ast.filter.Distance([2,4,3,1], 4))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234'}) q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.colors, ast.filter.Distance([2,4,3,1], 3))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#4321', 'http://example.com/image#1234'}) q = 
self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.colors, ast.filter.Distance([2,4,3,1], 2))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/image#1234'}) # result set can be empty q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.colors, ast.filter.Distance([2,4,3,1], 1))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set()) + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, set()) # _distance respects strict q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.colors, ast.filter.Distance([1,2,3,4], 0, False))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234'}) q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.colors, ast.filter.Distance([1,2,3,4], 0, True))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set()) + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, set()) def test_one_of(self): # _one_of expects a node @@ -725,7 +725,7 @@ class TestParseFilter(unittest.TestCase): ast.filter.Any(ast.filter.OneOf(ns.bse.tag, ns.bse.buddy), ast.filter.Any(ast.filter.OneOf(ns.bse.comment), ast.filter.Equals('Me, Myself, and I')))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/image#1234'}) @@ -757,13 +757,13 @@ class TestParseFilter(unittest.TestCase): ast.filter.Any(ns.bse.representative, ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234))))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/entity#1234', 'http://example.com/image#1234'}) q = self.parser(self.schema.node(ns.bsfs.Tag), ast.filter.Any(ast.filter.Predicate(ns.bse.tag, reverse=True), ast.filter.Any(ns.bse.filesize, ast.filter.LessThan(2000)))) - self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + self.assertSetEqual({str(guid) for guid, in q(self.graph)}, {'http://example.com/tag#1234'}) diff --git a/test/triple_store/sparql/test_sparql.py b/test/triple_store/sparql/test_sparql.py index 7fbfb65..c58fae3 100644 --- a/test/triple_store/sparql/test_sparql.py +++ b/test/triple_store/sparql/test_sparql.py @@ -556,6 +556,76 @@ class TestSparqlStore(unittest.TestCase): self.assertSetEqual(set(q), tag_ids) + def test_fetch(self): + # store setup + store = SparqlStore.Open() + store.schema = self.schema + # add instances + ent_type = self.schema.node(ns.bsfs.Entity) + tag_type = self.schema.node(ns.bsfs.Tag) + ent_ids = {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')} + tag_ids = {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')} + store.create(ent_type, ent_ids) + store.create(tag_type, tag_ids) + store.set(ent_type, ent_ids, self.schema.predicate(ns.bse.tag), tag_ids) + store.set(ent_type, {URI('http://example.com/me/entity#1234')}, self.schema.predicate(ns.bse.filesize), {1234}) + store.set(ent_type, {URI('http://example.com/me/entity#4321')}, self.schema.predicate(ns.bse.filesize), {4321}) + store.set(ent_type, {URI('http://example.com/me/entity#1234')}, self.schema.predicate(ns.bse.comment), {'hello world'}) + # node_type must be a node from the schema + 
self.assertRaises(errors.ConsistencyError, list, store.fetch(self.schema.literal(ns.bsfs.Literal), + ast.filter.FilterExpression(), ast.fetch.FetchExpression())) + self.assertRaises(errors.ConsistencyError, list, store.fetch(self.schema.node(ns.bsfs.Node).child(ns.bsfs.Invalid), + ast.filter.FilterExpression(), ast.fetch.FetchExpression())) + # requires a filter and a fetch query + self.assertRaises(TypeError, list, store.fetch(self.schema.node(ns.bsfs.Entity), None, ast.fetch.FetchExpression())) + self.assertRaises(TypeError, list, store.fetch(self.schema.node(ns.bsfs.Entity), 1234, ast.fetch.FetchExpression())) + self.assertRaises(TypeError, list, store.fetch(self.schema.node(ns.bsfs.Entity), 'hello', ast.fetch.FetchExpression())) + self.assertRaises(TypeError, list, store.fetch(self.schema.node(ns.bsfs.Entity), ast.filter.FilterExpression(), None)) + self.assertRaises(TypeError, list, store.fetch(self.schema.node(ns.bsfs.Entity), ast.filter.FilterExpression(), 1234)) + self.assertRaises(TypeError, list, store.fetch(self.schema.node(ns.bsfs.Entity), ast.filter.FilterExpression(), 'hello')) + # fetch emits triples + self.assertSetEqual(set(store.fetch(self.schema.node(ns.bsfs.Entity), + ast.filter.Is('http://example.com/me/entity#1234'), + ast.fetch.Value(ns.bse.filesize, 'filesize'), + )), { + (URI('http://example.com/me/entity#1234'), 'filesize', 1234), + }) + # fetch respects filter query + self.assertSetEqual(set(store.fetch(self.schema.node(ns.bsfs.Entity), + ast.filter.IsIn('http://example.com/me/entity#1234', 'http://example.com/me/entity#4321'), + ast.fetch.Value(ns.bse.filesize, 'filesize'), + )), { + (URI('http://example.com/me/entity#1234'), 'filesize', 1234), + (URI('http://example.com/me/entity#4321'), 'filesize', 4321), + }) + # fetch ignores missing data + self.assertSetEqual(set(store.fetch(self.schema.node(ns.bsfs.Entity), + ast.filter.IsIn('http://example.com/me/entity#1234', 'http://example.com/me/entity#4321'), + ast.fetch.Value(ns.bse.comment, 'comment'), + )), { + (URI('http://example.com/me/entity#1234'), 'comment', 'hello world'), + }) + # fetch emits all triples + self.assertSetEqual(set(store.fetch(self.schema.node(ns.bsfs.Entity), + ast.filter.Is('http://example.com/me/entity#1234'), + ast.fetch.All( + ast.fetch.Value(ns.bse.filesize, 'filesize'), + ast.fetch.Node(ns.bse.tag, 'tag'), + ) + )), { + (URI('http://example.com/me/entity#1234'), 'filesize', 1234), + (URI('http://example.com/me/entity#1234'), 'tag', URI('http://example.com/me/tag#1234')), + (URI('http://example.com/me/entity#1234'), 'tag', URI('http://example.com/me/tag#4321')), + }) + # triples do not repeat + triples = list(store.fetch(self.schema.node(ns.bsfs.Entity), ast.filter.Is('http://example.com/me/entity#1234'), + ast.fetch.All( + ast.fetch.Value(ns.bse.filesize, 'filesize'), + ast.fetch.Node(ns.bse.tag, 'tag'), + ) + )) + self.assertEqual(len(triples), 3) + def test_exists(self): # store setup store = SparqlStore.Open() diff --git a/test/triple_store/sparql/test_utils.py b/test/triple_store/sparql/test_utils.py new file mode 100644 index 0000000..073b8f8 --- /dev/null +++ b/test/triple_store/sparql/test_utils.py @@ -0,0 +1,155 @@ +""" + +Part of the bsfs test suite. +A copy of the license is provided with the project. 
+Author: Matthias Baumgartner, 2022 +""" +# standard imports +import operator +import re +import unittest + +# external imports +import rdflib + +# bsie imports +from bsfs.namespace import ns + +# objects to test +from bsfs.triple_store.sparql.utils import GenHopName, Query + + +## code ## + +class TestGenHopName(unittest.TestCase): + def test_next(self): + # baseline + self.assertEqual(next(GenHopName(prefix='?foo', start=123)), '?foo123') + # respects prefix + self.assertEqual(next(GenHopName(prefix='?bar', start=123)), '?bar123') + # respects start + self.assertEqual(next(GenHopName(prefix='?foo', start=321)), '?foo321') + # counts up + cnt = GenHopName(prefix='?foo', start=998) + self.assertEqual(next(cnt), '?foo998') + self.assertEqual(next(cnt), '?foo999') + self.assertEqual(next(cnt), '?foo1000') + self.assertEqual(next(cnt), '?foo1001') + + def test_essentials(self): + # can get the prefix + self.assertEqual(GenHopName(prefix='?foo', start=123).prefix, '?foo') + # can get the counter + self.assertEqual(GenHopName(prefix='?foo', start=123).curr, 122) + + +class TestQuery(unittest.TestCase): + def setUp(self): + self.root_type = 'http://bsfs.ai/schema/Entity' + self.root_head = '?root' + self.select = (('?head', 'name'), ) + self.where = f'?root <{ns.bse.tag}> ?head' + + def test_essentials(self): + # can access members + q = Query(self.root_type, self.root_head, self.select, self.where) + self.assertEqual(q.root_type, self.root_type) + self.assertEqual(q.root_head, self.root_head) + self.assertEqual(q.select, self.select) + self.assertEqual(q.where, self.where) + # comparison + self.assertEqual(q, Query(self.root_type, self.root_head, self.select, self.where)) + self.assertEqual(hash(q), hash(Query(self.root_type, self.root_head, self.select, self.where))) + # comparison respects root_type + self.assertNotEqual(q, Query('http://bsfs.ai/schema/Tag', self.root_head, self.select, self.where)) + self.assertNotEqual(hash(q), hash(Query('http://bsfs.ai/schema/Tag', self.root_head, self.select, self.where))) + # comparison respects root_head + self.assertNotEqual(q, Query(self.root_type, '?foo', self.select, self.where)) + self.assertNotEqual(hash(q), hash(Query(self.root_type, '?foo', self.select, self.where))) + # comparison respects select + self.assertNotEqual(q, Query(self.root_type, self.root_head, (('?head', 'foo'), ), self.where)) + self.assertNotEqual(hash(q), hash(Query(self.root_type, self.root_head, (('?head', 'foo'), ), self.where))) + # comparison respects where + self.assertNotEqual(q, Query(self.root_type, self.root_head, self.select, '?root bse:filename ?head')) + self.assertNotEqual(hash(q), hash(Query(self.root_type, self.root_head, self.select, '?root bse:filename ?head'))) + # string conversion + self.assertEqual(str(q), q.query) + self.assertEqual(repr(q), "Query(http://bsfs.ai/schema/Entity, ?root, (('?head', 'name'),), ?root ?head)") + + def test_add(self): + q = Query(self.root_type, self.root_head, self.select, self.where) + # can only add a query + self.assertRaises(TypeError, operator.add, q, 1234) + self.assertRaises(TypeError, operator.add, q, 'foobar') + # root type and head must match + self.assertRaises(ValueError, operator.add, q, Query('http://bsfs.ai/schema/Tag', self.root_head)) + self.assertRaises(ValueError, operator.add, q, Query(self.root_type, '?foobar')) + # select and were are combined + combo = q + Query(self.root_type, self.root_head, (('?foo', 'bar'), ), f'?root <{ns.bse.filename}> ?foo') + self.assertEqual(combo.select, (('?head', 'name'), 
('?foo', 'bar'))) + self.assertEqual(combo.where, f'?root <{ns.bse.tag}> ?head . ?root <{ns.bse.filename}> ?foo') + # select can be empty + combo = q + Query(self.root_type, self.root_head, None, f'?root <{ns.bse.filename}> ?foo') + self.assertEqual(combo.select, (('?head', 'name'), )) + combo = Query(self.root_type, self.root_head, None, f'?root <{ns.bse.filename}> ?foo') + q + self.assertEqual(combo.select, (('?head', 'name'), )) + combo = Query(self.root_type, self.root_head, None, self.where) + Query(self.root_type, self.root_head, None, f'?root <{ns.bse.filename}> ?foo') + self.assertEqual(combo.select, tuple()) + # where can be empty + combo = q + Query(self.root_type, self.root_head, (('?foo', 'bar'), )) + self.assertEqual(combo.where, self.where) + combo = Query(self.root_type, self.root_head, (('?foo', 'bar'), )) + q + self.assertEqual(combo.where, self.where) + combo = Query(self.root_type, self.root_head, self.select) + Query(self.root_type, self.root_head, (('?foo', 'bar'), )) + self.assertEqual(combo.where, '') + + def test_names(self): + self.assertEqual(Query(self.root_type, self.root_head, (('?head', 'name'), ), self.where).names, + ('name', )) + self.assertEqual(Query(self.root_type, self.root_head, (('?head', 'name'), ('?foo', 'bar')), self.where).names, + ('name', 'bar')) + + def test_query(self): + def normalize(value): + value = value.strip() + value = value.lower() + value = value.replace(r'\n', ' ') + value, _ = re.subn('\s\s+', ' ', value) + return value + # query composes a valid query + q = Query(self.root_type, self.root_head, self.select, self.where) + self.assertEqual(normalize(q.query), normalize(f'select ?root (?head as ?name) where {{ ?root <{ns.rdf.type}>/<{ns.rdfs.subClassOf}>* . ?root <{ns.bse.tag}> ?head }}')) + # select and where are optional + q = Query(self.root_type, self.root_head) + self.assertEqual(normalize(q.query), normalize(f'select ?root where {{ ?root <{ns.rdf.type}>/<{ns.rdfs.subClassOf}>* . }}')) + # select and where need not to correspond + q = Query(self.root_type, self.root_head, (('?head', 'name'), )) + self.assertEqual(normalize(q.query), normalize(f'select ?root (?head as ?name) where {{ ?root <{ns.rdf.type}>/<{ns.rdfs.subClassOf}>* . 
}}')) + # query is used for string representation + self.assertEqual(str(q), q.query) + + def test_call(self): + graph = rdflib.Graph() + # schema + graph.add((rdflib.URIRef('http://bsfs.ai/schema/Document'), rdflib.URIRef(ns.rdfs.subClassOf), rdflib.URIRef('http://bsfs.ai/schema/Entity'))) + # nodes + graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.rdf.type), rdflib.URIRef('http://bsfs.ai/schema/Entity'))) + graph.add((rdflib.URIRef('http://example.com/doc#1234'), rdflib.URIRef(ns.rdf.type), rdflib.URIRef('http://bsfs.ai/schema/Document'))) + # links + graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.tag), rdflib.Literal('tag#1234', datatype=rdflib.XSD.string))) + graph.add((rdflib.URIRef('http://example.com/doc#1234'), rdflib.URIRef(ns.bse.tag), rdflib.Literal('tag#1234', datatype=rdflib.XSD.string))) + # run query on a given graph + query = Query(self.root_type, self.root_head, self.select, self.where) + self.assertSetEqual(set(query(graph)), { + (rdflib.URIRef('http://example.com/entity#1234'), rdflib.Literal('tag#1234', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/doc#1234'), rdflib.Literal('tag#1234', datatype=rdflib.XSD.string)), + }) + # query actually considers the passed graph + self.assertSetEqual(set(query(rdflib.Graph())), set()) + +## main ## + +if __name__ == '__main__': + unittest.main() + +## EOF ## diff --git a/test/triple_store/test_base.py b/test/triple_store/test_base.py index a0c3260..56a2539 100644 --- a/test/triple_store/test_base.py +++ b/test/triple_store/test_base.py @@ -38,6 +38,9 @@ class DummyBase(TripleStoreBase): def get(self, node_type, query): pass + def fetch(self, node_type, filter, fetch): + pass + def exists(self, node_type, guids): pass -- cgit v1.2.3 From cb819b8c268908b5f6cc680173db86e172847c46 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Wed, 8 Feb 2023 20:15:41 +0100 Subject: binary blob in schema and sparql triple store --- test/triple_store/sparql/test_sparql.py | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) (limited to 'test/triple_store') diff --git a/test/triple_store/sparql/test_sparql.py b/test/triple_store/sparql/test_sparql.py index c58fae3..30876f2 100644 --- a/test/triple_store/sparql/test_sparql.py +++ b/test/triple_store/sparql/test_sparql.py @@ -34,6 +34,7 @@ class TestSparqlStore(unittest.TestCase): bsfs:User rdfs:subClassOf bsfs:Node . xsd:string rdfs:subClassOf bsfs:Literal . bsfs:Number rdfs:subClassOf bsfs:Literal . + bsfs:BinaryBlob rdfs:subClassOf bsfs:Literal . xsd:integer rdfs:subClassOf bsfs:Number . # non-unique literal @@ -60,6 +61,11 @@ class TestSparqlStore(unittest.TestCase): rdfs:range bsfs:User ; bsfs:unique "true"^^xsd:boolean . + # binary range + bse:asset rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range bsfs:BinaryBlob . 
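+            # (asset payloads are raw bytes; the store is expected to keep them base64-encoded — see the test_set additions below)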
+ ''') self.schema_triples = { # schema hierarchy @@ -68,6 +74,7 @@ class TestSparqlStore(unittest.TestCase): (rdflib.URIRef(ns.bsfs.User), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), (rdflib.URIRef(ns.xsd.string), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), (rdflib.URIRef(ns.bsfs.Array), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.bsfs.BinaryBlob), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), (rdflib.URIRef(ns.bsfs.Feature), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Array)), (rdflib.URIRef(ns.bsfs.Number), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), (rdflib.URIRef(ns.bsfs.Time), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), @@ -76,6 +83,7 @@ class TestSparqlStore(unittest.TestCase): (rdflib.URIRef(ns.bse.filesize), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), (rdflib.URIRef(ns.bse.tag), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), (rdflib.URIRef(ns.bse.author), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + (rdflib.URIRef(ns.bse.asset), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), } def test_essentials(self): @@ -358,6 +366,7 @@ class TestSparqlStore(unittest.TestCase): (rdflib.URIRef(ns.bsfs.User), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), (rdflib.URIRef(ns.xsd.boolean), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), (rdflib.URIRef(ns.bsfs.Array), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.bsfs.BinaryBlob), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), (rdflib.URIRef(ns.bsfs.Feature), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Array)), (rdflib.URIRef(ns.bsfs.Number), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), (rdflib.URIRef(ns.bsfs.Time), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), @@ -931,6 +940,23 @@ class TestSparqlStore(unittest.TestCase): # inexistent guids self.assertRaises(errors.InstanceError, store.set, ent_type, {URI('http://example.com/me/entity#foobar')}, p_comment, {'xyz'}) + # BinaryBlob values are base64 encoded + p_asset = store.schema.predicate(ns.bse.asset) + store.set(ent_type, ent_ids, p_asset, {bytes(range(128)), bytes(range(128, 256))}) + blob1 = rdflib.Literal('AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn8=', + datatype=rdflib.URIRef(ns.bsfs.BinaryBlob)) + blob2 = rdflib.Literal('gIGCg4SFhoeIiYqLjI2Oj5CRkpOUlZaXmJmam5ydnp+goaKjpKWmp6ipqqusra6vsLGys7S1tre4ubq7vL2+v8DBwsPExcbHyMnKy8zNzs/Q0dLT1NXW19jZ2tvc3d7f4OHi4+Tl5ufo6err7O3u7/Dx8vP09fb3+Pn6+/z9/v8=', + datatype=rdflib.URIRef(ns.bsfs.BinaryBlob)) + self.assertTrue(set(store._graph).issuperset({ + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_asset.uri), blob1), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_asset.uri), blob2), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_asset.uri), blob1), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_asset.uri), blob2), + })) + # lit.value returns the original bytes value + self.assertSetEqual({lit.value for lit in store._graph.objects(None, rdflib.URIRef(p_asset.uri))}, + {bytes(range(128)), bytes(range(128, 256))}) + ## main ## -- cgit v1.2.3 From cd27775b406482b11f44575ab196501a30d9b075 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Thu, 2 Mar 2023 12:23:49 +0100 Subject: default sort order in sparql 
backend --- test/triple_store/sparql/test_utils.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) (limited to 'test/triple_store') diff --git a/test/triple_store/sparql/test_utils.py b/test/triple_store/sparql/test_utils.py index 073b8f8..edcf6d7 100644 --- a/test/triple_store/sparql/test_utils.py +++ b/test/triple_store/sparql/test_utils.py @@ -118,13 +118,13 @@ class TestQuery(unittest.TestCase): return value # query composes a valid query q = Query(self.root_type, self.root_head, self.select, self.where) - self.assertEqual(normalize(q.query), normalize(f'select ?root (?head as ?name) where {{ ?root <{ns.rdf.type}>/<{ns.rdfs.subClassOf}>* . ?root <{ns.bse.tag}> ?head }}')) + self.assertEqual(normalize(q.query), normalize(f'select distinct ?root (?head as ?name) where {{ ?root <{ns.rdf.type}>/<{ns.rdfs.subClassOf}>* . ?root <{ns.bse.tag}> ?head }} order by str(?root)')) # select and where are optional q = Query(self.root_type, self.root_head) - self.assertEqual(normalize(q.query), normalize(f'select ?root where {{ ?root <{ns.rdf.type}>/<{ns.rdfs.subClassOf}>* . }}')) + self.assertEqual(normalize(q.query), normalize(f'select distinct ?root where {{ ?root <{ns.rdf.type}>/<{ns.rdfs.subClassOf}>* . }} order by str(?root)')) # select and where need not to correspond q = Query(self.root_type, self.root_head, (('?head', 'name'), )) - self.assertEqual(normalize(q.query), normalize(f'select ?root (?head as ?name) where {{ ?root <{ns.rdf.type}>/<{ns.rdfs.subClassOf}>* . }}')) + self.assertEqual(normalize(q.query), normalize(f'select distinct ?root (?head as ?name) where {{ ?root <{ns.rdf.type}>/<{ns.rdfs.subClassOf}>* . }} order by str(?root)')) # query is used for string representation self.assertEqual(str(q), q.query) -- cgit v1.2.3 From 2e07f33314c238e42bfadc5f39805f93ffbc622e Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Thu, 2 Mar 2023 15:10:05 +0100 Subject: removed author and license notices from individual files --- test/triple_store/sparql/test_distance.py | 5 ----- test/triple_store/sparql/test_parse_fetch.py | 5 ----- test/triple_store/sparql/test_parse_filter.py | 5 ----- test/triple_store/sparql/test_sparql.py | 5 ----- test/triple_store/sparql/test_utils.py | 5 ----- test/triple_store/test_base.py | 5 ----- 6 files changed, 30 deletions(-) (limited to 'test/triple_store') diff --git a/test/triple_store/sparql/test_distance.py b/test/triple_store/sparql/test_distance.py index 0659459..e95be5a 100644 --- a/test/triple_store/sparql/test_distance.py +++ b/test/triple_store/sparql/test_distance.py @@ -1,9 +1,4 @@ -""" -Part of the bsfs test suite. -A copy of the license is provided with the project. -Author: Matthias Baumgartner, 2022 -""" # imports import numpy as np import unittest diff --git a/test/triple_store/sparql/test_parse_fetch.py b/test/triple_store/sparql/test_parse_fetch.py index 0961789..9284608 100644 --- a/test/triple_store/sparql/test_parse_fetch.py +++ b/test/triple_store/sparql/test_parse_fetch.py @@ -1,9 +1,4 @@ -""" -Part of the bsfs test suite. -A copy of the license is provided with the project. -Author: Matthias Baumgartner, 2022 -""" # imports import rdflib import unittest diff --git a/test/triple_store/sparql/test_parse_filter.py b/test/triple_store/sparql/test_parse_filter.py index 6fa0cd3..8a9940e 100644 --- a/test/triple_store/sparql/test_parse_filter.py +++ b/test/triple_store/sparql/test_parse_filter.py @@ -1,9 +1,4 @@ -""" -Part of the bsfs test suite. -A copy of the license is provided with the project. 
-Author: Matthias Baumgartner, 2022 -""" # imports import rdflib import unittest diff --git a/test/triple_store/sparql/test_sparql.py b/test/triple_store/sparql/test_sparql.py index 30876f2..b1d99ac 100644 --- a/test/triple_store/sparql/test_sparql.py +++ b/test/triple_store/sparql/test_sparql.py @@ -1,9 +1,4 @@ -""" -Part of the bsfs test suite. -A copy of the license is provided with the project. -Author: Matthias Baumgartner, 2022 -""" # imports import rdflib import unittest diff --git a/test/triple_store/sparql/test_utils.py b/test/triple_store/sparql/test_utils.py index edcf6d7..8f894bb 100644 --- a/test/triple_store/sparql/test_utils.py +++ b/test/triple_store/sparql/test_utils.py @@ -1,9 +1,4 @@ -""" -Part of the bsfs test suite. -A copy of the license is provided with the project. -Author: Matthias Baumgartner, 2022 -""" # standard imports import operator import re diff --git a/test/triple_store/test_base.py b/test/triple_store/test_base.py index 56a2539..4c4a9b6 100644 --- a/test/triple_store/test_base.py +++ b/test/triple_store/test_base.py @@ -1,9 +1,4 @@ -""" -Part of the bsfs test suite. -A copy of the license is provided with the project. -Author: Matthias Baumgartner, 2022 -""" # imports import unittest -- cgit v1.2.3 From b66ed641d5cbb4cb83f4a571223e4d65d80ed05c Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Thu, 2 Mar 2023 15:29:12 +0100 Subject: check non-serializable URIs in the sparql store --- test/triple_store/sparql/test_parse_filter.py | 2 ++ test/triple_store/sparql/test_sparql.py | 6 ++++++ 2 files changed, 8 insertions(+) (limited to 'test/triple_store') diff --git a/test/triple_store/sparql/test_parse_filter.py b/test/triple_store/sparql/test_parse_filter.py index 8a9940e..6db9224 100644 --- a/test/triple_store/sparql/test_parse_filter.py +++ b/test/triple_store/sparql/test_parse_filter.py @@ -157,6 +157,8 @@ class TestParseFilter(unittest.TestCase): def test_is(self): # _is requires a node self.assertRaises(errors.BackendError, self.parser._is, self.schema.literal(ns.bsfs.Literal), ast.filter.Is('http://example.com/entity#1234'), '?ent') + # _is requires a serializable guid + self.assertRaises(errors.BackendError, self.parser._is, self.schema.node(ns.bsfs.Entity), ast.filter.Is('http://example.com/entity#foo and bar'), '?ent') # a single Is statement q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Is('http://example.com/entity#1234')) self.assertSetEqual({str(guid) for guid, in q(self.graph)}, diff --git a/test/triple_store/sparql/test_sparql.py b/test/triple_store/sparql/test_sparql.py index b1d99ac..d880082 100644 --- a/test/triple_store/sparql/test_sparql.py +++ b/test/triple_store/sparql/test_sparql.py @@ -678,6 +678,9 @@ class TestSparqlStore(unittest.TestCase): self.assertRaises(errors.ConsistencyError, store.create, self.schema.node(ns.bsfs.Entity).child(ns.bsfs.invalid), { URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')}) + # guid must be valid + self.assertRaises(ValueError, store.create, self.schema.node(ns.bsfs.Entity), {URI('http://example.com/me/foo and bar')}) + # can create some nodes ent_type = store.schema.node(ns.bsfs.Entity) store.create(ent_type, {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')}) @@ -766,6 +769,9 @@ class TestSparqlStore(unittest.TestCase): # invalid predicate is not permitted self.assertRaises(errors.ConsistencyError, store.set, ent_type, ent_ids, p_invalid, {'http://example.com/me/tag#1234'}) + # invalid guid is not permitted + 
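# (a guid containing a space, e.g. 'foo and bar', cannot be serialized into a SPARQL IRI, so the store is expected to raise ValueError instead of emitting a broken query)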
self.assertRaises(ValueError, store.set, ent_type, {URI('http://example.com/me/foo and bar')}, p_filesize, {1234}) + # predicate must match node_type self.assertRaises(errors.ConsistencyError, store.set, tag_type, tag_ids, p_filesize, {1234}) -- cgit v1.2.3 From 28a021483c13e974e00b6159f0653b0727df9d10 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Thu, 2 Mar 2023 16:40:00 +0100 Subject: prohibit certain characters in URI and ensure URIs in bsfs.graph --- test/triple_store/sparql/test_parse_filter.py | 2 +- test/triple_store/sparql/test_sparql.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) (limited to 'test/triple_store') diff --git a/test/triple_store/sparql/test_parse_filter.py b/test/triple_store/sparql/test_parse_filter.py index 6db9224..5b6ca8a 100644 --- a/test/triple_store/sparql/test_parse_filter.py +++ b/test/triple_store/sparql/test_parse_filter.py @@ -158,7 +158,7 @@ class TestParseFilter(unittest.TestCase): # _is requires a node self.assertRaises(errors.BackendError, self.parser._is, self.schema.literal(ns.bsfs.Literal), ast.filter.Is('http://example.com/entity#1234'), '?ent') # _is requires a serializable guid - self.assertRaises(errors.BackendError, self.parser._is, self.schema.node(ns.bsfs.Entity), ast.filter.Is('http://example.com/entity#foo and bar'), '?ent') + self.assertRaises(ValueError, self.parser._is, self.schema.node(ns.bsfs.Entity), ast.filter.Is('http://example.com/entity#foo and bar'), '?ent') # a single Is statement q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Is('http://example.com/entity#1234')) self.assertSetEqual({str(guid) for guid, in q(self.graph)}, diff --git a/test/triple_store/sparql/test_sparql.py b/test/triple_store/sparql/test_sparql.py index d880082..f45ca37 100644 --- a/test/triple_store/sparql/test_sparql.py +++ b/test/triple_store/sparql/test_sparql.py @@ -679,7 +679,7 @@ class TestSparqlStore(unittest.TestCase): URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')}) # guid must be valid - self.assertRaises(ValueError, store.create, self.schema.node(ns.bsfs.Entity), {URI('http://example.com/me/foo and bar')}) + self.assertRaises(ValueError, store.create, self.schema.node(ns.bsfs.Entity), {'http://example.com/me/foo and bar'}) # can create some nodes ent_type = store.schema.node(ns.bsfs.Entity) @@ -770,7 +770,7 @@ class TestSparqlStore(unittest.TestCase): self.assertRaises(errors.ConsistencyError, store.set, ent_type, ent_ids, p_invalid, {'http://example.com/me/tag#1234'}) # invalid guid is not permitted - self.assertRaises(ValueError, store.set, ent_type, {URI('http://example.com/me/foo and bar')}, p_filesize, {1234}) + self.assertRaises(ValueError, store.set, ent_type, {'http://example.com/me/foo and bar'}, p_filesize, {1234}) # predicate must match node_type self.assertRaises(errors.ConsistencyError, store.set, tag_type, tag_ids, p_filesize, {1234}) -- cgit v1.2.3 From 4fead04055be4967d9ea3b24ff61fe37a93108dd Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Sat, 4 Mar 2023 13:31:11 +0100 Subject: namespace refactoring and cleanup --- test/triple_store/sparql/test_parse_fetch.py | 95 ++++++++++++----------- test/triple_store/sparql/test_parse_filter.py | 39 +++++----- test/triple_store/sparql/test_sparql.py | 104 ++++++++++++++------------ test/triple_store/sparql/test_utils.py | 24 +++--- 4 files changed, 137 insertions(+), 125 deletions(-) (limited to 'test/triple_store') diff --git a/test/triple_store/sparql/test_parse_fetch.py 
b/test/triple_store/sparql/test_parse_fetch.py index 9284608..1d793e7 100644 --- a/test/triple_store/sparql/test_parse_fetch.py +++ b/test/triple_store/sparql/test_parse_fetch.py @@ -15,10 +15,9 @@ from bsfs.triple_store.sparql.parse_fetch import Fetch ## code ## -bsfs = Namespace('http://bsfs.ai/schema', fsep='/') -bse = Namespace('http://bsfs.ai/schema/Entity') -bst = Namespace('http://bsfs.ai/schema/Tag') -bsc = Namespace('http://bsfs.ai/schema/Collection') +ns.bse = ns.bsfs.Entity() +ns.bst = ns.bsfs.Tag() +ns.bsc = ns.bsfs.Collection() class TestParseFetch(unittest.TestCase): @@ -27,10 +26,10 @@ class TestParseFetch(unittest.TestCase): prefix rdfs: prefix xsd: - prefix bsfs: - prefix bse: - prefix bst: - prefix bsc: + prefix bsfs: + prefix bse: + prefix bst: + prefix bsc: # nodes bsfs:Entity rdfs:subClassOf bsfs:Node . @@ -83,43 +82,43 @@ class TestParseFetch(unittest.TestCase): # graph to test queries self.graph = rdflib.Graph() # schema hierarchies - self.graph.add((rdflib.URIRef('http://bsfs.ai/schema/Entity'), rdflib.RDFS.subClassOf, rdflib.URIRef('http://bsfs.ai/schema/Node'))) - self.graph.add((rdflib.URIRef('http://bsfs.ai/schema/Collection'), rdflib.RDFS.subClassOf, rdflib.URIRef('http://bsfs.ai/schema/Node'))) - self.graph.add((rdflib.URIRef('http://bsfs.ai/schema/Tag'), rdflib.RDFS.subClassOf, rdflib.URIRef('http://bsfs.ai/schema/Node'))) + self.graph.add((rdflib.URIRef('https://schema.bsfs.io/core/Entity'), rdflib.RDFS.subClassOf, rdflib.URIRef('https://schema.bsfs.io/core/Node'))) + self.graph.add((rdflib.URIRef('https://schema.bsfs.io/core/Collection'), rdflib.RDFS.subClassOf, rdflib.URIRef('https://schema.bsfs.io/core/Node'))) + self.graph.add((rdflib.URIRef('https://schema.bsfs.io/core/Tag'), rdflib.RDFS.subClassOf, rdflib.URIRef('https://schema.bsfs.io/core/Node'))) # entities - self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Entity'))) - self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Entity'))) + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.RDF.type, rdflib.URIRef('https://schema.bsfs.io/core/Entity'))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.RDF.type, rdflib.URIRef('https://schema.bsfs.io/core/Entity'))) # tags - self.graph.add((rdflib.URIRef('http://example.com/tag#1234'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Tag'))) - self.graph.add((rdflib.URIRef('http://example.com/tag#4321'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Tag'))) + self.graph.add((rdflib.URIRef('http://example.com/tag#1234'), rdflib.RDF.type, rdflib.URIRef('https://schema.bsfs.io/core/Tag'))) + self.graph.add((rdflib.URIRef('http://example.com/tag#4321'), rdflib.RDF.type, rdflib.URIRef('https://schema.bsfs.io/core/Tag'))) # collections - self.graph.add((rdflib.URIRef('http://example.com/collection#1234'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Collection'))) - self.graph.add((rdflib.URIRef('http://example.com/collection#4321'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Collection'))) + self.graph.add((rdflib.URIRef('http://example.com/collection#1234'), rdflib.RDF.type, rdflib.URIRef('https://schema.bsfs.io/core/Collection'))) + self.graph.add((rdflib.URIRef('http://example.com/collection#4321'), rdflib.RDF.type, rdflib.URIRef('https://schema.bsfs.io/core/Collection'))) # entity literals - 
self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(bse.rank), rdflib.Literal('1234', datatype=rdflib.XSD.integer))) - self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(bse.filename), rdflib.Literal('filename_1234', datatype=rdflib.XSD.string))) - #self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(bse.rank), rdflib.Literal('4321', datatype=rdflib.XSD.integer))) - self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(bse.filename), rdflib.Literal('filename_4321', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.rank), rdflib.Literal('1234', datatype=rdflib.XSD.integer))) + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.filename), rdflib.Literal('filename_1234', datatype=rdflib.XSD.string))) + #self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(ns.bse.rank), rdflib.Literal('4321', datatype=rdflib.XSD.integer))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(ns.bse.filename), rdflib.Literal('filename_4321', datatype=rdflib.XSD.string))) # tag literals - self.graph.add((rdflib.URIRef('http://example.com/tag#1234'), rdflib.URIRef(bst.label), rdflib.Literal('tag_label_1234', datatype=rdflib.XSD.string))) - self.graph.add((rdflib.URIRef('http://example.com/tag#4321'), rdflib.URIRef(bst.label), rdflib.Literal('tag_label_4321', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/tag#1234'), rdflib.URIRef(ns.bst.label), rdflib.Literal('tag_label_1234', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/tag#4321'), rdflib.URIRef(ns.bst.label), rdflib.Literal('tag_label_4321', datatype=rdflib.XSD.string))) # collection literals - self.graph.add((rdflib.URIRef('http://example.com/collection#1234'), rdflib.URIRef(bsc.label), rdflib.Literal('collection_label_1234', datatype=rdflib.XSD.string))) - self.graph.add((rdflib.URIRef('http://example.com/collection#1234'), rdflib.URIRef(bsc.rating), rdflib.Literal('1234', datatype=rdflib.XSD.integer))) - self.graph.add((rdflib.URIRef('http://example.com/collection#4321'), rdflib.URIRef(bsc.label), rdflib.Literal('collection_label_4321', datatype=rdflib.XSD.string))) - self.graph.add((rdflib.URIRef('http://example.com/collection#4321'), rdflib.URIRef(bsc.rating), rdflib.Literal('4321', datatype=rdflib.XSD.integer))) + self.graph.add((rdflib.URIRef('http://example.com/collection#1234'), rdflib.URIRef(ns.bsc.label), rdflib.Literal('collection_label_1234', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/collection#1234'), rdflib.URIRef(ns.bsc.rating), rdflib.Literal('1234', datatype=rdflib.XSD.integer))) + self.graph.add((rdflib.URIRef('http://example.com/collection#4321'), rdflib.URIRef(ns.bsc.label), rdflib.Literal('collection_label_4321', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/collection#4321'), rdflib.URIRef(ns.bsc.rating), rdflib.Literal('4321', datatype=rdflib.XSD.integer))) # entity-tag links - self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(bse.tag), rdflib.URIRef('http://example.com/tag#1234'))) - self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(bse.tag), rdflib.URIRef('http://example.com/tag#4321'))) + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), 
rdflib.URIRef(ns.bse.tag), rdflib.URIRef('http://example.com/tag#1234'))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(ns.bse.tag), rdflib.URIRef('http://example.com/tag#4321'))) # entity-collection links - self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(bse.collection), rdflib.URIRef('http://example.com/collection#1234'))) - self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(bse.collection), rdflib.URIRef('http://example.com/collection#4321'))) + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.collection), rdflib.URIRef('http://example.com/collection#1234'))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(ns.bse.collection), rdflib.URIRef('http://example.com/collection#4321'))) # collection-tag links - self.graph.add((rdflib.URIRef('http://example.com/collection#1234'), rdflib.URIRef(bsc.tag), rdflib.URIRef('http://example.com/tag#1234'))) - self.graph.add((rdflib.URIRef('http://example.com/collection#4321'), rdflib.URIRef(bsc.tag), rdflib.URIRef('http://example.com/tag#4321'))) + self.graph.add((rdflib.URIRef('http://example.com/collection#1234'), rdflib.URIRef(ns.bsc.tag), rdflib.URIRef('http://example.com/tag#1234'))) + self.graph.add((rdflib.URIRef('http://example.com/collection#4321'), rdflib.URIRef(ns.bsc.tag), rdflib.URIRef('http://example.com/tag#4321'))) # tag-entity links # NOTE: cross-over - self.graph.add((rdflib.URIRef('http://example.com/tag#1234'), rdflib.URIRef(bst.main), rdflib.URIRef('http://example.com/entity#4321'))) - self.graph.add((rdflib.URIRef('http://example.com/tag#4321'), rdflib.URIRef(bst.main), rdflib.URIRef('http://example.com/entity#1234'))) + self.graph.add((rdflib.URIRef('http://example.com/tag#1234'), rdflib.URIRef(ns.bst.main), rdflib.URIRef('http://example.com/entity#4321'))) + self.graph.add((rdflib.URIRef('http://example.com/tag#4321'), rdflib.URIRef(ns.bst.main), rdflib.URIRef('http://example.com/entity#1234'))) # default parser self.parser = Fetch(self.schema) @@ -135,7 +134,7 @@ class TestParseFetch(unittest.TestCase): # __call__ requires a parseable root self.assertRaises(errors.BackendError, self.parser, self.ent, ast.filter.FilterExpression()) # __call__ returns an executable query - q = self.parser(self.ent, ast.fetch.Fetch(bse.tag, ast.fetch.Value(bst.label, 'label'))) + q = self.parser(self.ent, ast.fetch.Fetch(ns.bse.tag, ast.fetch.Value(ns.bst.label, 'label'))) self.assertSetEqual(set(q(self.graph)), { (rdflib.URIRef('http://example.com/entity#1234'), rdflib.Literal('tag_label_1234', datatype=rdflib.XSD.string)), (rdflib.URIRef('http://example.com/entity#4321'), rdflib.Literal('tag_label_4321', datatype=rdflib.XSD.string)), @@ -149,8 +148,8 @@ class TestParseFetch(unittest.TestCase): def test_all(self): # multiple values query q = self.parser(self.ent, ast.fetch.All( - ast.fetch.Value(bse.filename, name='filename'), - ast.fetch.Value(bse.rank, name='rank')), + ast.fetch.Value(ns.bse.filename, name='filename'), + ast.fetch.Value(ns.bse.rank, name='rank')), ) self.assertSetEqual(set(q.names), {'filename', 'rank'}) if q.names == ('filename', 'rank'): @@ -165,8 +164,8 @@ class TestParseFetch(unittest.TestCase): }) # mixed values and node query q = self.parser(self.ent, ast.fetch.All( - ast.fetch.Value(bse.filename, name='filename'), - ast.fetch.Node(bse.tag, name='tag'), + ast.fetch.Value(ns.bse.filename, name='filename'), + ast.fetch.Node(ns.bse.tag, name='tag'), )) 
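        # (the order of the select columns is not fixed, so the expected rows below are checked against whichever ordering q.names reports)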
self.assertSetEqual(set(q.names), {'filename', 'tag'}) if q.names == ('filename', 'tag'): @@ -180,9 +179,9 @@ class TestParseFetch(unittest.TestCase): (rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef('http://example.com/tag#4321'), rdflib.Literal('filename_4321', datatype=rdflib.XSD.string)), }) # multiple values and second hop - q = self.parser(self.ent, ast.fetch.Fetch(bse.tag, ast.fetch.All( + q = self.parser(self.ent, ast.fetch.Fetch(ns.bse.tag, ast.fetch.All( ast.fetch.This(name='tag'), - ast.fetch.Value(bst.label, name='label'), + ast.fetch.Value(ns.bst.label, name='label'), ))) self.assertSetEqual(set(q.names), {'tag', 'label'}) if q.names == ('tag', 'label'): @@ -200,13 +199,13 @@ class TestParseFetch(unittest.TestCase): def test_fetch(self): # two-hop query - q = self.parser(self.ent, ast.fetch.Fetch(bse.tag, ast.fetch.Value(bst.label, 'tag_label'))) + q = self.parser(self.ent, ast.fetch.Fetch(ns.bse.tag, ast.fetch.Value(ns.bst.label, 'tag_label'))) self.assertSetEqual(set(q(self.graph)), { (rdflib.URIRef('http://example.com/entity#1234'), rdflib.Literal('tag_label_1234', datatype=rdflib.XSD.string)), (rdflib.URIRef('http://example.com/entity#4321'), rdflib.Literal('tag_label_4321', datatype=rdflib.XSD.string)), }) # three-hop-query - q = self.parser(self.ent, ast.fetch.Fetch(bse.tag, ast.fetch.Fetch(bst.main, ast.fetch.Value(bse.rank, 'entity_rank')))) + q = self.parser(self.ent, ast.fetch.Fetch(ns.bse.tag, ast.fetch.Fetch(ns.bst.main, ast.fetch.Value(ns.bse.rank, 'entity_rank')))) self.assertSetEqual(set(q(self.graph)), { (rdflib.URIRef('http://example.com/entity#1234'), None), (rdflib.URIRef('http://example.com/entity#4321'), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), @@ -215,9 +214,9 @@ class TestParseFetch(unittest.TestCase): def test_node(self): # cannot use the internal hop name - self.assertRaises(errors.BackendError, self.parser, self.ent, ast.fetch.Node(bse.tag, self.parser.ngen.prefix[1:] + '123')) + self.assertRaises(errors.BackendError, self.parser, self.ent, ast.fetch.Node(ns.bse.tag, self.parser.ngen.prefix[1:] + '123')) # a simple Node statement - q = self.parser(self.ent, ast.fetch.Node(bse.tag, 'tag')) + q = self.parser(self.ent, ast.fetch.Node(ns.bse.tag, 'tag')) self.assertSetEqual(set(q.names), {'tag'}) self.assertSetEqual(set(q(self.graph)), { (rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef('http://example.com/tag#1234')), @@ -227,9 +226,9 @@ class TestParseFetch(unittest.TestCase): def test_value(self): # cannot use the internal hop name - self.assertRaises(errors.BackendError, self.parser, self.schema.node(ns.bsfs.Entity), ast.fetch.Value(bse.filename, self.parser.ngen.prefix[1:] + '123')) + self.assertRaises(errors.BackendError, self.parser, self.schema.node(ns.bsfs.Entity), ast.fetch.Value(ns.bse.filename, self.parser.ngen.prefix[1:] + '123')) # a simple Value statement - q = self.parser(self.ent, ast.fetch.Value(bse.filename, 'filename')) + q = self.parser(self.ent, ast.fetch.Value(ns.bse.filename, 'filename')) self.assertSetEqual(set(q.names), {'filename'}) self.assertSetEqual(set(q(self.graph)), { (rdflib.URIRef('http://example.com/entity#1234'), rdflib.Literal('filename_1234', datatype=rdflib.XSD.string)), diff --git a/test/triple_store/sparql/test_parse_filter.py b/test/triple_store/sparql/test_parse_filter.py index 5b6ca8a..a45f2ef 100644 --- a/test/triple_store/sparql/test_parse_filter.py +++ b/test/triple_store/sparql/test_parse_filter.py @@ -15,6 +15,8 @@ from bsfs.triple_store.sparql.parse_filter import 
Filter ## code ## +ns.bse = ns.bsfs.Entity() + class TestParseFilter(unittest.TestCase): def setUp(self): # schema @@ -22,25 +24,28 @@ class TestParseFilter(unittest.TestCase): prefix rdfs: prefix xsd: - prefix bsfs: - prefix bse: + prefix bsfs: + prefix bse: + prefix bsl: + prefix bsd: + prefix bsa: - bsfs:Array rdfs:subClassOf bsfs:Literal . - bsfs:Feature rdfs:subClassOf bsfs:Array . - bsfs:Number rdfs:subClassOf bsfs:Literal . + bsl:Array rdfs:subClassOf bsfs:Literal . + bsa:Feature rdfs:subClassOf bsl:Array . + bsl:Number rdfs:subClassOf bsfs:Literal . bsfs:Entity rdfs:subClassOf bsfs:Node . bsfs:Image rdfs:subClassOf bsfs:Entity . bsfs:Tag rdfs:subClassOf bsfs:Node . xsd:string rdfs:subClassOf bsfs:Literal . - xsd:integer rdfs:subClassOf bsfs:Number . + xsd:integer rdfs:subClassOf bsl:Number . bsfs:URI rdfs:subClassOf bsfs:Literal . - bsfs:Colors rdfs:subClassOf bsfs:Feature ; + bsfs:Colors rdfs:subClassOf bsa:Feature ; bsfs:dimension "4"^^xsd:integer ; bsfs:dtype xsd:integer ; - bsfs:distance bsfs:euclidean . + bsfs:distance bsd:euclidean . bse:colors rdfs:subClassOf bsfs:Predicate ; rdfs:domain bsfs:Entity ; @@ -81,18 +86,18 @@ class TestParseFilter(unittest.TestCase): # graph to test queries self.graph = rdflib.Graph() # schema hierarchies - self.graph.add((rdflib.URIRef('http://bsfs.ai/schema/Entity'), rdflib.RDFS.subClassOf, rdflib.URIRef('http://bsfs.ai/schema/Node'))) - self.graph.add((rdflib.URIRef('http://bsfs.ai/schema/Image'), rdflib.RDFS.subClassOf, rdflib.URIRef('http://bsfs.ai/schema/Entity'))) - self.graph.add((rdflib.URIRef('http://bsfs.ai/schema/Tag'), rdflib.RDFS.subClassOf, rdflib.URIRef('http://bsfs.ai/schema/Node'))) + self.graph.add((rdflib.URIRef('https://schema.bsfs.io/core/Entity'), rdflib.RDFS.subClassOf, rdflib.URIRef('https://schema.bsfs.io/core/Node'))) + self.graph.add((rdflib.URIRef('https://schema.bsfs.io/core/Image'), rdflib.RDFS.subClassOf, rdflib.URIRef('https://schema.bsfs.io/core/Entity'))) + self.graph.add((rdflib.URIRef('https://schema.bsfs.io/core/Tag'), rdflib.RDFS.subClassOf, rdflib.URIRef('https://schema.bsfs.io/core/Node'))) # entities - self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Entity'))) - self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Entity'))) + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.RDF.type, rdflib.URIRef('https://schema.bsfs.io/core/Entity'))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.RDF.type, rdflib.URIRef('https://schema.bsfs.io/core/Entity'))) # tags - self.graph.add((rdflib.URIRef('http://example.com/tag#1234'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Tag'))) - self.graph.add((rdflib.URIRef('http://example.com/tag#4321'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Tag'))) + self.graph.add((rdflib.URIRef('http://example.com/tag#1234'), rdflib.RDF.type, rdflib.URIRef('https://schema.bsfs.io/core/Tag'))) + self.graph.add((rdflib.URIRef('http://example.com/tag#4321'), rdflib.RDF.type, rdflib.URIRef('https://schema.bsfs.io/core/Tag'))) # images - self.graph.add((rdflib.URIRef('http://example.com/image#1234'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Image'))) - self.graph.add((rdflib.URIRef('http://example.com/image#4321'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Image'))) + self.graph.add((rdflib.URIRef('http://example.com/image#1234'), rdflib.RDF.type, 
rdflib.URIRef('https://schema.bsfs.io/core/Image'))) + self.graph.add((rdflib.URIRef('http://example.com/image#4321'), rdflib.RDF.type, rdflib.URIRef('https://schema.bsfs.io/core/Image'))) # node comments self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('Me, Myself, and I', datatype=rdflib.XSD.string))) self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('hello world', datatype=rdflib.XSD.string))) diff --git a/test/triple_store/sparql/test_sparql.py b/test/triple_store/sparql/test_sparql.py index f45ca37..a7e7d37 100644 --- a/test/triple_store/sparql/test_sparql.py +++ b/test/triple_store/sparql/test_sparql.py @@ -15,22 +15,25 @@ from bsfs.triple_store.sparql.sparql import SparqlStore ## code ## +ns.bse = ns.bsfs.Entity() + class TestSparqlStore(unittest.TestCase): def setUp(self): self.schema = bsc.from_string(''' prefix rdfs: prefix xsd: - prefix bsfs: - prefix bse: + prefix bsfs: + prefix bse: + prefix bsl: bsfs:Entity rdfs:subClassOf bsfs:Node . bsfs:Tag rdfs:subClassOf bsfs:Node . bsfs:User rdfs:subClassOf bsfs:Node . xsd:string rdfs:subClassOf bsfs:Literal . - bsfs:Number rdfs:subClassOf bsfs:Literal . - bsfs:BinaryBlob rdfs:subClassOf bsfs:Literal . - xsd:integer rdfs:subClassOf bsfs:Number . + bsl:Number rdfs:subClassOf bsfs:Literal . + bsl:BinaryBlob rdfs:subClassOf bsfs:Literal . + xsd:integer rdfs:subClassOf bsl:Number . # non-unique literal bse:comment rdfs:subClassOf bsfs:Predicate ; @@ -59,7 +62,7 @@ class TestSparqlStore(unittest.TestCase): # binary range bse:asset rdfs:subClassOf bsfs:Predicate ; rdfs:domain bsfs:Entity ; - rdfs:range bsfs:BinaryBlob . + rdfs:range bsl:BinaryBlob . ''') self.schema_triples = { @@ -68,12 +71,12 @@ class TestSparqlStore(unittest.TestCase): (rdflib.URIRef(ns.bsfs.Tag), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), (rdflib.URIRef(ns.bsfs.User), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), (rdflib.URIRef(ns.xsd.string), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), - (rdflib.URIRef(ns.bsfs.Array), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), - (rdflib.URIRef(ns.bsfs.BinaryBlob), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), - (rdflib.URIRef(ns.bsfs.Feature), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Array)), - (rdflib.URIRef(ns.bsfs.Number), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), - (rdflib.URIRef(ns.bsfs.Time), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), - (rdflib.URIRef(ns.xsd.integer), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Number)), + (rdflib.URIRef(ns.bsl.Array), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.bsl.BinaryBlob), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.bsl.Array.Feature), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsl.Array)), + (rdflib.URIRef(ns.bsl.Number), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.bsl.Time), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.xsd.integer), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsl.Number)), (rdflib.URIRef(ns.bse.comment), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), (rdflib.URIRef(ns.bse.filesize), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), (rdflib.URIRef(ns.bse.tag), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), @@ -101,7 +104,7 @@ class TestSparqlStore(unittest.TestCase): store.schema = bsc.from_string(''' 
prefix rdfs: prefix xsd: - prefix bsfs: + prefix bsfs: bsfs:Entity rdfs:subClassOf bsfs:Node . bsfs:Document rdfs:subClassOf bsfs:Entity . @@ -206,10 +209,10 @@ class TestSparqlStore(unittest.TestCase): curr = curr + bsc.from_string(''' prefix rdfs: prefix xsd: - prefix bsfs: - prefix bse: - prefix bst: - prefix bsc: + prefix bsfs: + prefix bse: + prefix bst: + prefix bsc: bsfs:Entity rdfs:subClassOf bsfs:Node . bsfs:Tag rdfs:subClassOf bsfs:Node . @@ -255,16 +258,16 @@ class TestSparqlStore(unittest.TestCase): (rdflib.URIRef(ns.xsd.boolean), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), (rdflib.URIRef(ns.bse.shared), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), (rdflib.URIRef(ns.bse.partOf), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), - (rdflib.URIRef('http://bsfs.ai/schema/Tag#usedIn'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), - (rdflib.URIRef('http://bsfs.ai/schema/Collection#tag'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), - (rdflib.URIRef('http://bsfs.ai/schema/Tag#principal'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + (rdflib.URIRef('https://schema.bsfs.io/core/Tag#usedIn'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + (rdflib.URIRef('https://schema.bsfs.io/core/Collection#tag'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + (rdflib.URIRef('https://schema.bsfs.io/core/Tag#principal'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), }) # add some instances of the new classes p_partOf = curr.predicate(ns.bse.partOf) p_shared = curr.predicate(ns.bse.shared) - p_usedIn = curr.predicate('http://bsfs.ai/schema/Tag#usedIn') - p_ctag = curr.predicate('http://bsfs.ai/schema/Collection#tag') - p_principal = curr.predicate('http://bsfs.ai/schema/Tag#principal') + p_usedIn = curr.predicate('https://schema.bsfs.io/core/Tag#usedIn') + p_ctag = curr.predicate('https://schema.bsfs.io/core/Collection#tag') + p_principal = curr.predicate('https://schema.bsfs.io/core/Tag#principal') store.create(curr.node(ns.bsfs.Collection), {URI('http://example.com/me/collection#1234'), URI('http://example.com/me/collection#4321')}) # add some more triples store.set(curr.node(ns.bsfs.Entity), ent_ids, p_shared, {True}) @@ -283,9 +286,9 @@ class TestSparqlStore(unittest.TestCase): (rdflib.URIRef(ns.xsd.boolean), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), (rdflib.URIRef(ns.bse.shared), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), (rdflib.URIRef(ns.bse.partOf), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), - (rdflib.URIRef('http://bsfs.ai/schema/Tag#usedIn'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), - (rdflib.URIRef('http://bsfs.ai/schema/Collection#tag'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), - (rdflib.URIRef('http://bsfs.ai/schema/Tag#principal'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + (rdflib.URIRef('https://schema.bsfs.io/core/Tag#usedIn'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + (rdflib.URIRef('https://schema.bsfs.io/core/Collection#tag'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + (rdflib.URIRef('https://schema.bsfs.io/core/Tag#principal'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), # collections (rdflib.URIRef('http://example.com/me/collection#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Collection)), (rdflib.URIRef('http://example.com/me/collection#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Collection)), @@ -309,17 
+312,18 @@ class TestSparqlStore(unittest.TestCase): curr = bsc.from_string(''' prefix rdfs: prefix xsd: - prefix bsfs: - prefix bse: - prefix bst: + prefix bsfs: + prefix bse: + prefix bst: + prefix bsl: bsfs:Entity rdfs:subClassOf bsfs:Node . bsfs:Tag rdfs:subClassOf bsfs:Node . bsfs:User rdfs:subClassOf bsfs:Node . xsd:boolean rdfs:subClassOf bsfs:Literal . - bsfs:Number rdfs:subClassOf bsfs:Literal . - xsd:integer rdfs:subClassOf bsfs:Number . + bsl:Number rdfs:subClassOf bsfs:Literal . + xsd:integer rdfs:subClassOf bsl:Number . bse:filesize rdfs:subClassOf bsfs:Predicate ; rdfs:domain bsfs:Entity ; @@ -360,16 +364,16 @@ class TestSparqlStore(unittest.TestCase): (rdflib.URIRef(ns.bsfs.Tag), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), (rdflib.URIRef(ns.bsfs.User), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)), (rdflib.URIRef(ns.xsd.boolean), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), - (rdflib.URIRef(ns.bsfs.Array), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), - (rdflib.URIRef(ns.bsfs.BinaryBlob), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), - (rdflib.URIRef(ns.bsfs.Feature), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Array)), - (rdflib.URIRef(ns.bsfs.Number), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), - (rdflib.URIRef(ns.bsfs.Time), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), - (rdflib.URIRef(ns.xsd.integer), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Number)), + (rdflib.URIRef(ns.bsl.Array), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.bsl.BinaryBlob), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.bsl.Array.Feature), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsl.Array)), + (rdflib.URIRef(ns.bsl.Number), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.bsl.Time), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)), + (rdflib.URIRef(ns.xsd.integer), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsl.Number)), (rdflib.URIRef(ns.bse.shared), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), (rdflib.URIRef(ns.bse.tag), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), (rdflib.URIRef(ns.bse.filesize), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), - (rdflib.URIRef('http://bsfs.ai/schema/Tag#principal'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), + (rdflib.URIRef('https://schema.bsfs.io/core/Tag#principal'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)), # node instances (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), @@ -400,13 +404,15 @@ class TestSparqlStore(unittest.TestCase): invalid = bsc.from_string(''' prefix rdfs: prefix xsd: - prefix bsfs: - prefix bse: + prefix bsfs: + prefix bse: + prefix bsl: + prefix bsa: - bsfs:Array rdfs:subClassOf bsfs:Literal . - bsfs:Feature rdfs:subClassOf bsfs:Array . + bsl:Array rdfs:subClassOf bsfs:Literal . + bsa:Feature rdfs:subClassOf bsl:Array . - bsfs:Colors rdfs:subClassOf bsfs:Feature ; + bsfs:Colors rdfs:subClassOf bsa:Feature ; bsfs:dimension "4"^^xsd:integer ; bsfs:distance bsfs:foobar . @@ -417,8 +423,8 @@ class TestSparqlStore(unittest.TestCase): invalid = bsc.from_string(''' prefix rdfs: prefix xsd: - prefix bsfs: - prefix bse: + prefix bsfs: + prefix bse: bsfs:Entity rdfs:subClassOf bsfs:Node . bsfs:Tag rdfs:subClassOf bsfs:Entity . 
# inconsistent with previous tag definition @@ -433,8 +439,8 @@ class TestSparqlStore(unittest.TestCase): invalid = bsc.from_string(''' prefix rdfs: prefix xsd: - prefix bsfs: - prefix bse: + prefix bsfs: + prefix bse: bsfs:Entity rdfs:subClassOf bsfs:Node . bsfs:User rdfs:subClassOf bsfs:Node . @@ -945,9 +951,9 @@ class TestSparqlStore(unittest.TestCase): p_asset = store.schema.predicate(ns.bse.asset) store.set(ent_type, ent_ids, p_asset, {bytes(range(128)), bytes(range(128, 256))}) blob1 = rdflib.Literal('AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn8=', - datatype=rdflib.URIRef(ns.bsfs.BinaryBlob)) + datatype=rdflib.URIRef(ns.bsl.BinaryBlob)) blob2 = rdflib.Literal('gIGCg4SFhoeIiYqLjI2Oj5CRkpOUlZaXmJmam5ydnp+goaKjpKWmp6ipqqusra6vsLGys7S1tre4ubq7vL2+v8DBwsPExcbHyMnKy8zNzs/Q0dLT1NXW19jZ2tvc3d7f4OHi4+Tl5ufo6err7O3u7/Dx8vP09fb3+Pn6+/z9/v8=', - datatype=rdflib.URIRef(ns.bsfs.BinaryBlob)) + datatype=rdflib.URIRef(ns.bsl.BinaryBlob)) self.assertTrue(set(store._graph).issuperset({ (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_asset.uri), blob1), (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_asset.uri), blob2), diff --git a/test/triple_store/sparql/test_utils.py b/test/triple_store/sparql/test_utils.py index 8f894bb..44a1299 100644 --- a/test/triple_store/sparql/test_utils.py +++ b/test/triple_store/sparql/test_utils.py @@ -16,6 +16,8 @@ from bsfs.triple_store.sparql.utils import GenHopName, Query ## code ## +ns.bse = ns.bsfs.Entity() + class TestGenHopName(unittest.TestCase): def test_next(self): # baseline @@ -40,7 +42,7 @@ class TestGenHopName(unittest.TestCase): class TestQuery(unittest.TestCase): def setUp(self): - self.root_type = 'http://bsfs.ai/schema/Entity' + self.root_type = 'https://schema.bsfs.io/core/Entity' self.root_head = '?root' self.select = (('?head', 'name'), ) self.where = f'?root <{ns.bse.tag}> ?head' @@ -56,8 +58,8 @@ class TestQuery(unittest.TestCase): self.assertEqual(q, Query(self.root_type, self.root_head, self.select, self.where)) self.assertEqual(hash(q), hash(Query(self.root_type, self.root_head, self.select, self.where))) # comparison respects root_type - self.assertNotEqual(q, Query('http://bsfs.ai/schema/Tag', self.root_head, self.select, self.where)) - self.assertNotEqual(hash(q), hash(Query('http://bsfs.ai/schema/Tag', self.root_head, self.select, self.where))) + self.assertNotEqual(q, Query('https://schema.bsfs.io/core/Tag', self.root_head, self.select, self.where)) + self.assertNotEqual(hash(q), hash(Query('https://schema.bsfs.io/core/Tag', self.root_head, self.select, self.where))) # comparison respects root_head self.assertNotEqual(q, Query(self.root_type, '?foo', self.select, self.where)) self.assertNotEqual(hash(q), hash(Query(self.root_type, '?foo', self.select, self.where))) @@ -69,7 +71,7 @@ class TestQuery(unittest.TestCase): self.assertNotEqual(hash(q), hash(Query(self.root_type, self.root_head, self.select, '?root bse:filename ?head'))) # string conversion self.assertEqual(str(q), q.query) - self.assertEqual(repr(q), "Query(http://bsfs.ai/schema/Entity, ?root, (('?head', 'name'),), ?root ?head)") + self.assertEqual(repr(q), "Query(https://schema.bsfs.io/core/Entity, ?root, (('?head', 'name'),), ?root ?head)") def test_add(self): q = Query(self.root_type, self.root_head, self.select, self.where) @@ -77,7 +79,7 @@ class TestQuery(unittest.TestCase): self.assertRaises(TypeError, 
operator.add, q, 1234) self.assertRaises(TypeError, operator.add, q, 'foobar') # root type and head must match - self.assertRaises(ValueError, operator.add, q, Query('http://bsfs.ai/schema/Tag', self.root_head)) + self.assertRaises(ValueError, operator.add, q, Query('https://schema.bsfs.io/core/Node/Tag', self.root_head)) self.assertRaises(ValueError, operator.add, q, Query(self.root_type, '?foobar')) # select and were are combined combo = q + Query(self.root_type, self.root_head, (('?foo', 'bar'), ), f'?root <{ns.bse.filename}> ?foo') @@ -113,23 +115,23 @@ class TestQuery(unittest.TestCase): return value # query composes a valid query q = Query(self.root_type, self.root_head, self.select, self.where) - self.assertEqual(normalize(q.query), normalize(f'select distinct ?root (?head as ?name) where {{ ?root <{ns.rdf.type}>/<{ns.rdfs.subClassOf}>* . ?root <{ns.bse.tag}> ?head }} order by str(?root)')) + self.assertEqual(normalize(q.query), normalize(f'select distinct ?root (?head as ?name) where {{ ?root <{ns.rdf.type}>/<{ns.rdfs.subClassOf}>* . ?root <{ns.bse.tag}> ?head }} order by str(?root)')) # select and where are optional q = Query(self.root_type, self.root_head) - self.assertEqual(normalize(q.query), normalize(f'select distinct ?root where {{ ?root <{ns.rdf.type}>/<{ns.rdfs.subClassOf}>* . }} order by str(?root)')) + self.assertEqual(normalize(q.query), normalize(f'select distinct ?root where {{ ?root <{ns.rdf.type}>/<{ns.rdfs.subClassOf}>* . }} order by str(?root)')) # select and where need not to correspond q = Query(self.root_type, self.root_head, (('?head', 'name'), )) - self.assertEqual(normalize(q.query), normalize(f'select distinct ?root (?head as ?name) where {{ ?root <{ns.rdf.type}>/<{ns.rdfs.subClassOf}>* . }} order by str(?root)')) + self.assertEqual(normalize(q.query), normalize(f'select distinct ?root (?head as ?name) where {{ ?root <{ns.rdf.type}>/<{ns.rdfs.subClassOf}>* . }} order by str(?root)')) # query is used for string representation self.assertEqual(str(q), q.query) def test_call(self): graph = rdflib.Graph() # schema - graph.add((rdflib.URIRef('http://bsfs.ai/schema/Document'), rdflib.URIRef(ns.rdfs.subClassOf), rdflib.URIRef('http://bsfs.ai/schema/Entity'))) + graph.add((rdflib.URIRef('https://schema.bsfs.io/core/Document'), rdflib.URIRef(ns.rdfs.subClassOf), rdflib.URIRef('https://schema.bsfs.io/core/Entity'))) # nodes - graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.rdf.type), rdflib.URIRef('http://bsfs.ai/schema/Entity'))) - graph.add((rdflib.URIRef('http://example.com/doc#1234'), rdflib.URIRef(ns.rdf.type), rdflib.URIRef('http://bsfs.ai/schema/Document'))) + graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.rdf.type), rdflib.URIRef('https://schema.bsfs.io/core/Entity'))) + graph.add((rdflib.URIRef('http://example.com/doc#1234'), rdflib.URIRef(ns.rdf.type), rdflib.URIRef('https://schema.bsfs.io/core/Document'))) # links graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.tag), rdflib.Literal('tag#1234', datatype=rdflib.XSD.string))) graph.add((rdflib.URIRef('http://example.com/doc#1234'), rdflib.URIRef(ns.bse.tag), rdflib.Literal('tag#1234', datatype=rdflib.XSD.string))) -- cgit v1.2.3