From 791918039979d0743fd2ea4b9a5e74593ff96fd0 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Mon, 19 Dec 2022 13:32:34 +0100 Subject: query ast file structures and essential interfaces --- test/triple_store/sparql/__init__.py | 0 test/triple_store/sparql/test_sparql.py | 771 ++++++++++++++++++++++++++++++++ test/triple_store/test_sparql.py | 769 ------------------------------- 3 files changed, 771 insertions(+), 769 deletions(-) create mode 100644 test/triple_store/sparql/__init__.py create mode 100644 test/triple_store/sparql/test_sparql.py delete mode 100644 test/triple_store/test_sparql.py (limited to 'test/triple_store') diff --git a/test/triple_store/sparql/__init__.py b/test/triple_store/sparql/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/test/triple_store/sparql/test_sparql.py b/test/triple_store/sparql/test_sparql.py new file mode 100644 index 0000000..0bf664a --- /dev/null +++ b/test/triple_store/sparql/test_sparql.py @@ -0,0 +1,771 @@ +""" + +Part of the bsfs test suite. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import rdflib +import unittest + +# bsie imports +from bsfs import schema as _schema +from bsfs.namespace import ns +from bsfs.utils import errors, URI + +# objects to test +from bsfs.triple_store.sparql.sparql import SparqlStore + + +## code ## + +class TestSparqlStore(unittest.TestCase): + def setUp(self): + self.schema = _schema.Schema.from_string(''' + prefix rdfs: + prefix xsd: + + prefix bsfs: + prefix bse: + + bsfs:Entity rdfs:subClassOf bsfs:Node . + bsfs:Tag rdfs:subClassOf bsfs:Node . + bsfs:User rdfs:subClassOf bsfs:Node . + xsd:string rdfs:subClassOf bsfs:Literal . + xsd:integer rdfs:subClassOf bsfs:Literal . + + # non-unique literal + bse:comment rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range xsd:string ; + bsfs:unique "false"^^xsd:boolean . + + # unique literal + bse:filesize rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range xsd:integer ; + bsfs:unique "true"^^xsd:boolean . + + # non-unique node + bse:tag rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range bsfs:Tag ; + bsfs:unique "false"^^xsd:boolean . + + # unique node + bse:author rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range bsfs:User ; + bsfs:unique "true"^^xsd:boolean . + + ''') + + def test_essentials(self): + store = SparqlStore.Open() + # equality + self.assertEqual(store, store) + self.assertEqual(hash(store), hash(store)) + self.assertNotEqual(store, SparqlStore.Open()) + self.assertNotEqual(hash(store), hash(SparqlStore.Open())) + # string conversion + self.assertEqual(str(store), 'SparqlStore(uri=None)') + self.assertEqual(repr(store), 'SparqlStore(uri=None)') + # open + self.assertIsInstance(SparqlStore.Open(), SparqlStore) + + + def test__has_type(self): + # setup store + store = SparqlStore.Open() + store.schema = _schema.Schema.from_string(''' + prefix rdfs: + prefix xsd: + prefix bsfs: + + bsfs:Entity rdfs:subClassOf bsfs:Node . + bsfs:Document rdfs:subClassOf bsfs:Entity . + bsfs:Image rdfs:subClassOf bsfs:Entity . + bsfs:PDF rdfs:subClassOf bsfs:Document . 
+ + ''') + # add some instances + store.create(store.schema.node(ns.bsfs.Entity), {URI('http://example.com/me/entity#1234')}) + store.create(store.schema.node(ns.bsfs.Document), {URI('http://example.com/me/document#1234')}) + store.create(store.schema.node(ns.bsfs.Image), {URI('http://example.com/me/image#1234')}) + store.create(store.schema.node(ns.bsfs.PDF), {URI('http://example.com/me/pdf#1234')}) + + # node_type must be in the schema + self.assertRaises(errors.ConsistencyError, store._has_type, URI('http://example.com/me/entity#1234'), store.schema.node(ns.bsfs.Node).get_child(ns.bsfs.invalid)) + + # returns False on inexistent nodes + self.assertFalse(store._has_type(URI('http://example.com/me/entity#4321'), store.schema.node(ns.bsfs.Entity))) + self.assertFalse(store._has_type(URI('http://example.com/me/document#4321'), store.schema.node(ns.bsfs.Document))) + self.assertFalse(store._has_type(URI('http://example.com/me/image#4321'), store.schema.node(ns.bsfs.Image))) + self.assertFalse(store._has_type(URI('http://example.com/me/pdf#4321'), store.schema.node(ns.bsfs.PDF))) + + # _has_type checks direct types + self.assertTrue(store._has_type(URI('http://example.com/me/entity#1234'), store.schema.node(ns.bsfs.Entity))) + self.assertTrue(store._has_type(URI('http://example.com/me/document#1234'), store.schema.node(ns.bsfs.Document))) + self.assertTrue(store._has_type(URI('http://example.com/me/image#1234'), store.schema.node(ns.bsfs.Image))) + self.assertTrue(store._has_type(URI('http://example.com/me/pdf#1234'), store.schema.node(ns.bsfs.PDF))) + + # _has_type checks type hierarchy + self.assertFalse(store._has_type(URI('http://example.com/me/entity#1234'), store.schema.node(ns.bsfs.Document))) + self.assertFalse(store._has_type(URI('http://example.com/me/entity#1234'), store.schema.node(ns.bsfs.Image))) + self.assertFalse(store._has_type(URI('http://example.com/me/entity#1234'), store.schema.node(ns.bsfs.PDF))) + + self.assertTrue(store._has_type(URI('http://example.com/me/document#1234'), store.schema.node(ns.bsfs.Entity))) + self.assertFalse(store._has_type(URI('http://example.com/me/document#1234'), store.schema.node(ns.bsfs.Image))) + self.assertFalse(store._has_type(URI('http://example.com/me/document#1234'), store.schema.node(ns.bsfs.PDF))) + + self.assertTrue(store._has_type(URI('http://example.com/me/image#1234'), store.schema.node(ns.bsfs.Entity))) + self.assertFalse(store._has_type(URI('http://example.com/me/image#1234'), store.schema.node(ns.bsfs.Document))) + self.assertFalse(store._has_type(URI('http://example.com/me/image#1234'), store.schema.node(ns.bsfs.PDF))) + + self.assertTrue(store._has_type(URI('http://example.com/me/pdf#1234'), store.schema.node(ns.bsfs.Entity))) + self.assertTrue(store._has_type(URI('http://example.com/me/pdf#1234'), store.schema.node(ns.bsfs.Document))) + self.assertFalse(store._has_type(URI('http://example.com/me/pdf#1234'), store.schema.node(ns.bsfs.Image))) + + + def test_schema(self): + # setup + store = SparqlStore.Open() + curr = self.schema + p_comment = curr.predicate(ns.bse.comment) + p_filesize = curr.predicate(ns.bse.filesize) + p_tag = curr.predicate(ns.bse.tag) + p_author = curr.predicate(ns.bse.author) + + # migrate to an initial schema + store.schema = curr + # store has migrated + self.assertEqual(store.schema, curr) + + # add some instances + ent_ids = {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')} + tag_ids = {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')} + 
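#       Note on the expected-triple assertions used throughout this test: the
#       create()/set() calls below populate store._graph (treated as an rdflib
#       graph by these tests), and the assertions then compare set(store._graph)
#       against hand-written rdflib triples.  A minimal, self-contained sketch of
#       that correspondence, illustrative only and mirroring calls that already
#       appear in this file:
#
#           g = rdflib.Graph()
#           g.add((rdflib.URIRef('http://example.com/me/entity#1234'),
#                  rdflib.RDF.type,
#                  rdflib.URIRef(ns.bsfs.Entity)))
#           g.add((rdflib.URIRef('http://example.com/me/entity#1234'),
#                  rdflib.URIRef(p_comment.uri),
#                  rdflib.Literal('foo', datatype=rdflib.XSD.string)))
#           # iterating an rdflib.Graph yields its (subject, predicate, object) triples
#           assert set(g) == {
#               (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)),
#               (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)),
#           }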
store.create(curr.node(ns.bsfs.Entity), ent_ids) + store.create(curr.node(ns.bsfs.Tag), tag_ids) + store.create(curr.node(ns.bsfs.User), {URI('http://example.com/me')}) + # add some triples + store.set(curr.node(ns.bsfs.Entity), ent_ids, p_comment, {'foo', 'bar'}) + store.set(curr.node(ns.bsfs.Entity), ent_ids, p_filesize, {1234}) + store.set(curr.node(ns.bsfs.Entity), ent_ids, p_tag, + {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')}) + store.set(curr.node(ns.bsfs.Entity), ent_ids, p_author, + {URI('http://example.com/me')}) + # check instances + instances = { + # node instances + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), + (rdflib.URIRef('http://example.com/me/tag#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), + (rdflib.URIRef('http://example.com/me'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.User)), + # comments + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), + # filesize + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), + # tags + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), + # author + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me')), + } + self.assertSetEqual(set(store._graph), instances) + + # add some classes to the schema + curr = curr + _schema.Schema.from_string(''' + prefix rdfs: + prefix xsd: + prefix bsfs: + prefix bse: + prefix bst: + prefix bsc: + + bsfs:Entity rdfs:subClassOf bsfs:Node . + bsfs:Tag rdfs:subClassOf bsfs:Node . + bsfs:Collection rdfs:subClassOf bsfs:Node . + xsd:boolean rdfs:subClassOf bsfs:Literal . + + # literal + bse:shared rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range xsd:boolean ; + bsfs:unique "true"^^xsd:boolean . + + # node + bse:partOf rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range bsfs:Collection ; + bsfs:unique "false"^^xsd:boolean . 
+ + # predicates across auxiliary node classes + bst:usedIn rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Tag ; + rdfs:range bsfs:Collection ; + bsfs:unique "false"^^xsd:boolean . + + bsc:tag rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Collection ; + rdfs:range bsfs:Tag ; + bsfs:unique "false"^^xsd:boolean . + + bst:principal rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Tag ; + rdfs:range bsfs:Node ; + bsfs:unique "true"^^xsd:boolean . + + ''') + # store migrated to the new schema + store.schema = curr + self.assertEqual(store.schema, curr) + # instances have not changed + self.assertSetEqual(set(store._graph), instances) + # add some instances of the new classes + p_partOf = curr.predicate(ns.bse.partOf) + p_shared = curr.predicate(ns.bse.shared) + p_usedIn = curr.predicate('http://bsfs.ai/schema/Tag#usedIn') + p_ctag = curr.predicate('http://bsfs.ai/schema/Collection#tag') + p_principal = curr.predicate('http://bsfs.ai/schema/Tag#principal') + store.create(curr.node(ns.bsfs.Collection), {URI('http://example.com/me/collection#1234'), URI('http://example.com/me/collection#4321')}) + # add some more triples + store.set(curr.node(ns.bsfs.Entity), ent_ids, p_shared, {True}) + store.set(curr.node(ns.bsfs.Entity), ent_ids, p_partOf, + {URI('http://example.com/me/collection#1234'), URI('http://example.com/me/collection#4321')}) + store.set(curr.node(ns.bsfs.Tag), {URI('http://example.com/me/tag#1234')}, p_usedIn, + {URI('http://example.com/me/collection#1234')}) + store.set(curr.node(ns.bsfs.Collection), {URI('http://example.com/me/collection#4321')}, p_ctag, + {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')}) + store.set(curr.node(ns.bsfs.Tag), {URI('http://example.com/me/tag#1234')}, p_principal, + {URI('http://example.com/me/collection#1234')}) + # new instances are now in the graph + self.assertSetEqual(set(store._graph), instances | { + # collections + (rdflib.URIRef('http://example.com/me/collection#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Collection)), + (rdflib.URIRef('http://example.com/me/collection#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Collection)), + # partOf + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_partOf.uri), rdflib.URIRef('http://example.com/me/collection#1234')), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_partOf.uri), rdflib.URIRef('http://example.com/me/collection#4321')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_partOf.uri), rdflib.URIRef('http://example.com/me/collection#1234')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_partOf.uri), rdflib.URIRef('http://example.com/me/collection#4321')), + # shared + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_shared.uri), rdflib.Literal('true', datatype=rdflib.XSD.boolean)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_shared.uri), rdflib.Literal('true', datatype=rdflib.XSD.boolean)), + # auxiliary node connections + (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.URIRef(p_usedIn.uri), rdflib.URIRef('http://example.com/me/collection#1234')), + (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.URIRef(p_principal.uri), rdflib.URIRef('http://example.com/me/collection#1234')), + (rdflib.URIRef('http://example.com/me/collection#4321'), rdflib.URIRef(p_ctag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/collection#4321'), rdflib.URIRef(p_ctag.uri), 
rdflib.URIRef('http://example.com/me/tag#4321')), + }) + + + # remove some classes from the schema + curr = _schema.Schema.from_string(''' + prefix rdfs: + prefix xsd: + prefix bsfs: + prefix bse: + prefix bst: + + bsfs:Entity rdfs:subClassOf bsfs:Node . + bsfs:Tag rdfs:subClassOf bsfs:Node . + bsfs:User rdfs:subClassOf bsfs:Node . + + xsd:boolean rdfs:subClassOf bsfs:Literal . + xsd:integer rdfs:subClassOf bsfs:Literal . + + bse:filesize rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range xsd:integer ; + bsfs:unique "true"^^xsd:boolean . + + bse:tag rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range bsfs:Tag ; + bsfs:unique "false"^^xsd:boolean . + + bse:shared rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range xsd:boolean ; + bsfs:unique "true"^^xsd:boolean . + + bst:principal rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Tag ; + rdfs:range bsfs:Node ; + bsfs:unique "true"^^xsd:boolean . + + # removed: bsfs:Collection + # removed: xsd:string + # removed: bse:comment (bsfs:Entity -> xsd:string) + # removed: bse:partOf (bsfs:Entity -> bsfs:Collection) + # removed: bse:author (bsfs:entity -> bsfs:User) + # removed: bst:usedIn (bsfs:Tag -> bsfs:Collection) + # removed: bsc:tag (bsfs:Collection -> bsfs:Tag) + + ''') + # store migrated to the new schema + store.schema = curr + self.assertEqual(store.schema, curr) + # instances of old classes were removed + self.assertSetEqual(set(store._graph), { + # node instances + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), + (rdflib.URIRef('http://example.com/me/tag#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), + (rdflib.URIRef('http://example.com/me'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.User)), + # filesize + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), + # tags + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), + # shared + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_shared.uri), rdflib.Literal('true', datatype=rdflib.XSD.boolean)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_shared.uri), rdflib.Literal('true', datatype=rdflib.XSD.boolean)), + }) + + # can only assign schema instances + self.assertRaises(TypeError, setattr, store, 'schema', None) + self.assertRaises(TypeError, setattr, store, 'schema', 1234) + self.assertRaises(TypeError, setattr, store, 'schema', 'foo') + class Foo(): pass + self.assertRaises(TypeError, setattr, store, 'schema', Foo()) + + # cannot migrate to incompatible schema + invalid = _schema.Schema.from_string(''' + prefix rdfs: + prefix xsd: + 
prefix bsfs: + prefix bse: + + bsfs:Entity rdfs:subClassOf bsfs:Node . + bsfs:Tag rdfs:subClassOf bsfs:Entity . # inconsistent with previous tag definition + + bse:tag rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range bsfs:Tag ; + bsfs:unique "false"^^xsd:boolean . + + ''') + self.assertRaises(errors.ConsistencyError, setattr, store, 'schema', invalid) + invalid = _schema.Schema.from_string(''' + prefix rdfs: + prefix xsd: + prefix bsfs: + prefix bse: + + bsfs:Entity rdfs:subClassOf bsfs:Node . + bsfs:User rdfs:subClassOf bsfs:Node . + + # inconsistent predicate + bse:tag rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range bsfs:User; + bsfs:unique "false"^^xsd:boolean . + + ''') + self.assertRaises(errors.ConsistencyError, setattr, store, 'schema', invalid) + + + def test_transaction(self): + # store setup + store = SparqlStore.Open() + store.schema = self.schema + p_tag = store.schema.predicate(ns.bse.tag) + p_filesize = store.schema.predicate(ns.bse.filesize) + # prepare node types + ent_type = store.schema.node(ns.bsfs.Entity) + tag_type = store.schema.node(ns.bsfs.Tag) + ent_ids = {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')} + tag_ids = {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')} + # target instances + instances = { + # node instances + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), + (rdflib.URIRef('http://example.com/me/tag#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), + # links + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), + } + + # add some data + store.create(ent_type, ent_ids) + store.create(tag_type, tag_ids) + store.set(ent_type, ent_ids, p_tag, tag_ids) + store.set(ent_type, ent_ids, p_filesize, {1234}) + # current transaction is visible + self.assertSetEqual(set(store._graph), instances | { + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), + }) + + # rollback undoes previous changes + store.rollback() + self.assertSetEqual(set(store._graph), set()) + + # add some data once more + store.create(ent_type, ent_ids) + store.create(tag_type, tag_ids) + store.set(ent_type, ent_ids, p_tag, tag_ids) + store.set(ent_type, ent_ids, p_filesize, {1234}) + # current transaction is visible + self.assertSetEqual(set(store._graph), instances | { + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), + }) + 
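#       The commit/rollback semantics exercised in this test can be thought of as
#       "remember the triples at the last commit; rollback restores exactly that
#       snapshot".  A minimal sketch of that model over an rdflib graph -- an
#       illustration of the behaviour asserted here, not the actual SparqlStore
#       implementation (which this patch does not show):
#
#           class SnapshotGraph:
#               def __init__(self):
#                   self.graph = rdflib.Graph()   # working state, visible immediately
#                   self._committed = set()       # triples as of the last commit
#               def commit(self):
#                   self._committed = set(self.graph)
#               def rollback(self):
#                   for triple in set(self.graph) - self._committed:
#                       self.graph.remove(triple)   # undo additions since the commit
#                   for triple in self._committed - set(self.graph):
#                       self.graph.add(triple)      # restore deletions since the commit
#
#       Before the first commit the snapshot is empty, which matches the rollback
#       to an empty graph asserted above.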
+ # commit saves changes + store.commit() + self.assertSetEqual(set(store._graph), instances | { + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), + }) + + # add additional data + store.create(ent_type, {URI('http://example.com/me/entity#hello')}) + store.set(ent_type, {URI('http://example.com/me/entity#hello')}, p_tag, tag_ids) + store.set(ent_type, ent_ids, p_filesize, {4321}) + self.assertSetEqual(set(store._graph), instances | { + (rdflib.URIRef('http://example.com/me/entity#hello'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/entity#hello'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/entity#hello'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(4321, datatype=rdflib.XSD.integer)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(4321, datatype=rdflib.XSD.integer)), + }) + + # rollback undoes only changes since last commit + store.rollback() + self.assertSetEqual(set(store._graph), instances | { + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), + }) + + def test_get(self): + raise NotImplementedError() + + def test_exists(self): + # store setup + store = SparqlStore.Open() + store.schema = self.schema + # prepare node types + ent_type = store.schema.node(ns.bsfs.Entity) + tag_type = store.schema.node(ns.bsfs.Tag) + # create node instances + ent_ids = { + URI('http://example.com/me/entity#1234'), + URI('http://example.com/me/entity#4321'), + } + tag_ids = { + URI('http://example.com/me/tag#1234'), + URI('http://example.com/me/tag#4321'), + } + store.create(ent_type, ent_ids) + store.create(tag_type, tag_ids) + + # exists returns all existing nodes of the correct type + self.assertSetEqual(ent_ids, set(store.exists(ent_type, ent_ids))) + self.assertSetEqual(tag_ids, set(store.exists(tag_type, tag_ids))) + # exists returns only nodes that match the type + self.assertSetEqual(set(), set(store.exists(ent_type, tag_ids))) + self.assertSetEqual({URI('http://example.com/me/entity#1234')}, set(store.exists(ent_type, { + URI('http://example.com/me/tag#1234'), + URI('http://example.com/me/entity#1234'), + }))) + # exists returns only nodes that exist + self.assertSetEqual(set(), set(store.exists(ent_type, { + URI('http://example.com/me/entity#foo'), + URI('http://example.com/me/entity#bar'), + }))) + self.assertSetEqual({URI('http://example.com/me/entity#1234')}, set(store.exists(ent_type, { + URI('http://example.com/me/entity#foo'), + URI('http://example.com/me/entity#1234'), + }))) + + + def test_create(self): + # setup + store = SparqlStore.Open() + store.schema = self.schema + + # node type must be valid + self.assertRaises(errors.ConsistencyError, store.create, self.schema.node(ns.bsfs.Entity).get_child(ns.bsfs.invalid), { + URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')}) + + # can create some nodes + ent_type = 
store.schema.node(ns.bsfs.Entity) + store.create(ent_type, {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')}) + self.assertSetEqual(set(store._graph), { + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + }) + + # existing nodes are skipped + store.create(ent_type, {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#5678')}) + self.assertSetEqual(set(store._graph), { + # previous triples + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + # new triples + (rdflib.URIRef('http://example.com/me/entity#5678'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + }) + + # can create nodes of a different type + tag_type = store.schema.node(ns.bsfs.Tag) + store.create(tag_type, {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')}) + self.assertSetEqual(set(store._graph), { + # previous triples + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/entity#5678'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + # new triples + (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), + (rdflib.URIRef('http://example.com/me/tag#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), + }) + + # creation does not change types of existing nodes + tag_type = store.schema.node(ns.bsfs.Tag) + store.create(tag_type, {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')}) + self.assertSetEqual(set(store._graph), { + # previous triples + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + (rdflib.URIRef('http://example.com/me/entity#5678'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), + # new triples + (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), + (rdflib.URIRef('http://example.com/me/tag#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), + }) + + + def test_set(self): + # store setup + store = SparqlStore.Open() + store.schema = self.schema + # prepare node types + ent_type = store.schema.node(ns.bsfs.Entity) + user_type = store.schema.node(ns.bsfs.User) + tag_type = store.schema.node(ns.bsfs.Tag) + # prepare predicates + p_filesize = store.schema.predicate(ns.bse.filesize) + p_comment = store.schema.predicate(ns.bse.comment) + p_author = store.schema.predicate(ns.bse.author) + p_tag = store.schema.predicate(ns.bse.tag) + p_invalid = store.schema.predicate(ns.bsfs.Predicate).get_child(ns.bsfs.foo, range=store.schema.node(ns.bsfs.Tag)) + # create node instances + ent_ids = { + URI('http://example.com/me/entity#1234'), + URI('http://example.com/me/entity#4321'), + } + tag_ids = { + URI('http://example.com/me/tag#1234'), + URI('http://example.com/me/tag#4321'), + URI('http://example.com/me/tag#foo'), + URI('http://example.com/me/tag#bar'), + URI('http://example.com/me/tag#foobar'), + URI('http://example.com/me/tag#xyz'), + } + user_ids = { + 
URI('http://example.com/me/user#1234'), + URI('http://example.com/me/user#4321'), + } + store.create(ent_type, ent_ids) + store.create(tag_type, tag_ids) + store.create(user_type, user_ids) + + # invalid node_type is not permitted + self.assertRaises(errors.ConsistencyError, store.set, self.schema.node(ns.bsfs.Node).get_child(ns.bse.foo), + ent_ids, p_comment, {'hello world'}) + + # invalid predicate is not permitted + self.assertRaises(errors.ConsistencyError, store.set, ent_type, ent_ids, p_invalid, {'http://example.com/me/tag#1234'}) + + # predicate must match node_type + self.assertRaises(errors.ConsistencyError, store.set, tag_type, tag_ids, p_filesize, {1234}) + + # empty value does not change the graph + plen = len(store._graph) + store.set(ent_type, ent_ids, p_filesize, []) + store.set(ent_type, ent_ids, p_comment, []) + store.set(ent_type, ent_ids, p_author, []) + store.set(ent_type, ent_ids, p_tag, []) + self.assertEqual(plen, len(store._graph)) + + # cannot set multiple values on unique predicates + self.assertRaises(ValueError, store.set, ent_type, ent_ids, p_filesize, {1234, 4321}) + self.assertRaises(ValueError, store.set, ent_type, ent_ids, p_author, {URI('http://example.com/me/user#1234'), URI('http://example.com/me/user#4321')}) + + # value nodes must exist + self.assertRaises(errors.InstanceError, store.set, ent_type, ent_ids, p_author, {URI('http://example.com/me/user#invalid')}) + self.assertRaises(errors.InstanceError, store.set, ent_type, ent_ids, p_tag, {URI('http://example.com/me/tag#invalid')}) + + # value node types must be consistent with the predicate + self.assertRaises(errors.InstanceError, store.set, ent_type, ent_ids, p_author, {URI('http://example.com/me/entity#1234')}) + self.assertRaises(errors.InstanceError, store.set, ent_type, ent_ids, p_tag, {URI('http://example.com/me/entity#1234')}) + + # all value nodes must exist and be consistent + self.assertRaises(errors.InstanceError, store.set, ent_type, ent_ids, p_tag, { + URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#invalid'), URI('http://example.com/me/entity#1234')}) + + + # set unique literal + store.set(ent_type, ent_ids, p_filesize, {1234}) + self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), + set(store._graph)) + self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), + set(store._graph)) + # re-assigning the same node changes nothing + store.set(ent_type, ent_ids, p_filesize, {1234}) + self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), + set(store._graph)) + self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), + set(store._graph)) + # cannot set multiple unique literals + self.assertRaises(ValueError, store.set, ent_type, ent_ids, p_filesize, {1234, 4321}) # same test as above + # unique literals are overwritten by set + store.set(ent_type, ent_ids, p_filesize, {4321}) + self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('4321', datatype=rdflib.XSD.integer)), + set(store._graph)) + self.assertNotIn( + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', 
datatype=rdflib.XSD.integer)), + set(store._graph)) + self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('4321', datatype=rdflib.XSD.integer)), + set(store._graph)) + self.assertNotIn( + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), + set(store._graph)) + + # set non-unique literal + store.set(ent_type, ent_ids, p_comment, {'foobar'}) + self.assertTrue(set(store._graph).issuperset({ + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foobar', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foobar', datatype=rdflib.XSD.string)), + })) + # re-assigning the same node changes nothing + store.set(ent_type, ent_ids, p_comment, {'foobar'}) + self.assertTrue(set(store._graph).issuperset({ + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foobar', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foobar', datatype=rdflib.XSD.string)), + })) + # can set multiple non-unique literals at once + store.set(ent_type, ent_ids, p_comment, {'foo', 'bar'}) + self.assertTrue(set(store._graph).issuperset({ + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), + })) + # non-unique literals are appended by set + store.set(ent_type, ent_ids, p_comment, {'hello world'}) + self.assertTrue(set(store._graph).issuperset({ + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('hello world', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('hello world', datatype=rdflib.XSD.string)), + })) + + # set unique node + store.set(ent_type, ent_ids, p_author, {URI('http://example.com/me/user#1234')}) + self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#1234')), + set(store._graph)) + self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#1234')), + set(store._graph)) + # re-assigning the same node changes nothing + store.set(ent_type, ent_ids, p_author, {URI('http://example.com/me/user#1234')}) + 
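#       A unique predicate (bse:author, bse:filesize) holds at most one value per
#       subject, so set() behaves like replace; non-unique predicates (bse:comment,
#       bse:tag) accumulate values.  In SPARQL Update terms this corresponds roughly
#       to a DELETE of the old value followed by an INSERT.  Illustrative only --
#       the store's actual update queries are not part of this patch, and this
#       assumes store._graph is a plain rdflib.Graph:
#
#           store._graph.update(
#               'DELETE WHERE { <http://example.com/me/entity#1234> <%s> ?old }'
#               % p_author.uri)
#           store._graph.update(
#               'INSERT DATA { <http://example.com/me/entity#1234> <%s> '
#               '<http://example.com/me/user#4321> }'
#               % p_author.uri)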
self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#1234')), + set(store._graph)) + self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#1234')), + set(store._graph)) + # cannot set multiple unique nodes + self.assertRaises(ValueError, store.set, ent_type, ent_ids, p_author, {URI('http://example.com/me/user#1234'), URI('http://example.com/me/user#4321')}) + # unique nodes are overwritten by set + store.set(ent_type, ent_ids, p_author, {URI('http://example.com/me/user#4321')}) + self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#4321')), + set(store._graph)) + self.assertNotIn( + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#1234')), + set(store._graph)) + self.assertIn( + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#4321')), + set(store._graph)) + self.assertNotIn( + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#1234')), + set(store._graph)) + + # set non-unique node + store.set(ent_type, ent_ids, p_tag, {'http://example.com/me/tag#foobar'}) + self.assertTrue(set(store._graph).issuperset({ + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foobar')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foobar')), + })) + # re-assigning the same node changes nothing + store.set(ent_type, ent_ids, p_tag, {'http://example.com/me/tag#foobar'}) + self.assertTrue(set(store._graph).issuperset({ + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foobar')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foobar')), + })) + # can set multiple non-unique literals at once + store.set(ent_type, ent_ids, p_tag, {'http://example.com/me/tag#1234', 'http://example.com/me/tag#4321'}) + self.assertTrue(set(store._graph).issuperset({ + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), + })) + # non-unique nodes are appended by set + store.set(ent_type, ent_ids, p_tag, {'http://example.com/me/tag#foo', 'http://example.com/me/tag#bar'}) + self.assertTrue(set(store._graph).issuperset({ + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), + (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foo')), + 
(rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#bar')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foo')), + (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#bar')), + })) + + # nothing happens when no guids are given + plen = len(store._graph) + store.set(ent_type, set(), p_comment, {'xyz'}) + store.set(ent_type, set(), p_tag, {URI('http://example.com/me/tag#xyz')}) + self.assertEqual(plen, len(store._graph)) + + # guids must be instances of node_type + self.assertRaises(errors.InstanceError, store.set, ent_type, tag_ids, p_comment, {'xyz'}) + # inexistent guids + self.assertRaises(errors.InstanceError, store.set, ent_type, {URI('http://example.com/me/entity#foobar')}, p_comment, {'xyz'}) + + +## main ## + +if __name__ == '__main__': + unittest.main() + +## EOF ## diff --git a/test/triple_store/test_sparql.py b/test/triple_store/test_sparql.py deleted file mode 100644 index 8d98749..0000000 --- a/test/triple_store/test_sparql.py +++ /dev/null @@ -1,769 +0,0 @@ -""" - -Part of the bsfs test suite. -A copy of the license is provided with the project. -Author: Matthias Baumgartner, 2022 -""" -# imports -import rdflib -import unittest - -# bsie imports -from bsfs import schema as _schema -from bsfs.namespace import ns -from bsfs.utils import errors, URI - -# objects to test -from bsfs.triple_store.sparql import SparqlStore - - -## code ## - -class TestSparqlStore(unittest.TestCase): - def setUp(self): - self.schema = _schema.Schema.from_string(''' - prefix rdfs: - prefix xsd: - - prefix bsfs: - prefix bse: - - bsfs:Entity rdfs:subClassOf bsfs:Node . - bsfs:Tag rdfs:subClassOf bsfs:Node . - bsfs:User rdfs:subClassOf bsfs:Node . - xsd:string rdfs:subClassOf bsfs:Literal . - xsd:integer rdfs:subClassOf bsfs:Literal . - - # non-unique literal - bse:comment rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; - rdfs:range xsd:string ; - bsfs:unique "false"^^xsd:boolean . - - # unique literal - bse:filesize rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; - rdfs:range xsd:integer ; - bsfs:unique "true"^^xsd:boolean . - - # non-unique node - bse:tag rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; - rdfs:range bsfs:Tag ; - bsfs:unique "false"^^xsd:boolean . - - # unique node - bse:author rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; - rdfs:range bsfs:User ; - bsfs:unique "true"^^xsd:boolean . - - ''') - - def test_essentials(self): - store = SparqlStore.Open() - # equality - self.assertEqual(store, store) - self.assertEqual(hash(store), hash(store)) - self.assertNotEqual(store, SparqlStore.Open()) - self.assertNotEqual(hash(store), hash(SparqlStore.Open())) - # string conversion - self.assertEqual(str(store), 'SparqlStore(uri=None)') - self.assertEqual(repr(store), 'SparqlStore(uri=None)') - # open - self.assertIsInstance(SparqlStore.Open(), SparqlStore) - - - def test__has_type(self): - # setup store - store = SparqlStore.Open() - store.schema = _schema.Schema.from_string(''' - prefix rdfs: - prefix xsd: - prefix bsfs: - - bsfs:Entity rdfs:subClassOf bsfs:Node . 
- bsfs:Document rdfs:subClassOf bsfs:Entity . - bsfs:Image rdfs:subClassOf bsfs:Entity . - bsfs:PDF rdfs:subClassOf bsfs:Document . - - ''') - # add some instances - store.create(store.schema.node(ns.bsfs.Entity), {URI('http://example.com/me/entity#1234')}) - store.create(store.schema.node(ns.bsfs.Document), {URI('http://example.com/me/document#1234')}) - store.create(store.schema.node(ns.bsfs.Image), {URI('http://example.com/me/image#1234')}) - store.create(store.schema.node(ns.bsfs.PDF), {URI('http://example.com/me/pdf#1234')}) - - # node_type must be in the schema - self.assertRaises(errors.ConsistencyError, store._has_type, URI('http://example.com/me/entity#1234'), store.schema.node(ns.bsfs.Node).get_child(ns.bsfs.invalid)) - - # returns False on inexistent nodes - self.assertFalse(store._has_type(URI('http://example.com/me/entity#4321'), store.schema.node(ns.bsfs.Entity))) - self.assertFalse(store._has_type(URI('http://example.com/me/document#4321'), store.schema.node(ns.bsfs.Document))) - self.assertFalse(store._has_type(URI('http://example.com/me/image#4321'), store.schema.node(ns.bsfs.Image))) - self.assertFalse(store._has_type(URI('http://example.com/me/pdf#4321'), store.schema.node(ns.bsfs.PDF))) - - # _has_type checks direct types - self.assertTrue(store._has_type(URI('http://example.com/me/entity#1234'), store.schema.node(ns.bsfs.Entity))) - self.assertTrue(store._has_type(URI('http://example.com/me/document#1234'), store.schema.node(ns.bsfs.Document))) - self.assertTrue(store._has_type(URI('http://example.com/me/image#1234'), store.schema.node(ns.bsfs.Image))) - self.assertTrue(store._has_type(URI('http://example.com/me/pdf#1234'), store.schema.node(ns.bsfs.PDF))) - - # _has_type checks type hierarchy - self.assertFalse(store._has_type(URI('http://example.com/me/entity#1234'), store.schema.node(ns.bsfs.Document))) - self.assertFalse(store._has_type(URI('http://example.com/me/entity#1234'), store.schema.node(ns.bsfs.Image))) - self.assertFalse(store._has_type(URI('http://example.com/me/entity#1234'), store.schema.node(ns.bsfs.PDF))) - - self.assertTrue(store._has_type(URI('http://example.com/me/document#1234'), store.schema.node(ns.bsfs.Entity))) - self.assertFalse(store._has_type(URI('http://example.com/me/document#1234'), store.schema.node(ns.bsfs.Image))) - self.assertFalse(store._has_type(URI('http://example.com/me/document#1234'), store.schema.node(ns.bsfs.PDF))) - - self.assertTrue(store._has_type(URI('http://example.com/me/image#1234'), store.schema.node(ns.bsfs.Entity))) - self.assertFalse(store._has_type(URI('http://example.com/me/image#1234'), store.schema.node(ns.bsfs.Document))) - self.assertFalse(store._has_type(URI('http://example.com/me/image#1234'), store.schema.node(ns.bsfs.PDF))) - - self.assertTrue(store._has_type(URI('http://example.com/me/pdf#1234'), store.schema.node(ns.bsfs.Entity))) - self.assertTrue(store._has_type(URI('http://example.com/me/pdf#1234'), store.schema.node(ns.bsfs.Document))) - self.assertFalse(store._has_type(URI('http://example.com/me/pdf#1234'), store.schema.node(ns.bsfs.Image))) - - - def test_schema(self): - # setup - store = SparqlStore.Open() - curr = self.schema - p_comment = curr.predicate(ns.bse.comment) - p_filesize = curr.predicate(ns.bse.filesize) - p_tag = curr.predicate(ns.bse.tag) - p_author = curr.predicate(ns.bse.author) - - # migrate to an initial schema - store.schema = curr - # store has migrated - self.assertEqual(store.schema, curr) - - # add some instances - ent_ids = {URI('http://example.com/me/entity#1234'), 
URI('http://example.com/me/entity#4321')} - tag_ids = {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')} - store.create(curr.node(ns.bsfs.Entity), ent_ids) - store.create(curr.node(ns.bsfs.Tag), tag_ids) - store.create(curr.node(ns.bsfs.User), {URI('http://example.com/me')}) - # add some triples - store.set(curr.node(ns.bsfs.Entity), ent_ids, p_comment, {'foo', 'bar'}) - store.set(curr.node(ns.bsfs.Entity), ent_ids, p_filesize, {1234}) - store.set(curr.node(ns.bsfs.Entity), ent_ids, p_tag, - {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')}) - store.set(curr.node(ns.bsfs.Entity), ent_ids, p_author, - {URI('http://example.com/me')}) - # check instances - instances = { - # node instances - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), - (rdflib.URIRef('http://example.com/me/tag#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), - (rdflib.URIRef('http://example.com/me'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.User)), - # comments - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), - # filesize - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), - # tags - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - # author - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me')), - } - self.assertSetEqual(set(store._graph), instances) - - # add some classes to the schema - curr = curr + _schema.Schema.from_string(''' - prefix rdfs: - prefix xsd: - prefix bsfs: - prefix bse: - prefix bst: - prefix bsc: - - bsfs:Entity rdfs:subClassOf bsfs:Node . - bsfs:Tag rdfs:subClassOf bsfs:Node . - bsfs:Collection rdfs:subClassOf bsfs:Node . - xsd:boolean rdfs:subClassOf bsfs:Literal . - - # literal - bse:shared rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; - rdfs:range xsd:boolean ; - bsfs:unique "true"^^xsd:boolean . 
- - # node - bse:partOf rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; - rdfs:range bsfs:Collection ; - bsfs:unique "false"^^xsd:boolean . - - # predicates across auxiliary node classes - bst:usedIn rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Tag ; - rdfs:range bsfs:Collection ; - bsfs:unique "false"^^xsd:boolean . - - bsc:tag rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Collection ; - rdfs:range bsfs:Tag ; - bsfs:unique "false"^^xsd:boolean . - - bst:principal rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Tag ; - rdfs:range bsfs:Node ; - bsfs:unique "true"^^xsd:boolean . - - ''') - # store migrated to the new schema - store.schema = curr - self.assertEqual(store.schema, curr) - # instances have not changed - self.assertSetEqual(set(store._graph), instances) - # add some instances of the new classes - p_partOf = curr.predicate(ns.bse.partOf) - p_shared = curr.predicate(ns.bse.shared) - p_usedIn = curr.predicate('http://bsfs.ai/schema/Tag#usedIn') - p_ctag = curr.predicate('http://bsfs.ai/schema/Collection#tag') - p_principal = curr.predicate('http://bsfs.ai/schema/Tag#principal') - store.create(curr.node(ns.bsfs.Collection), {URI('http://example.com/me/collection#1234'), URI('http://example.com/me/collection#4321')}) - # add some more triples - store.set(curr.node(ns.bsfs.Entity), ent_ids, p_shared, {True}) - store.set(curr.node(ns.bsfs.Entity), ent_ids, p_partOf, - {URI('http://example.com/me/collection#1234'), URI('http://example.com/me/collection#4321')}) - store.set(curr.node(ns.bsfs.Tag), {URI('http://example.com/me/tag#1234')}, p_usedIn, - {URI('http://example.com/me/collection#1234')}) - store.set(curr.node(ns.bsfs.Collection), {URI('http://example.com/me/collection#4321')}, p_ctag, - {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')}) - store.set(curr.node(ns.bsfs.Tag), {URI('http://example.com/me/tag#1234')}, p_principal, - {URI('http://example.com/me/collection#1234')}) - # new instances are now in the graph - self.assertSetEqual(set(store._graph), instances | { - # collections - (rdflib.URIRef('http://example.com/me/collection#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Collection)), - (rdflib.URIRef('http://example.com/me/collection#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Collection)), - # partOf - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_partOf.uri), rdflib.URIRef('http://example.com/me/collection#1234')), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_partOf.uri), rdflib.URIRef('http://example.com/me/collection#4321')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_partOf.uri), rdflib.URIRef('http://example.com/me/collection#1234')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_partOf.uri), rdflib.URIRef('http://example.com/me/collection#4321')), - # shared - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_shared.uri), rdflib.Literal('true', datatype=rdflib.XSD.boolean)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_shared.uri), rdflib.Literal('true', datatype=rdflib.XSD.boolean)), - # auxiliary node connections - (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.URIRef(p_usedIn.uri), rdflib.URIRef('http://example.com/me/collection#1234')), - (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.URIRef(p_principal.uri), rdflib.URIRef('http://example.com/me/collection#1234')), - (rdflib.URIRef('http://example.com/me/collection#4321'), 
rdflib.URIRef(p_ctag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/collection#4321'), rdflib.URIRef(p_ctag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - }) - - - # remove some classes from the schema - curr = _schema.Schema.from_string(''' - prefix rdfs: - prefix xsd: - prefix bsfs: - prefix bse: - prefix bst: - - bsfs:Entity rdfs:subClassOf bsfs:Node . - bsfs:Tag rdfs:subClassOf bsfs:Node . - bsfs:User rdfs:subClassOf bsfs:Node . - - xsd:boolean rdfs:subClassOf bsfs:Literal . - xsd:integer rdfs:subClassOf bsfs:Literal . - - bse:filesize rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; - rdfs:range xsd:integer ; - bsfs:unique "true"^^xsd:boolean . - - bse:tag rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; - rdfs:range bsfs:Tag ; - bsfs:unique "false"^^xsd:boolean . - - bse:shared rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; - rdfs:range xsd:boolean ; - bsfs:unique "true"^^xsd:boolean . - - bst:principal rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Tag ; - rdfs:range bsfs:Node ; - bsfs:unique "true"^^xsd:boolean . - - # removed: bsfs:Collection - # removed: xsd:string - # removed: bse:comment (bsfs:Entity -> xsd:string) - # removed: bse:partOf (bsfs:Entity -> bsfs:Collection) - # removed: bse:author (bsfs:entity -> bsfs:User) - # removed: bst:usedIn (bsfs:Tag -> bsfs:Collection) - # removed: bsc:tag (bsfs:Collection -> bsfs:Tag) - - ''') - # store migrated to the new schema - store.schema = curr - self.assertEqual(store.schema, curr) - # instances of old classes were removed - self.assertSetEqual(set(store._graph), { - # node instances - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), - (rdflib.URIRef('http://example.com/me/tag#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), - (rdflib.URIRef('http://example.com/me'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.User)), - # filesize - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), - # tags - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - # shared - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_shared.uri), rdflib.Literal('true', datatype=rdflib.XSD.boolean)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_shared.uri), rdflib.Literal('true', datatype=rdflib.XSD.boolean)), - }) - - # can only assign schema instances - self.assertRaises(TypeError, setattr, store, 'schema', None) - self.assertRaises(TypeError, setattr, store, 'schema', 1234) - self.assertRaises(TypeError, setattr, store, 'schema', 'foo') - class Foo(): pass - 
self.assertRaises(TypeError, setattr, store, 'schema', Foo()) - - # cannot migrate to incompatible schema - invalid = _schema.Schema.from_string(''' - prefix rdfs: - prefix xsd: - prefix bsfs: - prefix bse: - - bsfs:Entity rdfs:subClassOf bsfs:Node . - bsfs:Tag rdfs:subClassOf bsfs:Entity . # inconsistent with previous tag definition - - bse:tag rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; - rdfs:range bsfs:Tag ; - bsfs:unique "false"^^xsd:boolean . - - ''') - self.assertRaises(errors.ConsistencyError, setattr, store, 'schema', invalid) - invalid = _schema.Schema.from_string(''' - prefix rdfs: - prefix xsd: - prefix bsfs: - prefix bse: - - bsfs:Entity rdfs:subClassOf bsfs:Node . - bsfs:User rdfs:subClassOf bsfs:Node . - - # inconsistent predicate - bse:tag rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; - rdfs:range bsfs:User; - bsfs:unique "false"^^xsd:boolean . - - ''') - self.assertRaises(errors.ConsistencyError, setattr, store, 'schema', invalid) - - - def test_transaction(self): - # store setup - store = SparqlStore.Open() - store.schema = self.schema - p_tag = store.schema.predicate(ns.bse.tag) - p_filesize = store.schema.predicate(ns.bse.filesize) - # prepare node types - ent_type = store.schema.node(ns.bsfs.Entity) - tag_type = store.schema.node(ns.bsfs.Tag) - ent_ids = {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')} - tag_ids = {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')} - # target instances - instances = { - # node instances - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), - (rdflib.URIRef('http://example.com/me/tag#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), - # links - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - } - - # add some data - store.create(ent_type, ent_ids) - store.create(tag_type, tag_ids) - store.set(ent_type, ent_ids, p_tag, tag_ids) - store.set(ent_type, ent_ids, p_filesize, {1234}) - # current transaction is visible - self.assertSetEqual(set(store._graph), instances | { - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), - }) - - # rollback undoes previous changes - store.rollback() - self.assertSetEqual(set(store._graph), set()) - - # add some data once more - store.create(ent_type, ent_ids) - store.create(tag_type, tag_ids) - store.set(ent_type, ent_ids, p_tag, tag_ids) - store.set(ent_type, ent_ids, p_filesize, {1234}) - # current transaction is visible - self.assertSetEqual(set(store._graph), instances | { - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), 
rdflib.Literal(1234, datatype=rdflib.XSD.integer)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), - }) - - # commit saves changes - store.commit() - self.assertSetEqual(set(store._graph), instances | { - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), - }) - - # add additional data - store.create(ent_type, {URI('http://example.com/me/entity#hello')}) - store.set(ent_type, {URI('http://example.com/me/entity#hello')}, p_tag, tag_ids) - store.set(ent_type, ent_ids, p_filesize, {4321}) - self.assertSetEqual(set(store._graph), instances | { - (rdflib.URIRef('http://example.com/me/entity#hello'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/entity#hello'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/entity#hello'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(4321, datatype=rdflib.XSD.integer)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(4321, datatype=rdflib.XSD.integer)), - }) - - # rollback undoes only changes since last commit - store.rollback() - self.assertSetEqual(set(store._graph), instances | { - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal(1234, datatype=rdflib.XSD.integer)), - }) - - - def test_exists(self): - # store setup - store = SparqlStore.Open() - store.schema = self.schema - # prepare node types - ent_type = store.schema.node(ns.bsfs.Entity) - tag_type = store.schema.node(ns.bsfs.Tag) - # create node instances - ent_ids = { - URI('http://example.com/me/entity#1234'), - URI('http://example.com/me/entity#4321'), - } - tag_ids = { - URI('http://example.com/me/tag#1234'), - URI('http://example.com/me/tag#4321'), - } - store.create(ent_type, ent_ids) - store.create(tag_type, tag_ids) - - # exists returns all existing nodes of the correct type - self.assertSetEqual(ent_ids, set(store.exists(ent_type, ent_ids))) - self.assertSetEqual(tag_ids, set(store.exists(tag_type, tag_ids))) - # exists returns only nodes that match the type - self.assertSetEqual(set(), set(store.exists(ent_type, tag_ids))) - self.assertSetEqual({URI('http://example.com/me/entity#1234')}, set(store.exists(ent_type, { - URI('http://example.com/me/tag#1234'), - URI('http://example.com/me/entity#1234'), - }))) - # exists returns only nodes that exist - self.assertSetEqual(set(), set(store.exists(ent_type, { - URI('http://example.com/me/entity#foo'), - URI('http://example.com/me/entity#bar'), - }))) - self.assertSetEqual({URI('http://example.com/me/entity#1234')}, set(store.exists(ent_type, { - URI('http://example.com/me/entity#foo'), - URI('http://example.com/me/entity#1234'), - }))) - - - def test_create(self): - # setup - store = SparqlStore.Open() - store.schema = self.schema - - # node type must be valid - self.assertRaises(errors.ConsistencyError, store.create, 
self.schema.node(ns.bsfs.Entity).get_child(ns.bsfs.invalid), { - URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')}) - - # can create some nodes - ent_type = store.schema.node(ns.bsfs.Entity) - store.create(ent_type, {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')}) - self.assertSetEqual(set(store._graph), { - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - }) - - # existing nodes are skipped - store.create(ent_type, {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#5678')}) - self.assertSetEqual(set(store._graph), { - # previous triples - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - # new triples - (rdflib.URIRef('http://example.com/me/entity#5678'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - }) - - # can create nodes of a different type - tag_type = store.schema.node(ns.bsfs.Tag) - store.create(tag_type, {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')}) - self.assertSetEqual(set(store._graph), { - # previous triples - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/entity#5678'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - # new triples - (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), - (rdflib.URIRef('http://example.com/me/tag#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), - }) - - # creation does not change types of existing nodes - tag_type = store.schema.node(ns.bsfs.Tag) - store.create(tag_type, {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')}) - self.assertSetEqual(set(store._graph), { - # previous triples - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - (rdflib.URIRef('http://example.com/me/entity#5678'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)), - # new triples - (rdflib.URIRef('http://example.com/me/tag#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), - (rdflib.URIRef('http://example.com/me/tag#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Tag)), - }) - - - def test_set(self): - # store setup - store = SparqlStore.Open() - store.schema = self.schema - # prepare node types - ent_type = store.schema.node(ns.bsfs.Entity) - user_type = store.schema.node(ns.bsfs.User) - tag_type = store.schema.node(ns.bsfs.Tag) - # prepare predicates - p_filesize = store.schema.predicate(ns.bse.filesize) - p_comment = store.schema.predicate(ns.bse.comment) - p_author = store.schema.predicate(ns.bse.author) - p_tag = store.schema.predicate(ns.bse.tag) - p_invalid = store.schema.predicate(ns.bsfs.Predicate).get_child(ns.bsfs.foo, range=store.schema.node(ns.bsfs.Tag)) - # create node instances - ent_ids = { - URI('http://example.com/me/entity#1234'), - URI('http://example.com/me/entity#4321'), - } - tag_ids = { - URI('http://example.com/me/tag#1234'), - URI('http://example.com/me/tag#4321'), - 
URI('http://example.com/me/tag#foo'), - URI('http://example.com/me/tag#bar'), - URI('http://example.com/me/tag#foobar'), - URI('http://example.com/me/tag#xyz'), - } - user_ids = { - URI('http://example.com/me/user#1234'), - URI('http://example.com/me/user#4321'), - } - store.create(ent_type, ent_ids) - store.create(tag_type, tag_ids) - store.create(user_type, user_ids) - - # invalid node_type is not permitted - self.assertRaises(errors.ConsistencyError, store.set, self.schema.node(ns.bsfs.Node).get_child(ns.bse.foo), - ent_ids, p_comment, {'hello world'}) - - # invalid predicate is not permitted - self.assertRaises(errors.ConsistencyError, store.set, ent_type, ent_ids, p_invalid, {'http://example.com/me/tag#1234'}) - - # predicate must match node_type - self.assertRaises(errors.ConsistencyError, store.set, tag_type, tag_ids, p_filesize, {1234}) - - # empty value does not change the graph - plen = len(store._graph) - store.set(ent_type, ent_ids, p_filesize, []) - store.set(ent_type, ent_ids, p_comment, []) - store.set(ent_type, ent_ids, p_author, []) - store.set(ent_type, ent_ids, p_tag, []) - self.assertEqual(plen, len(store._graph)) - - # cannot set multiple values on unique predicates - self.assertRaises(ValueError, store.set, ent_type, ent_ids, p_filesize, {1234, 4321}) - self.assertRaises(ValueError, store.set, ent_type, ent_ids, p_author, {URI('http://example.com/me/user#1234'), URI('http://example.com/me/user#4321')}) - - # value nodes must exist - self.assertRaises(errors.InstanceError, store.set, ent_type, ent_ids, p_author, {URI('http://example.com/me/user#invalid')}) - self.assertRaises(errors.InstanceError, store.set, ent_type, ent_ids, p_tag, {URI('http://example.com/me/tag#invalid')}) - - # value node types must be consistent with the predicate - self.assertRaises(errors.InstanceError, store.set, ent_type, ent_ids, p_author, {URI('http://example.com/me/entity#1234')}) - self.assertRaises(errors.InstanceError, store.set, ent_type, ent_ids, p_tag, {URI('http://example.com/me/entity#1234')}) - - # all value nodes must exist and be consistent - self.assertRaises(errors.InstanceError, store.set, ent_type, ent_ids, p_tag, { - URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#invalid'), URI('http://example.com/me/entity#1234')}) - - - # set unique literal - store.set(ent_type, ent_ids, p_filesize, {1234}) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), - set(store._graph)) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), - set(store._graph)) - # re-assigning the same node changes nothing - store.set(ent_type, ent_ids, p_filesize, {1234}) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), - set(store._graph)) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), - set(store._graph)) - # cannot set multiple unique literals - self.assertRaises(ValueError, store.set, ent_type, ent_ids, p_filesize, {1234, 4321}) # same test as above - # unique literals are overwritten by set - store.set(ent_type, ent_ids, p_filesize, {4321}) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('4321', 
datatype=rdflib.XSD.integer)), - set(store._graph)) - self.assertNotIn( - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), - set(store._graph)) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('4321', datatype=rdflib.XSD.integer)), - set(store._graph)) - self.assertNotIn( - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_filesize.uri), rdflib.Literal('1234', datatype=rdflib.XSD.integer)), - set(store._graph)) - - # set non-unique literal - store.set(ent_type, ent_ids, p_comment, {'foobar'}) - self.assertTrue(set(store._graph).issuperset({ - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foobar', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foobar', datatype=rdflib.XSD.string)), - })) - # re-assigning the same node changes nothing - store.set(ent_type, ent_ids, p_comment, {'foobar'}) - self.assertTrue(set(store._graph).issuperset({ - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foobar', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foobar', datatype=rdflib.XSD.string)), - })) - # can set multiple non-unique literals at once - store.set(ent_type, ent_ids, p_comment, {'foo', 'bar'}) - self.assertTrue(set(store._graph).issuperset({ - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), - })) - # non-unique literals are appended by set - store.set(ent_type, ent_ids, p_comment, {'hello world'}) - self.assertTrue(set(store._graph).issuperset({ - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_comment.uri), rdflib.Literal('hello world', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('foo', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('bar', datatype=rdflib.XSD.string)), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_comment.uri), rdflib.Literal('hello world', datatype=rdflib.XSD.string)), - })) - - # set unique node - store.set(ent_type, ent_ids, p_author, {URI('http://example.com/me/user#1234')}) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#1234')), - set(store._graph)) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_author.uri), 
rdflib.URIRef('http://example.com/me/user#1234')), - set(store._graph)) - # re-assigning the same node changes nothing - store.set(ent_type, ent_ids, p_author, {URI('http://example.com/me/user#1234')}) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#1234')), - set(store._graph)) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#1234')), - set(store._graph)) - # cannot set multiple unique nodes - self.assertRaises(ValueError, store.set, ent_type, ent_ids, p_author, {URI('http://example.com/me/user#1234'), URI('http://example.com/me/user#4321')}) - # unique nodes are overwritten by set - store.set(ent_type, ent_ids, p_author, {URI('http://example.com/me/user#4321')}) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#4321')), - set(store._graph)) - self.assertNotIn( - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#1234')), - set(store._graph)) - self.assertIn( - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#4321')), - set(store._graph)) - self.assertNotIn( - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_author.uri), rdflib.URIRef('http://example.com/me/user#1234')), - set(store._graph)) - - # set non-unique node - store.set(ent_type, ent_ids, p_tag, {'http://example.com/me/tag#foobar'}) - self.assertTrue(set(store._graph).issuperset({ - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foobar')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foobar')), - })) - # re-assigning the same node changes nothing - store.set(ent_type, ent_ids, p_tag, {'http://example.com/me/tag#foobar'}) - self.assertTrue(set(store._graph).issuperset({ - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foobar')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foobar')), - })) - # can set multiple non-unique literals at once - store.set(ent_type, ent_ids, p_tag, {'http://example.com/me/tag#1234', 'http://example.com/me/tag#4321'}) - self.assertTrue(set(store._graph).issuperset({ - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - })) - # non-unique nodes are appended by set - store.set(ent_type, ent_ids, p_tag, {'http://example.com/me/tag#foo', 'http://example.com/me/tag#bar'}) - self.assertTrue(set(store._graph).issuperset({ - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/entity#1234'), 
rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foo')), - (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#bar')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#1234')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#4321')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#foo')), - (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.URIRef(p_tag.uri), rdflib.URIRef('http://example.com/me/tag#bar')), - })) - - # nothing happens when no guids are given - plen = len(store._graph) - store.set(ent_type, set(), p_comment, {'xyz'}) - store.set(ent_type, set(), p_tag, {URI('http://example.com/me/tag#xyz')}) - self.assertEqual(plen, len(store._graph)) - - # guids must be instances of node_type - self.assertRaises(errors.InstanceError, store.set, ent_type, tag_ids, p_comment, {'xyz'}) - # inexistent guids - self.assertRaises(errors.InstanceError, store.set, ent_type, {URI('http://example.com/me/entity#foobar')}, p_comment, {'xyz'}) - - -## main ## - -if __name__ == '__main__': - unittest.main() - -## EOF ## -- cgit v1.2.3 From a0f2308adcb226d28de3355bc7115a6d9b669462 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Mon, 19 Dec 2022 13:40:02 +0100 Subject: import fixes --- test/triple_store/test_base.py | 3 +++ 1 file changed, 3 insertions(+) (limited to 'test/triple_store') diff --git a/test/triple_store/test_base.py b/test/triple_store/test_base.py index a4b0559..a0c3260 100644 --- a/test/triple_store/test_base.py +++ b/test/triple_store/test_base.py @@ -35,6 +35,9 @@ class DummyBase(TripleStoreBase): def schema(self, schema): pass + def get(self, node_type, query): + pass + def exists(self, node_type, guids): pass -- cgit v1.2.3 From 73e39cb4967949025aefe874f401e27b0abb772c Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Thu, 22 Dec 2022 20:29:57 +0100 Subject: filter ast parser and get method in sparql store --- test/triple_store/sparql/test_parse_filter.py | 727 ++++++++++++++++++++++++++ test/triple_store/sparql/test_sparql.py | 90 +++- 2 files changed, 808 insertions(+), 9 deletions(-) create mode 100644 test/triple_store/sparql/test_parse_filter.py (limited to 'test/triple_store') diff --git a/test/triple_store/sparql/test_parse_filter.py b/test/triple_store/sparql/test_parse_filter.py new file mode 100644 index 0000000..bd19803 --- /dev/null +++ b/test/triple_store/sparql/test_parse_filter.py @@ -0,0 +1,727 @@ +""" + +Part of the bsfs test suite. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import rdflib +import unittest + +# bsie imports +from bsfs import schema as _schema +from bsfs.namespace import ns +from bsfs.query import ast +from bsfs.utils import errors + +# objects to test +from bsfs.triple_store.sparql.parse_filter import Filter + + +## code ## + +class TestParseFilter(unittest.TestCase): + def setUp(self): + # schema + self.schema = _schema.Schema.from_string(''' + prefix rdfs: + prefix xsd: + + prefix bsfs: + prefix bse: + + bsfs:Entity rdfs:subClassOf bsfs:Node . + bsfs:Image rdfs:subClassOf bsfs:Entity . + bsfs:Tag rdfs:subClassOf bsfs:Node . 
+ + xsd:string rdfs:subClassOf bsfs:Literal . + xsd:integer rdfs:subClassOf bsfs:Literal . + bsfs:URI rdfs:subClassOf bsfs:Literal . + + bse:comment rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Node ; + rdfs:range xsd:string ; + bsfs:unique "false"^^xsd:boolean . + + bse:filesize rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range xsd:integer ; + bsfs:unique "true"^^xsd:boolean . + + bse:buddy rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range bsfs:Node ; + bsfs:unique "false"^^xsd:boolean . + + bse:tag rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range bsfs:Tag ; + bsfs:unique "false"^^xsd:boolean . + + bse:representative rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Tag ; + rdfs:range bsfs:Image ; + bsfs:unique "false"^^xsd:boolean . + + bse:iso rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Image ; + rdfs:range xsd:integer ; + bsfs:unique "true"^^xsd:boolean . + + ''') + + # parser instance + self.parser = Filter(self.schema) + + # graph to test queries + self.graph = rdflib.Graph() + # schema hierarchies + self.graph.add((rdflib.URIRef('http://bsfs.ai/schema/Entity'), rdflib.RDFS.subClassOf, rdflib.URIRef('http://bsfs.ai/schema/Node'))) + self.graph.add((rdflib.URIRef('http://bsfs.ai/schema/Image'), rdflib.RDFS.subClassOf, rdflib.URIRef('http://bsfs.ai/schema/Entity'))) + self.graph.add((rdflib.URIRef('http://bsfs.ai/schema/Tag'), rdflib.RDFS.subClassOf, rdflib.URIRef('http://bsfs.ai/schema/Node'))) + # entities + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Entity'))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Entity'))) + # tags + self.graph.add((rdflib.URIRef('http://example.com/tag#1234'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Tag'))) + self.graph.add((rdflib.URIRef('http://example.com/tag#4321'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Tag'))) + # images + self.graph.add((rdflib.URIRef('http://example.com/image#1234'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Image'))) + self.graph.add((rdflib.URIRef('http://example.com/image#4321'), rdflib.RDF.type, rdflib.URIRef('http://bsfs.ai/schema/Image'))) + # node comments + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('Me, Myself, and I', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('hello world', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('hello world', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/image#1234'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('Me, Myself, and I', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/tag#1234'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('Me, Myself, and I', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/tag#4321'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('4321', datatype=rdflib.XSD.string))) + # entity filesizes + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.filesize), rdflib.Literal(1234, datatype=rdflib.XSD.integer))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), 
rdflib.URIRef(ns.bse.filesize), rdflib.Literal(4321, datatype=rdflib.XSD.integer))) + self.graph.add((rdflib.URIRef('http://example.com/image#1234'), rdflib.URIRef(ns.bse.filesize), rdflib.Literal(1234, datatype=rdflib.XSD.integer))) + self.graph.add((rdflib.URIRef('http://example.com/image#4321'), rdflib.URIRef(ns.bse.filesize), rdflib.Literal(4321, datatype=rdflib.XSD.integer))) + # entity tags + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.tag), rdflib.URIRef('http://example.com/tag#1234'))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(ns.bse.tag), rdflib.URIRef('http://example.com/tag#4321'))) + self.graph.add((rdflib.URIRef('http://example.com/image#1234'), rdflib.URIRef(ns.bse.tag), rdflib.URIRef('http://example.com/tag#1234'))) + # tag representatives + self.graph.add((rdflib.URIRef('http://example.com/tag#1234'), rdflib.URIRef(ns.bse.representative), rdflib.URIRef('http://example.com/image#1234'))) + self.graph.add((rdflib.URIRef('http://example.com/tag#4321'), rdflib.URIRef(ns.bse.representative), rdflib.URIRef('http://example.com/image#4321'))) + # entity buddies + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.buddy), rdflib.URIRef('http://example.com/image#1234'))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(ns.bse.buddy), rdflib.URIRef('http://example.com/image#4321'))) + # image iso + self.graph.add((rdflib.URIRef('http://example.com/image#1234'), rdflib.URIRef(ns.bse.iso), rdflib.Literal(1234, datatype=rdflib.XSD.integer))) + self.graph.add((rdflib.URIRef('http://example.com/image#4321'), rdflib.URIRef(ns.bse.iso), rdflib.Literal(4321, datatype=rdflib.XSD.integer))) + + + def test_routing(self): + self.assertRaises(errors.BackendError, self.parser._parse_filter_expression, '1234', None, '') + self.assertRaises(errors.BackendError, self.parser._parse_predicate_expression, '1234', None) + + def test_call(self): + # NOTE: The individual ast components are considered in the respective tests. Here, we test __call__ specifics. 
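# --- Editor's note: illustrative sketch, not part of the patch. ---
# This is the round-trip pattern used throughout this test case: Filter.__call__
# turns a root node type plus a filter AST into a SPARQL query string, which is then
# executed on an rdflib.Graph. The helper name is hypothetical; `schema` and `graph`
# stand in for the self.schema and self.graph fixtures built in setUp above.
def _sketch_filter_roundtrip(schema, graph):
    from bsfs.namespace import ns
    from bsfs.query import ast
    from bsfs.triple_store.sparql.parse_filter import Filter

    parser = Filter(schema)
    query = parser(schema.node(ns.bsfs.Entity),
                   ast.filter.Any(ns.bse.comment, ast.filter.Equals('hello world')))
    # the query selects matching guids; rdflib yields one-element result rows
    return {str(guid) for guid, in graph.query(query)}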
+ + # __call__ requires a valid root type + self.assertRaises(errors.BackendError, self.parser, self.schema.literal(ns.bsfs.Literal), None) + self.assertRaises(errors.ConsistencyError, self.parser, self.schema.node(ns.bsfs.Node).get_child(ns.bsfs.Invalid), None) + # __call__ requires a parseable root + self.assertRaises(errors.BackendError, self.parser, self.schema.node(ns.bsfs.Entity), ast.filter.FilterExpression()) + # __call__ returns an executable query + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Or( + ast.filter.Is('http://example.com/entity#1234'), + ast.filter.Is('http://example.com/entity#5678'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, {'http://example.com/entity#1234'}) + # root is optional + q = self.parser(self.schema.node(ns.bsfs.Entity)) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234', 'http://example.com/image#4321'}) + q = self.parser(self.schema.node(ns.bsfs.Tag)) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/tag#1234', 'http://example.com/tag#4321'}) + + + def test_is(self): + # _is requires a node + self.assertRaises(errors.BackendError, self.parser._is, self.schema.literal(ns.bsfs.Literal), ast.filter.Is('http://example.com/entity#1234'), '?ent') + # a single Is statement + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Is('http://example.com/entity#1234')) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234'}) + # an aggregate of Is statements + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Or( + ast.filter.Is('http://example.com/entity#1234'), + ast.filter.Is('http://example.com/entity#4321'), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/entity#4321'}) + # combined with other filters + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.And( + ast.filter.Or( + ast.filter.Is('http://example.com/entity#1234'), + ast.filter.Is('http://example.com/entity#4321'), + ), + ast.filter.Any(ns.bse.comment, + ast.filter.Equals('Me, Myself, and I') + ), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234'}) + # as argument of Any/All + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Any(ns.bse.tag, ast.filter.Is('http://example.com/tag#1234'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + + + def test_equals(self): + # _equals requires a literal + self.assertRaises(errors.BackendError, self.parser._equals, self.schema.node(ns.bsfs.Entity), ast.filter.Equals('hello world'), '?ent') + # a single Equals statement + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.Equals('hello world'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/entity#4321'}) + # a single Equals statement that includes subtypes + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + # an Equals statement on an integer + q = 
self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.Equals(4321))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#4321', 'http://example.com/image#4321'}) + + + def test_substring(self): + # _substring requires a literal + self.assertRaises(errors.BackendError, self.parser._substring, self.schema.node(ns.bsfs.Entity), ast.filter.Substring('hello world'), '?ent') + # a single Substring statement + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.Substring('hello'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/entity#4321'}) + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.Substring('lo wo'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/entity#4321'}) + # a single Substring statement that includes subtypes + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.Substring('Myself'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + # an Substring statement on an integer + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.Substring('32'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#4321', 'http://example.com/image#4321'}) + + + def test_starts_with(self): + # _starts_with requires a literal + self.assertRaises(errors.BackendError, self.parser._starts_with, self.schema.node(ns.bsfs.Entity), ast.filter.StartsWith('hello world'), '?ent') + # a single StartsWith statement + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.StartsWith('hello'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/entity#4321'}) + # a single StartsWith statement that includes subtypes + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.StartsWith('Me, Mys'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + # an StartsWith statement on an integer + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.StartsWith(432))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#4321', 'http://example.com/image#4321'}) + + + def test_ends_with(self): + # _ends_with requires a literal + self.assertRaises(errors.BackendError, self.parser._ends_with, self.schema.node(ns.bsfs.Entity), ast.filter.EndsWith('hello world'), '?ent') + # a single EndsWith statement + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.EndsWith('orld'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/entity#4321'}) + # a single EndsWith statement that includes subtypes + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.EndsWith('and I'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + # an EndsWith statement on an 
integer
+ q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.EndsWith(321)))
+ self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ {'http://example.com/entity#4321', 'http://example.com/image#4321'})
+
+
+ def test_less_than(self):
+ # _less_than requires a literal
+ self.assertRaises(errors.BackendError, self.parser._less_than, self.schema.node(ns.bsfs.Entity), ast.filter.LessThan(2000), '?ent')
+ # a single LessThan statement
+ q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.LessThan(2000)))
+ self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ {'http://example.com/image#1234'})
+ # _less_than respects boundary
+ q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.LessThan(1234, strict=True)))
+ self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set())
+ q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.LessThan(1234, strict=False)))
+ self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ {'http://example.com/image#1234'})
+ # a single LessThan statement that includes subtypes
+ q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.LessThan(2000)))
+ self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ {'http://example.com/entity#1234', 'http://example.com/image#1234'})
+ # a LessThan statement on a string
+ # always negative; note that http://example.com/tag#4321 is also not returned although its comment is a pure number
+ q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.LessThan(10_000)))
+ self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set())
+
+
+ def test_greater_than(self):
+ # _greater_than requires a literal
+ self.assertRaises(errors.BackendError, self.parser._greater_than, self.schema.node(ns.bsfs.Entity), ast.filter.GreaterThan(2000), '?ent')
+ # a single GreaterThan statement
+ q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.GreaterThan(2000)))
+ self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ {'http://example.com/image#4321'})
+ # _greater_than respects boundary
+ q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.GreaterThan(4321, strict=True)))
+ self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set())
+ q = self.parser(self.schema.node(ns.bsfs.Image), ast.filter.Any(ns.bse.iso, ast.filter.GreaterThan(4321, strict=False)))
+ self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ {'http://example.com/image#4321'})
+ # a single GreaterThan statement that includes subtypes
+ q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.filesize, ast.filter.GreaterThan(2000)))
+ self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ {'http://example.com/entity#4321', 'http://example.com/image#4321'})
+ # a GreaterThan statement on a string
+ # always positive
+ q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Any(ns.bse.comment, ast.filter.GreaterThan(0)))
+ self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ {'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234'})
+
+
+ def test_and(self):
+ # And children have to match the node type
+ self.assertRaises(errors.BackendError, self.parser,
+ self.schema.node(ns.bsfs.Entity),
+ ast.filter.And(
+ ast.filter.StartsWith('hello'),
+ ast.filter.EndsWith('world'),
+ ))
+ # no child produces an empty query
+ self.assertEqual(self.parser._and(
+ self.schema.node(ns.bsfs.Entity),
+ ast.filter.And(), '?ent'), '')
+ # And can mix different conditions
+ q = self.parser(self.schema.node(ns.bsfs.Entity),
+ ast.filter.And(
+ ast.filter.Is('http://example.com/entity#1234'),
+ ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234)),
+ ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')),
+ ))
+ self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ {'http://example.com/entity#1234'})
+ # all conditions have to match
+ q = self.parser(self.schema.node(ns.bsfs.Entity),
+ ast.filter.And(
+ ast.filter.Is('http://example.com/entity#4321'),
+ ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234)),
+ ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')),
+ ))
+ self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set())
+ q = self.parser(self.schema.node(ns.bsfs.Entity),
+ ast.filter.And(
+ ast.filter.Is('http://example.com/entity#1234'),
+ ast.filter.Any(ns.bse.filesize, ast.filter.Equals(4321)),
+ ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')),
+ ))
+ self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set())
+ q = self.parser(self.schema.node(ns.bsfs.Entity),
+ ast.filter.And(
+ ast.filter.Is('http://example.com/entity#1234'),
+ ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234)),
+ ast.filter.Any(ns.bse.comment, ast.filter.Equals('foobar')),
+ ))
+ self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set())
+ # And can be nested
+ q = self.parser(self.schema.node(ns.bsfs.Entity),
+ ast.filter.And(
+ ast.filter.Is('http://example.com/entity#1234'),
+ ast.filter.And(
+ ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234)),
+ ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')),
+ ),
+ ))
+ self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ {'http://example.com/entity#1234'})
+
+
+ def test_or(self):
+ # Or children have to match the node type
+ self.assertRaises(errors.BackendError, self.parser,
+ self.schema.node(ns.bsfs.Entity),
+ ast.filter.Or(
+ ast.filter.StartsWith('hello'),
+ ast.filter.EndsWith('world'),
+ ))
+ # no child produces an empty query
+ self.assertEqual(self.parser._and(
+ self.schema.node(ns.bsfs.Entity),
+ ast.filter.Or(), '?ent'), '')
+ # Or can mix different conditions
+ q = self.parser(self.schema.node(ns.bsfs.Entity),
+ ast.filter.Or(
+ ast.filter.Is('http://example.com/entity#1234'),
+ ast.filter.Any(ns.bse.filesize, ast.filter.Equals(4321)),
+ ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')),
+ ))
+ self.assertSetEqual({str(guid) for guid, in self.graph.query(q)},
+ {'http://example.com/entity#1234', 'http://example.com/image#1234', 'http://example.com/entity#4321', 'http://example.com/image#4321'})
+ # at least one condition has to match
+ q = self.parser(self.schema.node(ns.bsfs.Entity),
+ ast.filter.Or(
+ ast.filter.Is('http://example.com/entity#5678'),
+ ast.filter.Any(ns.bse.filesize, ast.filter.Equals(8765)),
+ ast.filter.Any(ns.bse.comment, ast.filter.Equals('foobar')),
+ ))
+ self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, set())
+ q = self.parser(self.schema.node(ns.bsfs.Entity),
+ ast.filter.Or(
+ ast.filter.Is('http://example.com/entity#1234'),
+ ast.filter.Any(ns.bse.filesize, ast.filter.Equals(8765)),
+ ast.filter.Any(ns.bse.comment,
ast.filter.Equals('foobar')), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234'}) + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Or( + ast.filter.Is('http://example.com/entity#5678'), + ast.filter.Any(ns.bse.filesize, ast.filter.Equals(4321)), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('foobar')), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#4321', 'http://example.com/image#4321'}) + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Or( + ast.filter.Is('http://example.com/entity#5678'), + ast.filter.Any(ns.bse.filesize, ast.filter.Equals(8765)), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + # Or can be nested + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Or( + ast.filter.Is('http://example.com/entity#1234'), + ast.filter.Or( + ast.filter.Any(ns.bse.filesize, ast.filter.Equals(4321)), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')), + ), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234', 'http://example.com/entity#4321', 'http://example.com/image#4321'}) + + + + def test_any(self): + # _any requires a node + self.assertRaises(errors.BackendError, self.parser._any, + self.schema.literal(ns.bsfs.Literal), + ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234)), '?ent') + # node type must match predicate's domain + self.assertRaises(errors.ConsistencyError, self.parser._any, + self.schema.node(ns.bsfs.Tag), + ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234)), '?ent') + # predicate must be valid + self.assertRaises(errors.ConsistencyError, self.parser._any, + self.schema.node(ns.bsfs.Entity), + ast.filter.Any(ns.bse.invalid, ast.filter.Equals(1234)), '?ent') + # _any returns a valid query + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + # _any can be nested + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Any(ns.bse.tag, + ast.filter.Any(ns.bse.representative, + ast.filter.Is('http://example.com/image#1234')))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + + + def test_all(self): + # All requires a Node + self.assertRaises(errors.BackendError, self.parser._all, self.schema.literal(ns.bsfs.Literal), None, '') + # All Nodes + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.All(ns.bse.tag, ast.filter.Is('http://example.com/tag#1234'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + # All values + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.All(ns.bse.comment, ast.filter.Equals('hello world'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#4321'}) + # All on value within Or branch + # entity#1234 is selected because all of its comments are in ("hello world", "Me, Myself, and I") + q = self.parser(self.schema.node(ns.bsfs.Entity), + 
ast.filter.All(ns.bse.comment, ast.filter.Or( + ast.filter.Equals('hello world'), + ast.filter.Equals('Me, Myself, and I')))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234'}) + # All requires at least one predicate/value + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.All(ns.bse.comment, ast.filter.Equals('Me, Myself, and I'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/image#1234'}) + # All within a statement + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.And( + ast.filter.All(ns.bse.tag, ast.filter.Is('http://example.com/tag#1234')), # entity#1234, image#1234 + ast.filter.All(ns.bse.comment, ast.filter.Or( # entity#1234, entity#4321, image#1234 + ast.filter.Equals('hello world'), + ast.filter.Equals('Me, Myself, and I'), + )) + ) + ) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + # All with reversed Predicate + q = self.parser(self.schema.node(ns.bsfs.Tag), + ast.filter.All(ast.filter.Predicate(ns.bse.tag, reverse=True), ast.filter.Is('http://example.com/entity#4321'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/tag#4321'}) + # All with multiple predicates + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.All(ast.filter.OneOf(ns.bse.tag, ns.bse.buddy), # entity#1234 (tag:tag#1234), entity#1234 (buddy:image#1234), image#1234(tag:tag#1234) + ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')))) # entity#1234, image#1234, tag#1234 + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + + + + def test_not(self): + # Not applies on conditions + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Not(ast.filter.Is('http://example.com/entity#1234'))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/image#1234', 'http://example.com/entity#4321', 'http://example.com/image#4321'}) + # Not applies on conditions within branches + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Any(ns.bse.comment, ast.filter.Not(ast.filter.Equals('Me, Myself, and I')))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/entity#4321'}) + # Not applies on branches + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Not(ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#4321', 'http://example.com/image#4321'}) + # Double Not cancel each other + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Not(ast.filter.Not(ast.filter.Is('http://example.com/entity#1234')))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234'}) + # Not works within aggregation (and) + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.And( + ast.filter.Not(ast.filter.Is('http://example.com/entity#1234')), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('hello world')), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#4321'}) + # Not works within aggregation (or) + q = 
self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Or( + ast.filter.Not(ast.filter.Is('http://example.com/entity#1234')), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')), + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234', 'http://example.com/image#4321'}) + # Not works outside aggregation (and) + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Not( + ast.filter.And( + ast.filter.Is('http://example.com/entity#1234'), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('hello world')), + ))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#4321', 'http://example.com/image#1234', 'http://example.com/image#4321'}) + # Not works outside aggregation (or) + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Not( + ast.filter.Or( + ast.filter.Is('http://example.com/entity#4321'), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('Me, Myself, and I')), + ))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/image#4321'}) + # Not mixed with branch, aggregation, id, and value + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.And( + ast.filter.Not( # image#1234, image#4321 + ast.filter.Or( # entity#4321, entity#1234 + ast.filter.Is('http://example.com/entity#4321'), + ast.filter.Any(ns.bse.comment, ast.filter.Equals('hello world')), + ) + ), + ast.filter.Any(ns.bse.comment, ast.filter.Not(ast.filter.Equals('foobar'))), # entity#1234, entity#4321, image#1234 + )) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/image#1234'}) + + + def test_has(self): + # Has requires Node + self.assertRaises(errors.BackendError, self.parser._has, self.schema.literal(ns.bsfs.Literal), None, '') + # Has with GreaterThan constraint + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Has(ns.bse.comment, ast.filter.GreaterThan(0))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/entity#4321', 'http://example.com/image#1234'}) + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Has(ns.bse.comment, ast.filter.GreaterThan(1))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234'}) + # Has with Equals constraint + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Has(ns.bse.comment, 1)) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#4321', 'http://example.com/image#1234'}) + # Has with LessThan constraint + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Has(ns.bse.comment, ast.filter.LessThan(2))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#4321', 'http://example.com/image#1234', 'http://example.com/image#4321'}) + # Has with multiple constraints + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('extra1', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('extra2', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('extra3', datatype=rdflib.XSD.string))) + 
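# --- Editor's note: illustrative sketch, not part of the patch; the extra-comment
# setup for this test continues right below. ---
# ast.filter.Has(p, cond) expresses "the number of values reachable via p satisfies
# cond". In plain SPARQL 1.1 (not necessarily the exact query the Filter parser
# emits) the same idea reads roughly as the counting query below; the helper name
# is hypothetical and `graph` stands in for self.graph.
def _sketch_has_count(graph):
    from bsfs.namespace import ns
    query = f'''
        SELECT ?node WHERE {{
            ?node <{ns.bse.comment}> ?value .
        }}
        GROUP BY ?node
        HAVING (COUNT(?value) > 1)
    '''
    # nodes with more than one bse:comment value
    return {str(node) for node, in graph.query(query)}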
self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('extra4', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/entity#1234'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('extra5', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('extra1', datatype=rdflib.XSD.string))) + self.graph.add((rdflib.URIRef('http://example.com/entity#4321'), rdflib.URIRef(ns.bse.comment), rdflib.Literal('extra2', datatype=rdflib.XSD.string))) + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Has(ns.bse.comment, + ast.filter.And(ast.filter.GreaterThan(1), ast.filter.LessThan(5)))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#4321'}) + # Has with OneOf predicate + q = self.parser(self.schema.node(ns.bsfs.Entity), ast.filter.Has(ast.filter.OneOf(ns.bse.tag, ns.bse.buddy), + ast.filter.GreaterThan(1))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/entity#4321'}) + # Has with reversed predicate + q = self.parser(self.schema.node(ns.bsfs.Tag), ast.filter.Has(ast.filter.Predicate(ns.bse.tag, reverse=True), + ast.filter.GreaterThan(1))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/tag#1234'}) + + + def test_one_of(self): + # _one_of expects a node + self.assertRaises(errors.BackendError, self.parser._one_of, + self.schema.literal(ns.bsfs.Literal), + ast.filter.OneOf(ast.filter.Predicate(ns.bse.filesize))) + # invalid predicate for node type raises an error + self.assertRaises(errors.ConsistencyError, self.parser._one_of, + self.schema.node(ns.bsfs.Node), + ast.filter.OneOf(ast.filter.Predicate(ns.bse.filesize))) + self.assertRaises(errors.ConsistencyError, self.parser, + self.schema.node(ns.bsfs.Tag), + ast.filter.Any(ast.filter.OneOf(ast.filter.Predicate(ns.bse.filesize)), ast.filter.Equals(1234))) + self.assertRaises(errors.BackendError, self.parser._one_of, + self.schema.node(ns.bsfs.Node), + ast.filter.OneOf(ast.filter.Predicate(ns.bsfs.Predicate))) + # invalid predicate combinations raise an error + self.assertRaises(errors.ConsistencyError, self.parser._one_of, + self.schema.node(ns.bsfs.Node), + ast.filter.OneOf( + ast.filter.Predicate(ns.bse.filesize), + ast.filter.Predicate(ns.bse.representative))) + # _one_of returns the URI and range + q = self.parser._one_of(self.schema.node(ns.bsfs.Image), + ast.filter.OneOf( + ast.filter.Predicate(ns.bse.iso), + ast.filter.Predicate(ns.bse.filesize))) + self.assertTrue(q[0] == f'<{ns.bse.iso}>|<{ns.bse.filesize}>' or q[0] == f'<{ns.bse.filesize}>|<{ns.bse.iso}>') + self.assertEqual(q[1], self.schema.literal(ns.xsd.integer)) + # OneOf can be nested + q = self.parser._one_of(self.schema.node(ns.bsfs.Image), + ast.filter.OneOf( + ast.filter.Predicate(ns.bse.iso), + ast.filter.OneOf( + ast.filter.Predicate(ns.bse.filesize)))) + self.assertTrue(q[0] == f'<{ns.bse.iso}>|<{ns.bse.filesize}>' or q[0] == f'<{ns.bse.filesize}>|<{ns.bse.iso}>') + self.assertEqual(q[1], self.schema.literal(ns.xsd.integer)) + # _one_of returns the most generic range + q = self.parser._one_of(self.schema.node(ns.bsfs.Entity), + ast.filter.OneOf( + ast.filter.Predicate(ns.bse.tag), + ast.filter.Predicate(ns.bse.buddy))) + self.assertTrue(q[0] == f'<{ns.bse.tag}>|<{ns.bse.buddy}>' or q[0] == f'<{ns.bse.buddy}>|<{ns.bse.tag}>') + 
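# --- Editor's note: illustrative sketch, not part of the patch; the matching range
# check for q[1] follows right below. ---
# The '<p1>|<p2>' string asserted above is a SPARQL 1.1 alternative property path.
# Such a path can also be queried directly, e.g. "nodes related to something via
# either bse:tag or bse:buddy"; the helper name is hypothetical and `graph` stands
# in for self.graph.
def _sketch_alternative_path(graph):
    from bsfs.namespace import ns
    query = f'''
        SELECT DISTINCT ?node WHERE {{
            ?node <{ns.bse.tag}>|<{ns.bse.buddy}> ?other .
        }}
    '''
    return {str(node) for node, in graph.query(query)}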
self.assertEqual(q[1], self.schema.node(ns.bsfs.Node)) + # domains must match the given type + self.assertRaises(errors.ConsistencyError, self.parser, + self.schema.node(ns.bsfs.Entity), + ast.filter.Any(ast.filter.OneOf(ns.bse.tag, ns.bse.buddy), + ast.filter.Any(ast.filter.OneOf(ns.bse.filesize), + ast.filter.Equals(1234)))) + # ranges must have the same type (Node/Literal) + self.assertRaises(errors.ConsistencyError, self.parser, + self.schema.node(ns.bsfs.Entity), + ast.filter.Any(ast.filter.OneOf(ns.bse.tag, ns.bse.filesize), + ast.filter.Equals(1234))) + # ranges must be related + self.assertRaises(errors.ConsistencyError, self.parser, + self.schema.node(ns.bsfs.Entity), + ast.filter.Any(ast.filter.OneOf(ns.bse.comment, ns.bse.filesize), + ast.filter.Equals(1234))) + # integration: _one_of returns a valid sparql query + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Any(ast.filter.OneOf(ns.bse.tag, ns.bse.buddy), + ast.filter.Any(ast.filter.OneOf(ns.bse.comment), + ast.filter.Equals('Me, Myself, and I')))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + + + def test_predicate(self): + # predicate cannot be the root predicate (ns.bsfs.Predicate) + self.assertRaises(errors.BackendError, self.parser._predicate, self.schema.node(ns.bsfs.Node), ast.filter.Predicate(ns.bsfs.Predicate)) + # _predicate expects a node + self.assertRaises(errors.BackendError, self.parser._predicate, + self.schema.literal(ns.bsfs.Literal), + ast.filter.Predicate(ns.bse.filesize)) + # invalid predicate for node type raises an error + self.assertRaises(errors.ConsistencyError, self.parser._predicate, + self.schema.node(ns.bsfs.Node), + ast.filter.Predicate(ns.bse.filesize)) + self.assertRaises(errors.ConsistencyError, self.parser, + self.schema.node(ns.bsfs.Tag), + ast.filter.Any(ast.filter.Predicate(ns.bse.filesize), ast.filter.Equals(1234))) + # _predicate returns the URI and range + self.assertEqual(self.parser._predicate(self.schema.node(ns.bsfs.Entity), ast.filter.Predicate(ns.bse.filesize)), + (f'<{ns.bse.filesize}>', self.schema.literal(ns.xsd.integer))) + self.assertEqual(self.parser._predicate(self.schema.node(ns.bsfs.Entity), ast.filter.Predicate(ns.bse.tag)), + (f'<{ns.bse.tag}>', self.schema.node(ns.bsfs.Tag))) + # _predicate respects reverse flag + self.assertEqual(self.parser._predicate(self.schema.node(ns.bsfs.Tag), ast.filter.Predicate(ns.bse.tag, reverse=True)), + ('^<' + ns.bse.tag + '>', self.schema.node(ns.bsfs.Entity))) + # integration: _predicate returns a valid sparql query + q = self.parser(self.schema.node(ns.bsfs.Entity), + ast.filter.Any(ns.bse.tag, + ast.filter.Any(ns.bse.representative, + ast.filter.Any(ns.bse.filesize, + ast.filter.Equals(1234))))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/entity#1234', 'http://example.com/image#1234'}) + q = self.parser(self.schema.node(ns.bsfs.Tag), + ast.filter.Any(ast.filter.Predicate(ns.bse.tag, reverse=True), + ast.filter.Any(ns.bse.filesize, + ast.filter.LessThan(2000)))) + self.assertSetEqual({str(guid) for guid, in self.graph.query(q)}, + {'http://example.com/tag#1234'}) + + +## main ## + +if __name__ == '__main__': + unittest.main() + +## EOF ## diff --git a/test/triple_store/sparql/test_sparql.py b/test/triple_store/sparql/test_sparql.py index 0bf664a..3d81de1 100644 --- a/test/triple_store/sparql/test_sparql.py +++ b/test/triple_store/sparql/test_sparql.py @@ -11,6 +11,7 @@ import 
 # bsie imports
 from bsfs import schema as _schema
 from bsfs.namespace import ns
+from bsfs.query import ast
 from bsfs.utils import errors, URI
 
 # objects to test
@@ -59,6 +60,18 @@ class TestSparqlStore(unittest.TestCase):
                 bsfs:unique "true"^^xsd:boolean .
 
             ''')
+        self.schema_triples = {
+            # schema hierarchy
+            (rdflib.URIRef(ns.bsfs.Entity), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)),
+            (rdflib.URIRef(ns.bsfs.Tag), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)),
+            (rdflib.URIRef(ns.bsfs.User), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)),
+            (rdflib.URIRef(ns.xsd.string), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)),
+            (rdflib.URIRef(ns.xsd.integer), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)),
+            (rdflib.URIRef(ns.bse.comment), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)),
+            (rdflib.URIRef(ns.bse.filesize), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)),
+            (rdflib.URIRef(ns.bse.tag), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)),
+            (rdflib.URIRef(ns.bse.author), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)),
+        }
 
     def test_essentials(self):
         store = SparqlStore.Open()
@@ -155,7 +168,7 @@ class TestSparqlStore(unittest.TestCase):
         store.set(curr.node(ns.bsfs.Entity), ent_ids, p_author, {URI('http://example.com/me')})
 
         # check instances
-        instances = {
+        instances = self.schema_triples | {
             # node instances
             (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)),
             (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)),
@@ -228,7 +241,16 @@ class TestSparqlStore(unittest.TestCase):
         store.schema = curr
         self.assertEqual(store.schema, curr)
         # instances have not changed
-        self.assertSetEqual(set(store._graph), instances)
+        self.assertSetEqual(set(store._graph), instances | {
+            # schema hierarchy
+            (rdflib.URIRef(ns.bsfs.Collection), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)),
+            (rdflib.URIRef(ns.xsd.boolean), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)),
+            (rdflib.URIRef(ns.bse.shared), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)),
+            (rdflib.URIRef(ns.bse.partOf), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)),
+            (rdflib.URIRef('http://bsfs.ai/schema/Tag#usedIn'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)),
+            (rdflib.URIRef('http://bsfs.ai/schema/Collection#tag'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)),
+            (rdflib.URIRef('http://bsfs.ai/schema/Tag#principal'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)),
+        })
         # add some instances of the new classes
         p_partOf = curr.predicate(ns.bse.partOf)
         p_shared = curr.predicate(ns.bse.shared)
@@ -248,6 +270,14 @@ class TestSparqlStore(unittest.TestCase):
             {URI('http://example.com/me/collection#1234')})
         # new instances are now in the graph
         self.assertSetEqual(set(store._graph), instances | {
+            # same old schema hierarchy
+            (rdflib.URIRef(ns.bsfs.Collection), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)),
+            (rdflib.URIRef(ns.xsd.boolean), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)),
+            (rdflib.URIRef(ns.bse.shared), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)),
+            (rdflib.URIRef(ns.bse.partOf), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)),
+            (rdflib.URIRef('http://bsfs.ai/schema/Tag#usedIn'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)),
+            (rdflib.URIRef('http://bsfs.ai/schema/Collection#tag'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)),
+            (rdflib.URIRef('http://bsfs.ai/schema/Tag#principal'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)),
             # collections
             (rdflib.URIRef('http://example.com/me/collection#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Collection)),
             (rdflib.URIRef('http://example.com/me/collection#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Collection)),
@@ -316,6 +346,16 @@ class TestSparqlStore(unittest.TestCase):
         self.assertEqual(store.schema, curr)
         # instances of old classes were removed
         self.assertSetEqual(set(store._graph), {
+            # schema hierarchy
+            (rdflib.URIRef(ns.bsfs.Entity), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)),
+            (rdflib.URIRef(ns.bsfs.Tag), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)),
+            (rdflib.URIRef(ns.bsfs.User), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Node)),
+            (rdflib.URIRef(ns.xsd.boolean), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)),
+            (rdflib.URIRef(ns.xsd.integer), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Literal)),
+            (rdflib.URIRef(ns.bse.shared), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)),
+            (rdflib.URIRef(ns.bse.tag), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)),
+            (rdflib.URIRef(ns.bse.filesize), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)),
+            (rdflib.URIRef('http://bsfs.ai/schema/Tag#principal'), rdflib.RDFS.subClassOf, rdflib.URIRef(ns.bsfs.Predicate)),
             # node instances
             (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)),
             (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)),
@@ -390,7 +430,7 @@ class TestSparqlStore(unittest.TestCase):
         ent_ids = {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')}
         tag_ids = {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')}
         # target instances
-        instances = {
+        instances = self.schema_triples | {
             # node instances
             (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)),
             (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)),
@@ -416,7 +456,7 @@ class TestSparqlStore(unittest.TestCase):
 
         # rollback undoes previous changes
         store.rollback()
-        self.assertSetEqual(set(store._graph), set())
+        self.assertSetEqual(set(store._graph), self.schema_triples)
 
         # add some data once more
         store.create(ent_type, ent_ids)
@@ -456,7 +496,38 @@ class TestSparqlStore(unittest.TestCase):
         })
 
     def test_get(self):
-        raise NotImplementedError()
+        # store setup
+        store = SparqlStore.Open()
+        store.schema = self.schema
+        ent_type = self.schema.node(ns.bsfs.Entity)
+        tag_type = self.schema.node(ns.bsfs.Tag)
+        ent_ids = {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')}
+        tag_ids = {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')}
+        store.create(ent_type, ent_ids)
+        store.create(tag_type, tag_ids)
+        store.set(ent_type, ent_ids, self.schema.predicate(ns.bse.tag), tag_ids)
+        store.set(ent_type, {URI('http://example.com/me/entity#1234')}, self.schema.predicate(ns.bse.filesize), {1234})
+        store.set(ent_type, {URI('http://example.com/me/entity#4321')}, self.schema.predicate(ns.bse.filesize), {4321})
+        # node_type must be in the schema
+        self.assertRaises(errors.ConsistencyError, set, store.get(self.schema.node(ns.bsfs.Node).get_child(ns.bsfs.Invalid), ast.filter.IsIn(ent_ids)))
+        # query must be a filter expression
+        class Foo(): pass
+        self.assertRaises(TypeError, set, store.get(ent_type, 1234))
+        self.assertRaises(TypeError, set, store.get(ent_type, '1234'))
+        self.assertRaises(TypeError, set, store.get(ent_type, Foo()))
+        # run some queries
+        self.assertSetEqual(set(store.get(tag_type, ast.filter.IsIn(tag_ids))), tag_ids)
+        self.assertSetEqual(set(store.get(ent_type, ast.filter.Any(ns.bse.tag, ast.filter.IsIn(tag_ids)))), ent_ids)
+        self.assertSetEqual(set(store.get(ent_type, ast.filter.IsIn(tag_ids))), set())
+        # invalid queries raise error
+        self.assertRaises(errors.ConsistencyError, set, store.get(tag_type, ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234))))
+        self.assertRaises(errors.BackendError, set, store.get(ent_type, ast.filter.Equals('http://example.com/me/entity#1234')))
+        # run some more complex query
+        q = store.get(tag_type, ast.filter.Any(ast.filter.Predicate(ns.bse.tag, reverse=True),
+            ast.filter.Any(ns.bse.filesize,
+                ast.filter.LessThan(2000))))
+        self.assertSetEqual(set(q), tag_ids)
+
 
     def test_exists(self):
         # store setup
@@ -509,14 +580,15 @@ class TestSparqlStore(unittest.TestCase):
         # can create some nodes
         ent_type = store.schema.node(ns.bsfs.Entity)
         store.create(ent_type, {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')})
-        self.assertSetEqual(set(store._graph), {
+        self.assertSetEqual(set(store._graph), self.schema_triples | {
+            # instances
             (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)),
             (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)),
         })
 
         # existing nodes are skipped
         store.create(ent_type, {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#5678')})
-        self.assertSetEqual(set(store._graph), {
+        self.assertSetEqual(set(store._graph), self.schema_triples | {
             # previous triples
             (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)),
             (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)),
@@ -527,7 +599,7 @@ class TestSparqlStore(unittest.TestCase):
         # can create nodes of a different type
         tag_type = store.schema.node(ns.bsfs.Tag)
         store.create(tag_type, {URI('http://example.com/me/tag#1234'), URI('http://example.com/me/tag#4321')})
-        self.assertSetEqual(set(store._graph), {
+        self.assertSetEqual(set(store._graph), self.schema_triples | {
             # previous triples
             (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)),
             (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)),
@@ -540,7 +612,7 @@ class TestSparqlStore(unittest.TestCase):
         # creation does not change types of existing nodes
         tag_type = store.schema.node(ns.bsfs.Tag)
         store.create(tag_type, {URI('http://example.com/me/entity#1234'), URI('http://example.com/me/entity#4321')})
-        self.assertSetEqual(set(store._graph), {
+        self.assertSetEqual(set(store._graph), self.schema_triples | {
             # previous triples
             (rdflib.URIRef('http://example.com/me/entity#1234'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)),
             (rdflib.URIRef('http://example.com/me/entity#4321'), rdflib.RDF.type, rdflib.URIRef(ns.bsfs.Entity)),
-- cgit v1.2.3
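
For orientation, a minimal usage sketch of the store interface that test_get exercises above. It is an illustration only, not part of the patch: my_schema stands in for any bsfs Schema that defines bsfs:Entity, bsfs:Tag, bse:tag and bse:filesize (for instance, one built with _schema.Schema.from_string as in the suite's setUp), and the instance URIs are placeholders.

# sketch only -- assumes my_schema is a bsfs Schema defining bsfs:Entity,
# bsfs:Tag, bse:tag and bse:filesize; URIs below are illustrative
from bsfs.namespace import ns
from bsfs.query import ast
from bsfs.utils import URI
from bsfs.triple_store.sparql.sparql import SparqlStore

store = SparqlStore.Open()       # open a fresh store
store.schema = my_schema         # migrate to the schema before adding instances
ent_type = store.schema.node(ns.bsfs.Entity)
tag_type = store.schema.node(ns.bsfs.Tag)

store.create(ent_type, {URI('http://example.com/me/entity#1234')})
store.create(tag_type, {URI('http://example.com/me/tag#1234')})
store.set(ent_type, {URI('http://example.com/me/entity#1234')},
          store.schema.predicate(ns.bse.tag), {URI('http://example.com/me/tag#1234')})
store.set(ent_type, {URI('http://example.com/me/entity#1234')},
          store.schema.predicate(ns.bse.filesize), {1234})

# entities whose filesize equals 1234
entities = set(store.get(ent_type, ast.filter.Any(ns.bse.filesize, ast.filter.Equals(1234))))
# tags reached via the reversed bse:tag edge from entities with filesize below 2000
tags = set(store.get(tag_type, ast.filter.Any(ast.filter.Predicate(ns.bse.tag, reverse=True),
                                              ast.filter.Any(ns.bse.filesize, ast.filter.LessThan(2000)))))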