diff --git a/core/sail/elasticsearch-store/pom.xml b/core/sail/elasticsearch-store/pom.xml index 57aeb65c505..667b043b147 100644 --- a/core/sail/elasticsearch-store/pom.xml +++ b/core/sail/elasticsearch-store/pom.xml @@ -193,7 +193,7 @@ 9200 false - ${java.sec.mgr} + ${java.sec.mgr} -Xmx512m 1 diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/AbstractComplianceTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/AbstractComplianceTest.java index 7f9fa95172e..10255c775b7 100644 --- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/AbstractComplianceTest.java +++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/AbstractComplianceTest.java @@ -15,6 +15,7 @@ import java.io.InputStream; import java.io.Reader; import java.net.URL; +import java.util.function.Supplier; import org.eclipse.rdf4j.common.iteration.CloseableIteration; import org.eclipse.rdf4j.common.transaction.IsolationLevel; @@ -60,36 +61,36 @@ public abstract class AbstractComplianceTest { protected DynamicTest makeTest(String name, Executable x) { return DynamicTest.dynamicTest(name, () -> { - setUp(); x.execute(); - tearDown(); }); } - protected final Logger logger = LoggerFactory.getLogger(this.getClass()); - - protected final Repository repo; - protected RepositoryConnection conn; - - public AbstractComplianceTest(Repository repo) { - this.repo = repo; + protected Repository openRepository() { + Repository r = repo.get(); + r.init(); + return r; } - public void setUp() { - repo.init(); - conn = new RepositoryConnectionWrapper(repo.getConnection()); + protected RepositoryConnection openConnection(Repository r) { + return new RepositoryConnectionWrapper(r.getConnection()); } - public void tearDown() { - try { + protected void closeRepository(Repository r) { + try (RepositoryConnection conn = r.getConnection()) { conn.clear(); - conn.close(); - } finally { - repo.shutDown(); } + r.shutDown(); + } + + protected final Logger logger = LoggerFactory.getLogger(this.getClass()); + + protected final Supplier repo; + + public AbstractComplianceTest(Supplier repo) { + this.repo = repo; } - protected void loadTestData(String dataFile, Resource... contexts) + protected void loadTestData(String dataFile, RepositoryConnection conn, Resource... 
contexts) throws RDFParseException, RepositoryException, IOException { logger.debug("loading dataset {}", dataFile); try (InputStream dataset = this.getClass().getResourceAsStream(dataFile)) { diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/RepositorySPARQLComplianceTestSuite.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/RepositorySPARQLComplianceTestSuite.java index 50539b84b9c..d362b020ef6 100644 --- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/RepositorySPARQLComplianceTestSuite.java +++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/RepositorySPARQLComplianceTestSuite.java @@ -10,6 +10,8 @@ *******************************************************************************/ package org.eclipse.rdf4j.testsuite.sparql; +import static org.junit.jupiter.api.Assertions.fail; + import java.io.File; import java.io.IOException; import java.util.stream.Stream; @@ -56,92 +58,92 @@ public abstract class RepositorySPARQLComplianceTestSuite { @TestFactory Stream aggregate() throws RDF4JException, IOException { - return new AggregateTest(getEmptyInitializedRepository()).tests(); + return new AggregateTest(this::getEmptyInitializedRepository).tests(); } @TestFactory Stream arbitraryLengthPath() throws RDF4JException, IOException { - return new ArbitraryLengthPathTest(getEmptyInitializedRepository()).tests(); + return new ArbitraryLengthPathTest(this::getEmptyInitializedRepository).tests(); } @TestFactory Stream basic() throws RDF4JException, IOException { - return new BasicTest(getEmptyInitializedRepository()).tests(); + return new BasicTest(this::getEmptyInitializedRepository).tests(); } @TestFactory Stream bind() throws RDF4JException, IOException { - return new BindTest(getEmptyInitializedRepository()).tests(); + return new BindTest(this::getEmptyInitializedRepository).tests(); } @TestFactory Stream builtinFunction() throws RDF4JException, IOException { - return new BuiltinFunctionTest(getEmptyInitializedRepository()).tests(); + return new BuiltinFunctionTest(this::getEmptyInitializedRepository).tests(); } @TestFactory Stream construct() throws RDF4JException, IOException { - return new ConstructTest(getEmptyInitializedRepository()).tests(); + return new ConstructTest(this::getEmptyInitializedRepository).tests(); } @TestFactory Stream defaultGraph() throws RDF4JException, IOException { - return new DefaultGraphTest(getEmptyInitializedRepository()).tests(); + return new DefaultGraphTest(this::getEmptyInitializedRepository).tests(); } @TestFactory Stream describe() throws RDF4JException, IOException { - return new DescribeTest(getEmptyInitializedRepository()).tests(); + return new DescribeTest(this::getEmptyInitializedRepository).tests(); } @TestFactory Stream groupBy() throws RDF4JException, IOException { - return new GroupByTest(getEmptyInitializedRepository()).tests(); + return new GroupByTest(this::getEmptyInitializedRepository).tests(); } @TestFactory Stream in() throws RDF4JException, IOException { - return new InTest(getEmptyInitializedRepository()).tests(); + return new InTest(this::getEmptyInitializedRepository).tests(); } @TestFactory Stream optional() throws RDF4JException, IOException { - return new OptionalTest(getEmptyInitializedRepository()).tests(); + return new OptionalTest(this::getEmptyInitializedRepository).tests(); } @TestFactory Stream propertyPath() throws RDF4JException, IOException { - return new PropertyPathTest(getEmptyInitializedRepository()).tests(); + return new 
PropertyPathTest(this::getEmptyInitializedRepository).tests(); } @TestFactory Stream subselect() throws RDF4JException, IOException { - return new SubselectTest(getEmptyInitializedRepository()).tests(); + return new SubselectTest(this::getEmptyInitializedRepository).tests(); } @TestFactory Stream union() throws RDF4JException, IOException { - return new UnionTest(getEmptyInitializedRepository()).tests(); + return new UnionTest(this::getEmptyInitializedRepository).tests(); } @TestFactory Stream values() throws RDF4JException, IOException { - return new ValuesTest(getEmptyInitializedRepository()).tests(); + return new ValuesTest(this::getEmptyInitializedRepository).tests(); } @TestFactory Stream orderBy() throws RDF4JException, IOException { - return new OrderByTest(getEmptyInitializedRepository()).tests(); + return new OrderByTest(this::getEmptyInitializedRepository).tests(); } @TestFactory Stream exists() throws RDF4JException, IOException { - return new ExistsTest(getEmptyInitializedRepository()).tests(); + return new ExistsTest(this::getEmptyInitializedRepository).tests(); } @TestFactory Stream minus() throws RDF4JException, IOException { - return new MinusTest(getEmptyInitializedRepository()).tests(); + return new MinusTest(this::getEmptyInitializedRepository).tests(); } @BeforeAll @@ -164,13 +166,19 @@ public RepositorySPARQLComplianceTestSuite(RepositoryFactory factory) { this.factory = factory; } - public Repository getEmptyInitializedRepository() throws RDF4JException, IOException { - Repository repository = factory.getRepository(factory.getConfig()); - repository.setDataDir(dataDir); - try (RepositoryConnection con = repository.getConnection()) { - con.clear(); - con.clearNamespaces(); + public Repository getEmptyInitializedRepository() { + try { + Repository repository = factory.getRepository(factory.getConfig()); + repository.setDataDir(dataDir); + try (RepositoryConnection con = repository.getConnection()) { + con.clear(); + con.clearNamespaces(); + } + return repository; + + } catch (RDF4JException e) { + fail(e); + return null; } - return repository; } } diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/AggregateTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/AggregateTest.java index 814e6fe91f0..12fcbc1df0a 100644 --- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/AggregateTest.java +++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/AggregateTest.java @@ -21,6 +21,7 @@ import java.time.ZoneId; import java.time.ZonedDateTime; import java.util.List; +import java.util.function.Supplier; import java.util.stream.Stream; import org.eclipse.rdf4j.model.BNode; @@ -36,6 +37,7 @@ import org.eclipse.rdf4j.query.TupleQuery; import org.eclipse.rdf4j.query.TupleQueryResult; import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest; import org.junit.jupiter.api.DynamicTest; @@ -46,13 +48,12 @@ */ public class AggregateTest extends AbstractComplianceTest { - public AggregateTest(Repository repo) { + public AggregateTest(Supplier repo) { super(repo); } public Stream tests() { - return Stream.of( - makeTest("MaxAggregateWithGroupEmptyResult", this::testMaxAggregateWithGroupEmptyResult), + return Stream.of(makeTest("MaxAggregateWithGroupEmptyResult", this::testMaxAggregateWithGroupEmptyResult), makeTest("MaxAggregateWithoutGroupEmptySolution", 
this::testMaxAggregateWithoutGroupEmptySolution), makeTest("MinAggregateWithGroupEmptyResult", this::testMinAggregateWithGroupEmptyResult), makeTest("MinAggregateWithoutGroupEmptySolution", this::testMinAggregateWithoutGroupEmptySolution), @@ -61,12 +62,9 @@ public Stream tests() { this::testSampleAggregateWithoutGroupEmptySolution), makeTest("SES2361UndefMin", this::testSES2361UndefMin), makeTest("CountOrderBy_ImplicitGroup", this::testCountOrderBy_ImplicitGroup), - makeTest("DistinctMax", this::testDistinctMax), - makeTest("Max", this::testMax), - makeTest("DistinctAvg", this::testDistinctAvg), - makeTest("Avg", this::testAvg), - makeTest("DistinctSum", this::testDistinctSum), - makeTest("Sum", this::testSum), + makeTest("DistinctMax", this::testDistinctMax), makeTest("Max", this::testMax), + makeTest("DistinctAvg", this::testDistinctAvg), makeTest("Avg", this::testAvg), + makeTest("DistinctSum", this::testDistinctSum), makeTest("Sum", this::testSum), makeTest("CountHaving", this::testCountHaving), makeTest("SES1970CountDistinctWildcard", this::testSES1970CountDistinctWildcard), makeTest("GroupConcatNonDistinct", this::testGroupConcatNonDistinct), @@ -75,18 +73,22 @@ public Stream tests() { makeTest("SES2361UndefSum", this::testSES2361UndefSum), makeTest("SES2361UndefCountWildcard", this::testSES2361UndefCountWildcard), makeTest("SES2361UndefCount", this::testSES2361UndefCount), - makeTest("SES2361UndefMax", this::testSES2361UndefMax) - ); + makeTest("SES2361UndefMax", this::testSES2361UndefMax)); } /** * See https://github.com/eclipse/rdf4j/issues/1978 */ private void testMaxAggregateWithGroupEmptyResult() { - String query = "select ?s (max(?o) as ?omax) {\n" + " ?s ?p ?o .\n" + " }\n" + " group by ?s\n"; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String query = "select ?s (max(?o) as ?omax) {\n" + " ?s ?p ?o .\n" + " }\n" + " group by ?s\n"; - try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) { - assertThat(result.hasNext()).isFalse(); + try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) { + assertThat(result.hasNext()).isFalse(); + } + } finally { + closeRepository(repo); } } @@ -94,10 +96,15 @@ private void testMaxAggregateWithGroupEmptyResult() { * See https://github.com/eclipse/rdf4j/issues/1978 */ private void testMaxAggregateWithoutGroupEmptySolution() { - String query = "select (max(?o) as ?omax) {\n" + " ?s ?p ?o .\n" + " }\n"; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String query = "select (max(?o) as ?omax) {\n" + " ?s ?p ?o .\n" + " }\n"; - try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) { - assertThat(result.next()).isEmpty(); + try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) { + assertThat(result.next()).isEmpty(); + } + } finally { + closeRepository(repo); } } @@ -106,10 +113,15 @@ private void testMaxAggregateWithoutGroupEmptySolution() { */ private void testMinAggregateWithGroupEmptyResult() { - String query = "select ?s (min(?o) as ?omin) {\n" + " ?s ?p ?o .\n" + " }\n" + " group by ?s\n"; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String query = "select ?s (min(?o) as ?omin) {\n" + " ?s ?p ?o .\n" + " }\n" + " group by ?s\n"; - try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) { - assertThat(result.hasNext()).isFalse(); + try (TupleQueryResult result = 
conn.prepareTupleQuery(query).evaluate()) { + assertThat(result.hasNext()).isFalse(); + } + } finally { + closeRepository(repo); } } @@ -117,10 +129,15 @@ private void testMinAggregateWithGroupEmptyResult() { * See https://github.com/eclipse/rdf4j/issues/1978 */ private void testMinAggregateWithoutGroupEmptySolution() { - String query = "select (min(?o) as ?omin) {\n" + " ?s ?p ?o .\n" + " }\n"; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String query = "select (min(?o) as ?omin) {\n" + " ?s ?p ?o .\n" + " }\n"; - try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) { - assertThat(result.next()).isEmpty(); + try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) { + assertThat(result.next()).isEmpty(); + } + } finally { + closeRepository(repo); } } @@ -128,10 +145,15 @@ private void testMinAggregateWithoutGroupEmptySolution() { * See https://github.com/eclipse/rdf4j/issues/1978 */ private void testSampleAggregateWithGroupEmptyResult() { - String query = "select ?s (sample(?o) as ?osample) {\n" + " ?s ?p ?o .\n" + " }\n" + " group by ?s\n"; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String query = "select ?s (sample(?o) as ?osample) {\n" + " ?s ?p ?o .\n" + " }\n" + " group by ?s\n"; - try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) { - assertThat(result.hasNext()).isFalse(); + try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) { + assertThat(result.hasNext()).isFalse(); + } + } finally { + closeRepository(repo); } } @@ -139,317 +161,405 @@ private void testSampleAggregateWithGroupEmptyResult() { * See https://github.com/eclipse/rdf4j/issues/1978 */ private void testSampleAggregateWithoutGroupEmptySolution() { - String query = "select (sample(?o) as ?osample) {\n" + " ?s ?p ?o .\n" + " }\n"; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String query = "select (sample(?o) as ?osample) {\n" + " ?s ?p ?o .\n" + " }\n"; - try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) { - assertThat(result.next()).isEmpty(); + try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) { + assertThat(result.next()).isEmpty(); + } + } finally { + closeRepository(repo); } } private void testSES2361UndefMin() { - String query = "SELECT (MIN(?v) as ?min) WHERE { VALUES ?v { 1 2 undef 3 4 }}"; - try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { - assertThat((Iterable) result).isNotNull(); - assertThat(result.hasNext()).isTrue(); - assertThat(result.next().getValue("min").stringValue()).isEqualTo("1"); - assertThat(result.hasNext()).isFalse(); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String query = "SELECT (MIN(?v) as ?min) WHERE { VALUES ?v { 1 2 undef 3 4 }}"; + try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { + assertThat((Iterable) result).isNotNull(); + assertThat(result.hasNext()).isTrue(); + assertThat(result.next().getValue("min").stringValue()).isEqualTo("1"); + assertThat(result.hasNext()).isFalse(); + } + } finally { + closeRepository(repo); } } private void testSES2361UndefMax() { - String query = "SELECT (MAX(?v) as ?max) WHERE { VALUES ?v { 1 2 7 undef 3 4 }}"; - try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { - 
assertThat((Iterable) result).isNotNull(); - assertThat(result.hasNext()).isTrue(); - assertThat(result.next().getValue("max").stringValue()).isEqualTo("7"); - assertThat((Iterable) result).isEmpty(); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String query = "SELECT (MAX(?v) as ?max) WHERE { VALUES ?v { 1 2 7 undef 3 4 }}"; + try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { + assertThat((Iterable) result).isNotNull(); + assertThat(result.hasNext()).isTrue(); + assertThat(result.next().getValue("max").stringValue()).isEqualTo("7"); + assertThat((Iterable) result).isEmpty(); + } + } finally { + closeRepository(repo); } } private void testSES2361UndefCount() { String query = "SELECT (COUNT(?v) as ?c) WHERE { VALUES ?v { 1 2 undef 3 4 }}"; - try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { - assertThat((Iterable) result).isNotNull(); - assertThat(result.hasNext()).isTrue(); - assertThat(result.next().getValue("c").stringValue()).isEqualTo("4"); - assertThat((Iterable) result).isEmpty(); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { + assertThat((Iterable) result).isNotNull(); + assertThat(result.hasNext()).isTrue(); + assertThat(result.next().getValue("c").stringValue()).isEqualTo("4"); + assertThat((Iterable) result).isEmpty(); + } + } finally { + closeRepository(repo); } } private void testSES2361UndefCountWildcard() { String query = "SELECT (COUNT(*) as ?c) WHERE { VALUES ?v { 1 2 undef 3 4 }}"; - try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { - assertThat((Iterable) result).isNotNull(); - assertThat(result.hasNext()).isTrue(); - assertThat(result.next().getValue("c").stringValue()).isEqualTo("4"); - assertThat((Iterable) result).isEmpty(); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { + assertThat((Iterable) result).isNotNull(); + assertThat(result.hasNext()).isTrue(); + assertThat(result.next().getValue("c").stringValue()).isEqualTo("4"); + assertThat((Iterable) result).isEmpty(); + } + } finally { + closeRepository(repo); } } private void testSES2361UndefSum() { - String query = "SELECT (SUM(?v) as ?s) WHERE { VALUES ?v { 1 2 undef 3 4 }}"; - try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { - assertThat((Iterable) result).isNotNull(); - assertThat(result.hasNext()).isTrue(); - assertThat(result.next().getValue("s").stringValue()).isEqualTo("10"); - assertThat((Iterable) result).isEmpty(); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String query = "SELECT (SUM(?v) as ?s) WHERE { VALUES ?v { 1 2 undef 3 4 }}"; + try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { + assertThat((Iterable) result).isNotNull(); + assertThat(result.hasNext()).isTrue(); + assertThat(result.next().getValue("s").stringValue()).isEqualTo("10"); + assertThat((Iterable) result).isEmpty(); + } + } finally { + closeRepository(repo); } } private void testSES1979MinMaxInf() throws Exception { - loadTestData("/testdata-query/dataset-ses1979.trig"); - String query = "prefix : select 
(min(?o) as ?min) (max(?o) as ?max) where { ?s :float ?o }"; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - - try (TupleQueryResult evaluate = tq.evaluate()) { - List result = QueryResults.asList(evaluate); - assertThat((Iterable) result).isNotNull().hasSize(1); - assertThat(result.get(0).getValue("min")).isEqualTo(literal(Float.NEGATIVE_INFINITY)); - assertThat(result.get(0).getValue("max")).isEqualTo(literal(Float.POSITIVE_INFINITY)); - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-ses1979.trig", conn); + String query = "prefix : select (min(?o) as ?min) (max(?o) as ?max) where { ?s :float ?o }"; + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + + try (TupleQueryResult evaluate = tq.evaluate()) { + List result = QueryResults.asList(evaluate); + assertThat((Iterable) result).isNotNull().hasSize(1); + assertThat(result.get(0).getValue("min")).isEqualTo(literal(Float.NEGATIVE_INFINITY)); + assertThat(result.get(0).getValue("max")).isEqualTo(literal(Float.POSITIVE_INFINITY)); + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); + } + } finally { + closeRepository(repo); } } private void testGroupConcatDistinct() throws Exception { - loadTestData("/testdata-query/dataset-query.trig"); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-query.trig", conn); - String query = getNamespaceDeclarations() + "SELECT (GROUP_CONCAT(DISTINCT ?l) AS ?concat)" - + "WHERE { ex:groupconcat-test ?p ?l . }"; + String query = getNamespaceDeclarations() + "SELECT (GROUP_CONCAT(DISTINCT ?l) AS ?concat)" + + "WHERE { ex:groupconcat-test ?p ?l . 
}"; - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - try (TupleQueryResult result = tq.evaluate()) { - assertThat((Iterable) result).isNotNull(); + try (TupleQueryResult result = tq.evaluate()) { + assertThat((Iterable) result).isNotNull(); - while (result.hasNext()) { - BindingSet bs = result.next(); - assertThat(bs).isNotNull(); + while (result.hasNext()) { + BindingSet bs = result.next(); + assertThat(bs).isNotNull(); - Value concat = bs.getValue("concat"); + Value concat = bs.getValue("concat"); - assertThat(concat).isInstanceOf(Literal.class); + assertThat(concat).isInstanceOf(Literal.class); - String lexValue = ((Literal) concat).getLabel(); + String lexValue = ((Literal) concat).getLabel(); - int occ = countCharOccurrences(lexValue, 'a'); - assertThat(occ).isEqualTo(1); - occ = countCharOccurrences(lexValue, 'b'); - assertThat(occ).isEqualTo(1); - occ = countCharOccurrences(lexValue, 'c'); - assertThat(occ).isEqualTo(1); - occ = countCharOccurrences(lexValue, 'd'); - assertThat(occ).isEqualTo(1); + int occ = countCharOccurrences(lexValue, 'a'); + assertThat(occ).isEqualTo(1); + occ = countCharOccurrences(lexValue, 'b'); + assertThat(occ).isEqualTo(1); + occ = countCharOccurrences(lexValue, 'c'); + assertThat(occ).isEqualTo(1); + occ = countCharOccurrences(lexValue, 'd'); + assertThat(occ).isEqualTo(1); + } + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); } - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + } finally { + closeRepository(repo); } } private void testGroupConcatNonDistinct() throws Exception { - loadTestData("/testdata-query/dataset-query.trig"); - String query = getNamespaceDeclarations() + "SELECT (GROUP_CONCAT(?l) AS ?concat)" - + "WHERE { ex:groupconcat-test ?p ?l . }"; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - - try (TupleQueryResult result = tq.evaluate()) { - assertThat((Iterable) result).isNotNull(); - - while (result.hasNext()) { - BindingSet bs = result.next(); - assertThat(bs).isNotNull(); - - Value concat = bs.getValue("concat"); - - assertThat(concat).isInstanceOf(Literal.class); - - String lexValue = ((Literal) concat).getLabel(); - - int occ = countCharOccurrences(lexValue, 'a'); - assertThat(occ).isEqualTo(1); - occ = countCharOccurrences(lexValue, 'b'); - assertThat(occ).isEqualTo(2); - occ = countCharOccurrences(lexValue, 'c'); - assertThat(occ).isEqualTo(2); - occ = countCharOccurrences(lexValue, 'd'); - assertThat(occ).isEqualTo(1); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-query.trig", conn); + String query = getNamespaceDeclarations() + "SELECT (GROUP_CONCAT(?l) AS ?concat)" + + "WHERE { ex:groupconcat-test ?p ?l . 
}"; + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + + try (TupleQueryResult result = tq.evaluate()) { + assertThat((Iterable) result).isNotNull(); + + while (result.hasNext()) { + BindingSet bs = result.next(); + assertThat(bs).isNotNull(); + + Value concat = bs.getValue("concat"); + + assertThat(concat).isInstanceOf(Literal.class); + + String lexValue = ((Literal) concat).getLabel(); + + int occ = countCharOccurrences(lexValue, 'a'); + assertThat(occ).isEqualTo(1); + occ = countCharOccurrences(lexValue, 'b'); + assertThat(occ).isEqualTo(2); + occ = countCharOccurrences(lexValue, 'c'); + assertThat(occ).isEqualTo(2); + occ = countCharOccurrences(lexValue, 'd'); + assertThat(occ).isEqualTo(1); + } + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); } - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + } finally { + closeRepository(repo); } } private void testSES1970CountDistinctWildcard() throws Exception { - loadTestData("/testdata-query/dataset-ses1970.trig"); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-ses1970.trig", conn); - String query = "SELECT (COUNT(DISTINCT *) AS ?c) {?s ?p ?o }"; + String query = "SELECT (COUNT(DISTINCT *) AS ?c) {?s ?p ?o }"; - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - try (TupleQueryResult result = tq.evaluate()) { - assertThat((Iterable) result).isNotNull(); + try (TupleQueryResult result = tq.evaluate()) { + assertThat((Iterable) result).isNotNull(); - assertThat(result.hasNext()).isTrue(); - BindingSet s = result.next(); - assertThat(getIntValue(s.getValue("c"), 0)).isEqualTo(3); - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + assertThat(result.hasNext()).isTrue(); + BindingSet s = result.next(); + assertThat(getIntValue(s.getValue("c"), 0)).isEqualTo(3); + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); + } + } finally { + closeRepository(repo); } } private void testCountHaving() { - BNode bnode1 = bnode(); - BNode bnode2 = bnode(); - BNode bnode3 = bnode(); - - conn.add(bnode3, FOAF.KNOWS, bnode()); - conn.add(bnode1, FOAF.KNOWS, bnode()); - conn.add(bnode1, FOAF.KNOWS, bnode()); - conn.add(bnode2, FOAF.KNOWS, bnode()); - conn.add(bnode3, FOAF.KNOWS, bnode()); - conn.add(bnode3, FOAF.KNOWS, bnode()); - conn.add(bnode1, FOAF.KNOWS, bnode()); - - String query = "SELECT ?a WHERE { ?a ?b ?c } GROUP BY ?a HAVING( (COUNT(?c) > 1 ) && ( COUNT(?c) != 0 ) ) "; - try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { - List collect = QueryResults.asList(result); - assertThat(collect).hasSize(2); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + BNode bnode1 = bnode(); + BNode bnode2 = bnode(); + BNode bnode3 = bnode(); + + conn.add(bnode3, FOAF.KNOWS, bnode()); + conn.add(bnode1, FOAF.KNOWS, bnode()); + conn.add(bnode1, FOAF.KNOWS, bnode()); + conn.add(bnode2, FOAF.KNOWS, bnode()); + conn.add(bnode3, FOAF.KNOWS, bnode()); + conn.add(bnode3, FOAF.KNOWS, bnode()); + conn.add(bnode1, FOAF.KNOWS, bnode()); + + String query = "SELECT ?a WHERE { ?a ?b ?c } GROUP BY ?a HAVING( (COUNT(?c) > 1 ) && ( COUNT(?c) != 0 ) ) "; + try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { 
+ List collect = QueryResults.asList(result); + assertThat(collect).hasSize(2); + } + } finally { + closeRepository(repo); } } private void testSum() { - mixedDataForNumericAggregates(); - - String query = "SELECT ?a (SUM(?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate "; - try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { - List collect = QueryResults.asList(result); - int i = 0; - assertThat(collect.get(i++).getValue("aggregate")).isNull(); - assertThat(collect.get(i++).getValue("aggregate")).isNull(); - assertThat(collect.get(i++).getValue("aggregate")).isNull(); - assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(30.11)); - assertThat(collect.get(i++).getValue("aggregate")) - .isEqualTo(literal(new BigDecimal("89.4786576482391284723864721567342354783275234"))); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + mixedDataForNumericAggregates(conn); + String query = "SELECT ?a (SUM(?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate "; + try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { + List collect = QueryResults.asList(result); + int i = 0; + assertThat(collect.get(i++).getValue("aggregate")).isNull(); + assertThat(collect.get(i++).getValue("aggregate")).isNull(); + assertThat(collect.get(i++).getValue("aggregate")).isNull(); + assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(30.11)); + assertThat(collect.get(i++).getValue("aggregate")) + .isEqualTo(literal(new BigDecimal("89.4786576482391284723864721567342354783275234"))); - } + } + } finally { + closeRepository(repo); + } } private void testDistinctSum() { - mixedDataForNumericAggregates(); - - String query = "SELECT ?a (SUM(DISTINCT ?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate "; - try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { - List collect = QueryResults.asList(result); - int i = 0; - assertThat(collect.get(i++).getValue("aggregate")).isNull(); - assertThat(collect.get(i++).getValue("aggregate")).isNull(); - assertThat(collect.get(i++).getValue("aggregate")).isNull(); - assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(30.11)); - assertThat(collect.get(i++).getValue("aggregate")) - .isEqualTo(literal(new BigDecimal("55.4786576482391284723864721567342354783275234"))); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + mixedDataForNumericAggregates(conn); + + String query = "SELECT ?a (SUM(DISTINCT ?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate "; + try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { + List collect = QueryResults.asList(result); + int i = 0; + assertThat(collect.get(i++).getValue("aggregate")).isNull(); + assertThat(collect.get(i++).getValue("aggregate")).isNull(); + assertThat(collect.get(i++).getValue("aggregate")).isNull(); + assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(30.11)); + assertThat(collect.get(i++).getValue("aggregate")) + .isEqualTo(literal(new BigDecimal("55.4786576482391284723864721567342354783275234"))); + } + } finally { + closeRepository(repo); } } private void testAvg() { - mixedDataForNumericAggregates(); - - String query = "SELECT ?a (AVG(?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate "; - try (TupleQueryResult result = 
conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { - List collect = QueryResults.asList(result); - int i = 0; - assertThat(collect.get(i++).getValue("aggregate")).isNull(); - assertThat(collect.get(i++).getValue("aggregate")).isNull(); - assertThat(collect.get(i++).getValue("aggregate")).isNull(); - assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(15.055)); - assertThat(collect.get(i++).getValue("aggregate")) - .isEqualTo(literal(new BigDecimal("17.89573152964782569447729443134684709566550468"))); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + mixedDataForNumericAggregates(conn); + + String query = "SELECT ?a (AVG(?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate "; + try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { + List collect = QueryResults.asList(result); + int i = 0; + assertThat(collect.get(i++).getValue("aggregate")).isNull(); + assertThat(collect.get(i++).getValue("aggregate")).isNull(); + assertThat(collect.get(i++).getValue("aggregate")).isNull(); + assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(15.055)); + assertThat(collect.get(i++).getValue("aggregate")) + .isEqualTo(literal(new BigDecimal("17.89573152964782569447729443134684709566550468"))); + } + } finally { + closeRepository(repo); } } private void testDistinctAvg() { - mixedDataForNumericAggregates(); - - String query = "SELECT ?a (AVG(DISTINCT ?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate "; - try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { - List collect = QueryResults.asList(result); - int i = 0; - assertThat(collect.get(i++).getValue("aggregate")).isNull(); - assertThat(collect.get(i++).getValue("aggregate")).isNull(); - assertThat(collect.get(i++).getValue("aggregate")).isNull(); - - assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(15.055)); - assertThat(collect.get(i++).getValue("aggregate")) - .isEqualTo(literal(new BigDecimal("18.492885882746376157462157"))); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + mixedDataForNumericAggregates(conn); + + String query = "SELECT ?a (AVG(DISTINCT ?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate "; + try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { + List collect = QueryResults.asList(result); + int i = 0; + assertThat(collect.get(i++).getValue("aggregate")).isNull(); + assertThat(collect.get(i++).getValue("aggregate")).isNull(); + assertThat(collect.get(i++).getValue("aggregate")).isNull(); + + assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(15.055)); + assertThat(collect.get(i++).getValue("aggregate")) + .isEqualTo(literal(new BigDecimal("18.492885882746376157462157"))); + } + } finally { + closeRepository(repo); } } private void testMax() { - mixedDataForNumericAggregates(); - - String query = "SELECT ?a (MAX(?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate "; - try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { - List collect = QueryResults.asList(result); - int i = 0; - assertThat(collect.get(i++).getValue("aggregate")) - .isEqualTo(literal(new BigDecimal("19.4786576482391284723864721567342354783275234"))); - assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(23)); - 
assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(23)); - assertThat(collect.get(i++).getValue("aggregate")) - .isEqualTo(literal("2022-01-01T01:01:01.000000001Z", CoreDatatype.XSD.DATETIME)); - assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal("3")); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + mixedDataForNumericAggregates(conn); + + String query = "SELECT ?a (MAX(?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate "; + try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { + List collect = QueryResults.asList(result); + int i = 0; + assertThat(collect.get(i++).getValue("aggregate")) + .isEqualTo(literal(new BigDecimal("19.4786576482391284723864721567342354783275234"))); + assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(23)); + assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(23)); + assertThat(collect.get(i++).getValue("aggregate")) + .isEqualTo(literal("2022-01-01T01:01:01.000000001Z", CoreDatatype.XSD.DATETIME)); + assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal("3")); + } + } finally { + closeRepository(repo); } - } private void testDistinctMax() { - mixedDataForNumericAggregates(); - - String query = "SELECT ?a (MAX(DISTINCT ?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate "; - try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { - List collect = QueryResults.asList(result); - int i = 0; - assertThat(collect.get(i++).getValue("aggregate")) - .isEqualTo(literal(new BigDecimal("19.4786576482391284723864721567342354783275234"))); - assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(23)); - assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(23)); - assertThat(collect.get(i++).getValue("aggregate")) - .isEqualTo(literal("2022-01-01T01:01:01.000000001Z", CoreDatatype.XSD.DATETIME)); - assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal("3")); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + mixedDataForNumericAggregates(conn); + + String query = "SELECT ?a (MAX(DISTINCT ?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate "; + try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { + List collect = QueryResults.asList(result); + int i = 0; + assertThat(collect.get(i++).getValue("aggregate")) + .isEqualTo(literal(new BigDecimal("19.4786576482391284723864721567342354783275234"))); + assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(23)); + assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(23)); + assertThat(collect.get(i++).getValue("aggregate")) + .isEqualTo(literal("2022-01-01T01:01:01.000000001Z", CoreDatatype.XSD.DATETIME)); + assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal("3")); + } + } finally { + closeRepository(repo); } } /** - * @see https://github.com/eclipse/rdf4j/issues/4290 + * @see https://github.com/eclipse/rdf4j/issues/4290 */ private void testCountOrderBy_ImplicitGroup() { - mixedDataForNumericAggregates(); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + mixedDataForNumericAggregates(conn); - String query = "select (count(*) as ?c) where { \n" + " ?s ?p ?o .\n" + "} \n" + "order by (?s)"; + String query = "select (count(*) as ?c) 
where { \n" + " ?s ?p ?o .\n" + "} \n" + "order by (?s)"; - TupleQuery preparedQuery = conn.prepareTupleQuery(query); + TupleQuery preparedQuery = conn.prepareTupleQuery(query); - List result = QueryResults.asList(preparedQuery.evaluate()); - assertThat(result).hasSize(1); + List result = QueryResults.asList(preparedQuery.evaluate()); + assertThat(result).hasSize(1); - BindingSet bs = result.get(0); - assertThat(bs.size()).isEqualTo(1); - assertThat(getIntValue(bs.getValue("c"), 0)).isEqualTo(19); + BindingSet bs = result.get(0); + assertThat(bs.size()).isEqualTo(1); + assertThat(getIntValue(bs.getValue("c"), 0)).isEqualTo(19); + } finally { + closeRepository(repo); + } } // private methods - private void mixedDataForNumericAggregates() { + private void mixedDataForNumericAggregates(RepositoryConnection conn) { IRI node1 = iri("http://example.com/1"); IRI node2 = iri("http://example.com/2"); IRI node3 = iri("http://example.com/3"); diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ArbitraryLengthPathTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ArbitraryLengthPathTest.java index 66fbe513183..b8da5ce7734 100644 --- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ArbitraryLengthPathTest.java +++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ArbitraryLengthPathTest.java @@ -15,6 +15,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; +import java.util.function.Supplier; import java.util.stream.Stream; import org.eclipse.rdf4j.model.vocabulary.OWL; @@ -25,10 +26,10 @@ import org.eclipse.rdf4j.query.TupleQueryResult; import org.eclipse.rdf4j.query.impl.SimpleDataset; import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest; import org.eclipse.rdf4j.testsuite.sparql.vocabulary.EX; import org.junit.jupiter.api.DynamicTest; -import org.junit.jupiter.api.Test; /** * Tests on SPARQL property paths involving * or + operators (arbitrary length paths). @@ -39,7 +40,7 @@ */ public class ArbitraryLengthPathTest extends AbstractComplianceTest { - public ArbitraryLengthPathTest(Repository repo) { + public ArbitraryLengthPathTest(Supplier repo) { super(repo); } @@ -64,45 +65,49 @@ public Stream tests() { */ private void testArbitraryLengthPathWithBinding1() throws Exception { - loadTestData("/testdata-query/alp-testdata.ttl"); - String query = getNamespaceDeclarations() + "SELECT ?parent ?child " - + "WHERE { ?child a owl:Class . ?child rdfs:subClassOf+ ?parent . }"; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - - try (TupleQueryResult result = tq.evaluate()) { - // first execute without binding - assertNotNull(result); - - int count = 0; - while (result.hasNext()) { - count++; - BindingSet bs = result.next(); - assertTrue(bs.hasBinding("child")); - assertTrue(bs.hasBinding("parent")); - } - assertEquals(7, count); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/alp-testdata.ttl", conn); + String query = getNamespaceDeclarations() + "SELECT ?parent ?child " + + "WHERE { ?child a owl:Class . ?child rdfs:subClassOf+ ?parent . 
}"; - // execute again, but this time setting a binding - tq.setBinding("parent", OWL.THING); + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - try (TupleQueryResult result2 = tq.evaluate()) { - assertNotNull(result2); + try (TupleQueryResult result = tq.evaluate()) { + // first execute without binding + assertNotNull(result); - count = 0; - while (result2.hasNext()) { + int count = 0; + while (result.hasNext()) { count++; - BindingSet bs = result2.next(); + BindingSet bs = result.next(); assertTrue(bs.hasBinding("child")); assertTrue(bs.hasBinding("parent")); } - assertEquals(4, count); + assertEquals(7, count); + + // execute again, but this time setting a binding + tq.setBinding("parent", OWL.THING); + + try (TupleQueryResult result2 = tq.evaluate()) { + assertNotNull(result2); + + count = 0; + while (result2.hasNext()) { + count++; + BindingSet bs = result2.next(); + assertTrue(bs.hasBinding("child")); + assertTrue(bs.hasBinding("parent")); + } + assertEquals(4, count); + } + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); } - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + } finally { + closeRepository(repo); } - } /** @@ -111,47 +116,51 @@ private void testArbitraryLengthPathWithBinding1() throws Exception { */ private void testArbitraryLengthPathWithBinding2() throws Exception { - loadTestData("/testdata-query/alp-testdata.ttl"); - - // query without initializing ?child first. - String query = getNamespaceDeclarations() + "SELECT ?parent ?child " - + "WHERE { ?child rdfs:subClassOf+ ?parent . }"; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/alp-testdata.ttl", conn); - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + // query without initializing ?child first. + String query = getNamespaceDeclarations() + "SELECT ?parent ?child " + + "WHERE { ?child rdfs:subClassOf+ ?parent . 
}"; - try (TupleQueryResult result = tq.evaluate()) { - // first execute without binding - assertNotNull(result); + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - int count = 0; - while (result.hasNext()) { - count++; - BindingSet bs = result.next(); - assertTrue(bs.hasBinding("child")); - assertTrue(bs.hasBinding("parent")); - } - assertEquals(7, count); - - // execute again, but this time setting a binding - tq.setBinding("parent", OWL.THING); - - try (TupleQueryResult result2 = tq.evaluate()) { - assertNotNull(result2); + try (TupleQueryResult result = tq.evaluate()) { + // first execute without binding + assertNotNull(result); - count = 0; - while (result2.hasNext()) { + int count = 0; + while (result.hasNext()) { count++; - BindingSet bs = result2.next(); + BindingSet bs = result.next(); assertTrue(bs.hasBinding("child")); assertTrue(bs.hasBinding("parent")); } - assertEquals(4, count); + assertEquals(7, count); + + // execute again, but this time setting a binding + tq.setBinding("parent", OWL.THING); + + try (TupleQueryResult result2 = tq.evaluate()) { + assertNotNull(result2); + + count = 0; + while (result2.hasNext()) { + count++; + BindingSet bs = result2.next(); + assertTrue(bs.hasBinding("child")); + assertTrue(bs.hasBinding("parent")); + } + assertEquals(4, count); + } + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); } - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + } finally { + closeRepository(repo); } - } /** @@ -160,47 +169,51 @@ private void testArbitraryLengthPathWithBinding2() throws Exception { */ private void testArbitraryLengthPathWithBinding3() throws Exception { - loadTestData("/testdata-query/alp-testdata.ttl"); - - // binding on child instead of parent. - String query = getNamespaceDeclarations() + "SELECT ?parent ?child " - + "WHERE { ?child rdfs:subClassOf+ ?parent . }"; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/alp-testdata.ttl", conn); - try (TupleQueryResult result = tq.evaluate()) { - // first execute without binding - assertNotNull(result); - - int count = 0; - while (result.hasNext()) { - count++; - BindingSet bs = result.next(); - assertTrue(bs.hasBinding("child")); - assertTrue(bs.hasBinding("parent")); - } - assertEquals(7, count); + // binding on child instead of parent. + String query = getNamespaceDeclarations() + "SELECT ?parent ?child " + + "WHERE { ?child rdfs:subClassOf+ ?parent . 
}"; - // execute again, but this time setting a binding - tq.setBinding("child", EX.C); + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - try (TupleQueryResult result2 = tq.evaluate()) { - assertNotNull(result2); + try (TupleQueryResult result = tq.evaluate()) { + // first execute without binding + assertNotNull(result); - count = 0; - while (result2.hasNext()) { + int count = 0; + while (result.hasNext()) { count++; - BindingSet bs = result2.next(); + BindingSet bs = result.next(); assertTrue(bs.hasBinding("child")); assertTrue(bs.hasBinding("parent")); } - assertEquals(2, count); + assertEquals(7, count); + + // execute again, but this time setting a binding + tq.setBinding("child", EX.C); + + try (TupleQueryResult result2 = tq.evaluate()) { + assertNotNull(result2); + + count = 0; + while (result2.hasNext()) { + count++; + BindingSet bs = result2.next(); + assertTrue(bs.hasBinding("child")); + assertTrue(bs.hasBinding("parent")); + } + assertEquals(2, count); + } + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); } - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + } finally { + closeRepository(repo); } - } /** @@ -209,47 +222,51 @@ private void testArbitraryLengthPathWithBinding3() throws Exception { */ private void testArbitraryLengthPathWithBinding4() throws Exception { - loadTestData("/testdata-query/alp-testdata.ttl", EX.ALICE); - - // binding on child instead of parent. - String query = getNamespaceDeclarations() + "SELECT ?parent ?child " - + "WHERE { ?child rdfs:subClassOf+ ?parent . }"; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/alp-testdata.ttl", conn, EX.ALICE); - try (TupleQueryResult result = tq.evaluate()) { - // first execute without binding - assertNotNull(result); + // binding on child instead of parent. + String query = getNamespaceDeclarations() + "SELECT ?parent ?child " + + "WHERE { ?child rdfs:subClassOf+ ?parent . 
}"; - int count = 0; - while (result.hasNext()) { - count++; - BindingSet bs = result.next(); - assertTrue(bs.hasBinding("child")); - assertTrue(bs.hasBinding("parent")); - } - assertEquals(7, count); - - // execute again, but this time setting a binding - tq.setBinding("child", EX.C); + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - try (TupleQueryResult result2 = tq.evaluate()) { - assertNotNull(result2); + try (TupleQueryResult result = tq.evaluate()) { + // first execute without binding + assertNotNull(result); - count = 0; - while (result2.hasNext()) { + int count = 0; + while (result.hasNext()) { count++; - BindingSet bs = result2.next(); + BindingSet bs = result.next(); assertTrue(bs.hasBinding("child")); assertTrue(bs.hasBinding("parent")); } - assertEquals(2, count); + assertEquals(7, count); + + // execute again, but this time setting a binding + tq.setBinding("child", EX.C); + + try (TupleQueryResult result2 = tq.evaluate()) { + assertNotNull(result2); + + count = 0; + while (result2.hasNext()) { + count++; + BindingSet bs = result2.next(); + assertTrue(bs.hasBinding("child")); + assertTrue(bs.hasBinding("parent")); + } + assertEquals(2, count); + } + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); } - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + } finally { + closeRepository(repo); } - } /** @@ -258,53 +275,57 @@ private void testArbitraryLengthPathWithBinding4() throws Exception { */ private void testArbitraryLengthPathWithBinding5() throws Exception { - loadTestData("/testdata-query/alp-testdata.ttl", EX.ALICE, EX.BOB); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/alp-testdata.ttl", conn, EX.ALICE, EX.BOB); - // binding on child instead of parent. - String query = getNamespaceDeclarations() + "SELECT ?parent ?child " - + "WHERE { ?child rdfs:subClassOf+ ?parent . }"; + // binding on child instead of parent. + String query = getNamespaceDeclarations() + "SELECT ?parent ?child " + + "WHERE { ?child rdfs:subClassOf+ ?parent . 
}"; - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - try (TupleQueryResult result = tq.evaluate()) { - // first execute without binding - assertNotNull(result); + try (TupleQueryResult result = tq.evaluate()) { + // first execute without binding + assertNotNull(result); - // System.out.println("--- testArbitraryLengthPathWithBinding5 - // ---"); + // System.out.println("--- testArbitraryLengthPathWithBinding5 + // ---"); - int count = 0; - while (result.hasNext()) { - count++; - BindingSet bs = result.next(); - - // System.out.println(bs); - - assertTrue(bs.hasBinding("child")); - assertTrue(bs.hasBinding("parent")); - } - assertEquals(7, count); - - // execute again, but this time setting a binding - tq.setBinding("child", EX.C); + int count = 0; + while (result.hasNext()) { + count++; + BindingSet bs = result.next(); - try (TupleQueryResult result2 = tq.evaluate()) { - assertNotNull(result2); + // System.out.println(bs); - count = 0; - while (result2.hasNext()) { - count++; - BindingSet bs = result2.next(); assertTrue(bs.hasBinding("child")); assertTrue(bs.hasBinding("parent")); } - assertEquals(2, count); + assertEquals(7, count); + + // execute again, but this time setting a binding + tq.setBinding("child", EX.C); + + try (TupleQueryResult result2 = tq.evaluate()) { + assertNotNull(result2); + + count = 0; + while (result2.hasNext()) { + count++; + BindingSet bs = result2.next(); + assertTrue(bs.hasBinding("child")); + assertTrue(bs.hasBinding("parent")); + } + assertEquals(2, count); + } + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); } - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + } finally { + closeRepository(repo); } - } /** @@ -313,53 +334,57 @@ private void testArbitraryLengthPathWithBinding5() throws Exception { */ private void testArbitraryLengthPathWithBinding6() throws Exception { - loadTestData("/testdata-query/alp-testdata.ttl", EX.ALICE, EX.BOB, EX.MARY); - - // binding on child instead of parent. - String query = getNamespaceDeclarations() + "SELECT ?parent ?child " - + "WHERE { ?child rdfs:subClassOf+ ?parent . }"; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/alp-testdata.ttl", conn, EX.ALICE, EX.BOB, EX.MARY); - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + // binding on child instead of parent. + String query = getNamespaceDeclarations() + "SELECT ?parent ?child " + + "WHERE { ?child rdfs:subClassOf+ ?parent . 
}"; - try (TupleQueryResult result = tq.evaluate()) { - // first execute without binding - assertNotNull(result); + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - // System.out.println("--- testArbitraryLengthPathWithBinding6 - // ---"); + try (TupleQueryResult result = tq.evaluate()) { + // first execute without binding + assertNotNull(result); - int count = 0; - while (result.hasNext()) { - count++; - BindingSet bs = result.next(); + // System.out.println("--- testArbitraryLengthPathWithBinding6 + // ---"); - // System.out.println(bs); - - assertTrue(bs.hasBinding("child")); - assertTrue(bs.hasBinding("parent")); - } - assertEquals(7, count); - - // execute again, but this time setting a binding - tq.setBinding("child", EX.C); + int count = 0; + while (result.hasNext()) { + count++; + BindingSet bs = result.next(); - try (TupleQueryResult result2 = tq.evaluate()) { - assertNotNull(result2); + // System.out.println(bs); - count = 0; - while (result2.hasNext()) { - count++; - BindingSet bs = result2.next(); assertTrue(bs.hasBinding("child")); assertTrue(bs.hasBinding("parent")); } - assertEquals(2, count); + assertEquals(7, count); + + // execute again, but this time setting a binding + tq.setBinding("child", EX.C); + + try (TupleQueryResult result2 = tq.evaluate()) { + assertNotNull(result2); + + count = 0; + while (result2.hasNext()) { + count++; + BindingSet bs = result2.next(); + assertTrue(bs.hasBinding("child")); + assertTrue(bs.hasBinding("parent")); + } + assertEquals(2, count); + } + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); } - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + } finally { + closeRepository(repo); } - } /** @@ -368,56 +393,60 @@ private void testArbitraryLengthPathWithBinding6() throws Exception { */ private void testArbitraryLengthPathWithBinding7() throws Exception { - loadTestData("/testdata-query/alp-testdata.ttl", EX.ALICE, EX.BOB, EX.MARY); - - // binding on child instead of parent. - String query = getNamespaceDeclarations() + "SELECT ?parent ?child " - + "WHERE { ?child rdfs:subClassOf+ ?parent . }"; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - SimpleDataset dt = new SimpleDataset(); - dt.addDefaultGraph(EX.ALICE); - tq.setDataset(dt); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/alp-testdata.ttl", conn, EX.ALICE, EX.BOB, EX.MARY); - try (TupleQueryResult result = tq.evaluate()) { - // first execute without binding - assertNotNull(result); + // binding on child instead of parent. + String query = getNamespaceDeclarations() + "SELECT ?parent ?child " + + "WHERE { ?child rdfs:subClassOf+ ?parent . 
}"; - // System.out.println("--- testArbitraryLengthPathWithBinding7 - // ---"); + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + SimpleDataset dt = new SimpleDataset(); + dt.addDefaultGraph(EX.ALICE); + tq.setDataset(dt); - int count = 0; - while (result.hasNext()) { - count++; - BindingSet bs = result.next(); + try (TupleQueryResult result = tq.evaluate()) { + // first execute without binding + assertNotNull(result); - // System.out.println(bs); + // System.out.println("--- testArbitraryLengthPathWithBinding7 + // ---"); - assertTrue(bs.hasBinding("child")); - assertTrue(bs.hasBinding("parent")); - } - assertEquals(7, count); - - // execute again, but this time setting a binding - tq.setBinding("child", EX.C); + int count = 0; + while (result.hasNext()) { + count++; + BindingSet bs = result.next(); - try (TupleQueryResult result2 = tq.evaluate()) { - assertNotNull(result2); + // System.out.println(bs); - count = 0; - while (result2.hasNext()) { - count++; - BindingSet bs = result2.next(); assertTrue(bs.hasBinding("child")); assertTrue(bs.hasBinding("parent")); } - assertEquals(2, count); + assertEquals(7, count); + + // execute again, but this time setting a binding + tq.setBinding("child", EX.C); + + try (TupleQueryResult result2 = tq.evaluate()) { + assertNotNull(result2); + + count = 0; + while (result2.hasNext()) { + count++; + BindingSet bs = result2.next(); + assertTrue(bs.hasBinding("child")); + assertTrue(bs.hasBinding("parent")); + } + assertEquals(2, count); + } + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); } - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + } finally { + closeRepository(repo); } - } /** @@ -426,55 +455,59 @@ private void testArbitraryLengthPathWithBinding7() throws Exception { */ private void testArbitraryLengthPathWithBinding8() throws Exception { - loadTestData("/testdata-query/alp-testdata.ttl", EX.ALICE, EX.BOB, EX.MARY); - - // binding on child instead of parent. - String query = getNamespaceDeclarations() + "SELECT ?parent ?child " - + "WHERE { ?child rdfs:subClassOf+ ?parent . }"; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - SimpleDataset dt = new SimpleDataset(); - dt.addDefaultGraph(EX.ALICE); - dt.addDefaultGraph(EX.BOB); - tq.setDataset(dt); - - try (TupleQueryResult result = tq.evaluate()) { - // first execute without binding - assertNotNull(result); - // System.out.println("--- testArbitraryLengthPathWithBinding8 - // ---"); - int count = 0; - while (result.hasNext()) { - count++; - BindingSet bs = result.next(); - - // System.out.println(bs); - - assertTrue(bs.hasBinding("child")); - assertTrue(bs.hasBinding("parent")); - } - assertEquals(7, count); - - // execute again, but this time setting a binding - tq.setBinding("child", EX.C); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/alp-testdata.ttl", conn, EX.ALICE, EX.BOB, EX.MARY); + + // binding on child instead of parent. + String query = getNamespaceDeclarations() + "SELECT ?parent ?child " + + "WHERE { ?child rdfs:subClassOf+ ?parent . 
}"; + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + SimpleDataset dt = new SimpleDataset(); + dt.addDefaultGraph(EX.ALICE); + dt.addDefaultGraph(EX.BOB); + tq.setDataset(dt); + + try (TupleQueryResult result = tq.evaluate()) { + // first execute without binding + assertNotNull(result); + // System.out.println("--- testArbitraryLengthPathWithBinding8 + // ---"); + int count = 0; + while (result.hasNext()) { + count++; + BindingSet bs = result.next(); - try (TupleQueryResult result2 = tq.evaluate()) { - assertNotNull(result2); + // System.out.println(bs); - count = 0; - while (result2.hasNext()) { - count++; - BindingSet bs = result2.next(); assertTrue(bs.hasBinding("child")); assertTrue(bs.hasBinding("parent")); } - assertEquals(2, count); + assertEquals(7, count); + + // execute again, but this time setting a binding + tq.setBinding("child", EX.C); + + try (TupleQueryResult result2 = tq.evaluate()) { + assertNotNull(result2); + + count = 0; + while (result2.hasNext()) { + count++; + BindingSet bs = result2.next(); + assertTrue(bs.hasBinding("child")); + assertTrue(bs.hasBinding("parent")); + } + assertEquals(2, count); + } + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); } - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + } finally { + closeRepository(repo); } - } /** @@ -483,28 +516,32 @@ private void testArbitraryLengthPathWithBinding8() throws Exception { */ private void testArbitraryLengthPathWithFilter1() throws Exception { - loadTestData("/testdata-query/alp-testdata.ttl"); - String query = getNamespaceDeclarations() + "SELECT ?parent ?child " - + "WHERE { ?child a owl:Class . ?child rdfs:subClassOf+ ?parent . FILTER (?parent = owl:Thing) }"; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/alp-testdata.ttl", conn); + String query = getNamespaceDeclarations() + "SELECT ?parent ?child " + + "WHERE { ?child a owl:Class . ?child rdfs:subClassOf+ ?parent . FILTER (?parent = owl:Thing) }"; - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); - int count = 0; - while (result.hasNext()) { - count++; - BindingSet bs = result.next(); - assertTrue(bs.hasBinding("child")); - assertTrue(bs.hasBinding("parent")); + int count = 0; + while (result.hasNext()) { + count++; + BindingSet bs = result.next(); + assertTrue(bs.hasBinding("child")); + assertTrue(bs.hasBinding("parent")); + } + assertEquals(4, count); + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); } - assertEquals(4, count); - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + } finally { + closeRepository(repo); } - } /** @@ -513,28 +550,32 @@ private void testArbitraryLengthPathWithFilter1() throws Exception { */ private void testArbitraryLengthPathWithFilter2() throws Exception { - loadTestData("/testdata-query/alp-testdata.ttl"); - String query = getNamespaceDeclarations() + "SELECT ?parent ?child " - + "WHERE { ?child rdfs:subClassOf+ ?parent . 
FILTER (?parent = owl:Thing) }"; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/alp-testdata.ttl", conn); + String query = getNamespaceDeclarations() + "SELECT ?parent ?child " + + "WHERE { ?child rdfs:subClassOf+ ?parent . FILTER (?parent = owl:Thing) }"; - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); - int count = 0; - while (result.hasNext()) { - count++; - BindingSet bs = result.next(); - assertTrue(bs.hasBinding("child")); - assertTrue(bs.hasBinding("parent")); + int count = 0; + while (result.hasNext()) { + count++; + BindingSet bs = result.next(); + assertTrue(bs.hasBinding("child")); + assertTrue(bs.hasBinding("parent")); + } + assertEquals(4, count); + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); } - assertEquals(4, count); - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + } finally { + closeRepository(repo); } - } /** @@ -543,52 +584,60 @@ private void testArbitraryLengthPathWithFilter2() throws Exception { */ private void testArbitraryLengthPathWithFilter3() throws Exception { - loadTestData("/testdata-query/alp-testdata.ttl"); - String query = getNamespaceDeclarations() + "SELECT ?parent ?child " - + "WHERE { ?child rdfs:subClassOf+ ?parent . FILTER (?child = ) }"; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/alp-testdata.ttl", conn); + String query = getNamespaceDeclarations() + "SELECT ?parent ?child " + + "WHERE { ?child rdfs:subClassOf+ ?parent . FILTER (?child = ) }"; - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); - int count = 0; - while (result.hasNext()) { - count++; - BindingSet bs = result.next(); - assertTrue(bs.hasBinding("child")); - assertTrue(bs.hasBinding("parent")); + int count = 0; + while (result.hasNext()) { + count++; + BindingSet bs = result.next(); + assertTrue(bs.hasBinding("child")); + assertTrue(bs.hasBinding("parent")); + } + assertEquals(2, count); + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); } - assertEquals(2, count); - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + } finally { + closeRepository(repo); } - } private void testPropertyPathInTree() throws Exception { - loadTestData("/testdata-query/dataset-query.trig"); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-query.trig", conn); - String query = getNamespaceDeclarations() + " SELECT ?node ?name " + " FROM ex:tree-graph " - + " WHERE { ?node ex:hasParent+ ex:b . ?node ex:name ?name . }"; + String query = getNamespaceDeclarations() + " SELECT ?node ?name " + " FROM ex:tree-graph " + + " WHERE { ?node ex:hasParent+ ex:b . ?node ex:name ?name . 
}"; - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); - while (result.hasNext()) { - BindingSet bs = result.next(); - assertNotNull(bs); + while (result.hasNext()) { + BindingSet bs = result.next(); + assertNotNull(bs); - // System.out.println(bs); + // System.out.println(bs); + } + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); } - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + } finally { + closeRepository(repo); } - } } diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BasicTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BasicTest.java index 4fc374278a0..f558459abf4 100644 --- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BasicTest.java +++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BasicTest.java @@ -14,6 +14,7 @@ import static org.junit.jupiter.api.Assertions.fail; import java.util.List; +import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -25,6 +26,7 @@ import org.eclipse.rdf4j.query.BindingSet; import org.eclipse.rdf4j.query.QueryLanguage; import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest; import org.eclipse.rdf4j.testsuite.sparql.vocabulary.EX; import org.junit.jupiter.api.DynamicTest; @@ -38,23 +40,27 @@ */ public class BasicTest extends AbstractComplianceTest { - public BasicTest(Repository repo) { + public BasicTest(Supplier repo) { super(repo); } private void testIdenticalVariablesInStatementPattern() { - conn.add(EX.ALICE, DC.PUBLISHER, EX.BOB); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + conn.add(EX.ALICE, DC.PUBLISHER, EX.BOB); - String queryBuilder = "SELECT ?publisher " - + "{ ?publisher ?publisher }"; + String queryBuilder = "SELECT ?publisher " + + "{ ?publisher ?publisher }"; - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryBuilder).evaluate(new AbstractTupleQueryResultHandler() { + conn.prepareTupleQuery(QueryLanguage.SPARQL, queryBuilder).evaluate(new AbstractTupleQueryResultHandler() { - @Override - public void handleSolution(BindingSet bindingSet) { - fail("nobody is self published"); - } - }); + @Override + public void handleSolution(BindingSet bindingSet) { + fail("nobody is self published"); + } + }); + } + closeRepository(repo); } public Stream tests() { @@ -64,26 +70,26 @@ public Stream tests() { @Test public void testIdenticalVariablesSubjectContextInStatementPattern() { - conn.add(EX.ALICE, FOAF.KNOWS, EX.BOB, EX.ALICE); - conn.add(EX.ALICE, RDF.TYPE, FOAF.PERSON, EX.ALICE); - conn.add(EX.ALICE, FOAF.KNOWS, EX.A, EX.BOB); - conn.add(EX.ALICE, FOAF.KNOWS, EX.B, EX.BOB); - conn.add(EX.ALICE, FOAF.KNOWS, EX.C, EX.BOB); - conn.add(EX.ALICE, FOAF.KNOWS, EX.MARY, EX.BOB); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + conn.add(EX.ALICE, FOAF.KNOWS, EX.BOB, EX.ALICE); + conn.add(EX.ALICE, RDF.TYPE, FOAF.PERSON, EX.ALICE); + conn.add(EX.ALICE, FOAF.KNOWS, EX.A, EX.BOB); + conn.add(EX.ALICE, FOAF.KNOWS, EX.B, EX.BOB); + conn.add(EX.ALICE, 
FOAF.KNOWS, EX.C, EX.BOB); + conn.add(EX.ALICE, FOAF.KNOWS, EX.MARY, EX.BOB); - String queryBuilder = "SELECT ?knows { " + - " graph ?alice {" + - " ?alice a <" + FOAF.PERSON + ">; " + - " <" + FOAF.KNOWS + "> ?knows ." + - " }" + - "}"; + String queryBuilder = "SELECT ?knows { " + " graph ?alice {" + " ?alice a <" + FOAF.PERSON + ">; " + + " <" + FOAF.KNOWS + "> ?knows ." + " }" + "}"; - try (Stream stream = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryBuilder) - .evaluate() - .stream()) { - List knows = stream.map(b -> b.getValue("knows")).collect(Collectors.toList()); - assertEquals(List.of(EX.BOB), knows); + try (Stream stream = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryBuilder) + .evaluate() + .stream()) { + List knows = stream.map(b -> b.getValue("knows")).collect(Collectors.toList()); + assertEquals(List.of(EX.BOB), knows); + } } + closeRepository(repo); } } diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BindTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BindTest.java index fcbfe95fb32..2d37d2067fd 100644 --- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BindTest.java +++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BindTest.java @@ -17,6 +17,7 @@ import static org.junit.jupiter.api.Assertions.assertNull; import java.util.List; +import java.util.function.Supplier; import java.util.stream.Stream; import org.eclipse.rdf4j.model.IRI; @@ -33,6 +34,7 @@ import org.eclipse.rdf4j.query.TupleQuery; import org.eclipse.rdf4j.query.TupleQueryResult; import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest; import org.junit.jupiter.api.DynamicTest; @@ -44,7 +46,7 @@ */ public class BindTest extends AbstractComplianceTest { - public BindTest(Repository repo) { + public BindTest(Supplier repo) { super(repo); } @@ -53,16 +55,21 @@ public BindTest(Repository repo) { */ private void testBindError() { + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + conn.prepareUpdate(QueryLanguage.SPARQL, "insert data { _:blank }") + .execute(); - conn.prepareUpdate(QueryLanguage.SPARQL, "insert data { _:blank }").execute(); + String qb = "SELECT * \n" + "WHERE { \n" + " VALUES (?NAValue) { () } \n " + + " BIND(IF(?NAValue != , ?NAValue, ?notBoundVar) as ?ValidNAValue) \n " + + " { ?disjClass (owl:disjointWith|^owl:disjointWith)? ?disjClass2 . }\n" + "}\n"; - String qb = "SELECT * \n" + "WHERE { \n" + " VALUES (?NAValue) { () } \n " - + " BIND(IF(?NAValue != , ?NAValue, ?notBoundVar) as ?ValidNAValue) \n " - + " { ?disjClass (owl:disjointWith|^owl:disjointWith)? ?disjClass2 . 
}\n" + "}\n"; + List result = QueryResults.asList(conn.prepareTupleQuery(qb).evaluate()); - List result = QueryResults.asList(conn.prepareTupleQuery(qb).evaluate()); - - assertEquals(2, result.size(), "query should return 2 solutions"); + assertEquals(2, result.size(), "query should return 2 solutions"); + } finally { + closeRepository(repo); + } } /** @@ -70,15 +77,20 @@ private void testBindError() { */ private void testBindScope() { - String query = "SELECT * {\n" + " { BIND (\"a\" AS ?a) }\n" + " { BIND (?a AS ?b) } \n" + "}"; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String query = "SELECT * {\n" + " { BIND (\"a\" AS ?a) }\n" + " { BIND (?a AS ?b) } \n" + "}"; - TupleQuery q = conn.prepareTupleQuery(query); - List result = QueryResults.asList(q.evaluate()); + TupleQuery q = conn.prepareTupleQuery(query); + List result = QueryResults.asList(q.evaluate()); - assertEquals(1, result.size()); + assertEquals(1, result.size()); - assertEquals(conn.getValueFactory().createLiteral("a"), result.get(0).getValue("a")); - assertNull(result.get(0).getValue("b")); + assertEquals(conn.getValueFactory().createLiteral("a"), result.get(0).getValue("a")); + assertNull(result.get(0).getValue("b")); + } finally { + closeRepository(repo); + } } /** @@ -86,145 +98,179 @@ private void testBindScope() { */ private void testBindScopeUnion() { - - ValueFactory f = conn.getValueFactory(); - String query = "prefix ex: \n" + "select * {\n" + " bind(ex:v1 as ?v)\n" - + " bind(strafter(str(?v),str(ex:)) as ?b)\n" + " {\n" + " bind(?b as ?b1)\n" + " } union {\n" - + " bind(?b as ?b2)\n" + " }\n" + "}"; - - TupleQuery q = conn.prepareTupleQuery(query); - List result = QueryResults.asList(q.evaluate()); - - assertEquals(2, result.size()); - - IRI v1 = f.createIRI("http://example.org/v1"); - Literal b = f.createLiteral("v1"); - for (BindingSet bs : result) { - assertThat(bs.getValue("v")).isEqualTo(v1); - assertThat(bs.getValue("b1")).isNull(); - assertThat(bs.getValue("b2")).isNull(); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + ValueFactory f = conn.getValueFactory(); + String query = "prefix ex: \n" + "select * {\n" + " bind(ex:v1 as ?v)\n" + + " bind(strafter(str(?v),str(ex:)) as ?b)\n" + " {\n" + " bind(?b as ?b1)\n" + " } union {\n" + + " bind(?b as ?b2)\n" + " }\n" + "}"; + + TupleQuery q = conn.prepareTupleQuery(query); + List result = QueryResults.asList(q.evaluate()); + + assertEquals(2, result.size()); + + IRI v1 = f.createIRI("http://example.org/v1"); + Literal b = f.createLiteral("v1"); + for (BindingSet bs : result) { + assertThat(bs.getValue("v")).isEqualTo(v1); + assertThat(bs.getValue("b1")).isNull(); + assertThat(bs.getValue("b2")).isNull(); + } + } finally { + closeRepository(repo); } } private void testSES2250BindErrors() { - - conn.prepareUpdate(QueryLanguage.SPARQL, "insert data { _:blank }").execute(); - - String qb = "SELECT * {\n" + " ?s1 ?p1 ?blank . " + " FILTER(isBlank(?blank))" - + " BIND (iri(?blank) as ?biri)" + " ?biri ?p2 ?o2 ." + "}"; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, qb); - try (TupleQueryResult evaluate = tq.evaluate()) { - assertFalse(evaluate.hasNext(), "The query should not return a result"); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + conn.prepareUpdate(QueryLanguage.SPARQL, "insert data { _:blank }") + .execute(); + + String qb = "SELECT * {\n" + " ?s1 ?p1 ?blank . 
" + " FILTER(isBlank(?blank))" + + " BIND (iri(?blank) as ?biri)" + " ?biri ?p2 ?o2 ." + "}"; + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, qb); + try (TupleQueryResult evaluate = tq.evaluate()) { + assertFalse(evaluate.hasNext(), "The query should not return a result"); + } + } finally { + closeRepository(repo); } } private void testSES2250BindErrorsInPath() { - - conn.prepareUpdate(QueryLanguage.SPARQL, "insert data { _:blank }").execute(); - - String qb = "SELECT * {\n" + " ?s1 ?p1 ?blank . " + " FILTER(isBlank(?blank))" - + " BIND (iri(?blank) as ?biri)" + " ?biri * ?o2 ." + "}"; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, qb); - try (TupleQueryResult evaluate = tq.evaluate()) { - assertFalse(evaluate.hasNext(), "The query should not return a result"); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + conn.prepareUpdate(QueryLanguage.SPARQL, "insert data { _:blank }") + .execute(); + + String qb = "SELECT * {\n" + " ?s1 ?p1 ?blank . " + " FILTER(isBlank(?blank))" + + " BIND (iri(?blank) as ?biri)" + " ?biri * ?o2 ." + "}"; + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, qb); + try (TupleQueryResult evaluate = tq.evaluate()) { + assertFalse(evaluate.hasNext(), "The query should not return a result"); + } + } finally { + closeRepository(repo); } } private void testSelectBindOnly() { - String query = "select ?b1 ?b2 ?b3\n" + "where {\n" + " bind(1 as ?b1)\n" + "}"; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String query = "select ?b1 ?b2 ?b3\n" + "where {\n" + " bind(1 as ?b1)\n" + "}"; - List result = QueryResults.asList(conn.prepareTupleQuery(query).evaluate()); + List result = QueryResults.asList(conn.prepareTupleQuery(query).evaluate()); - assertThat(result.size()).isEqualTo(1); - BindingSet solution = result.get(0); + assertThat(result.size()).isEqualTo(1); + BindingSet solution = result.get(0); - assertThat(solution.getValue("b1")).isEqualTo(literal("1", CoreDatatype.XSD.INTEGER)); - assertThat(solution.getValue("b2")).isNull(); - assertThat(solution.getValue("b3")).isNull(); + assertThat(solution.getValue("b1")).isEqualTo(literal("1", CoreDatatype.XSD.INTEGER)); + assertThat(solution.getValue("b2")).isNull(); + assertThat(solution.getValue("b3")).isNull(); + } finally { + closeRepository(repo); + } } private void testGH3696Bind() { - Model testData = new ModelBuilder().setNamespace("ex", "http://example.org/") - .subject("ex:unit1") - .add(RDF.TYPE, "ex:Unit") - .add(RDFS.LABEL, "Unit1") - .add("ex:has", "Unit1") - .subject("ex:unit2") - .add(RDF.TYPE, "ex:Unit") - .add(RDFS.LABEL, "Unit2") - .build(); - conn.add(testData); - - String query = "PREFIX ex: \n" + "SELECT * {\n" + " ?bind rdfs:label ?b1 ;\n" - + " a ex:Unit .\n" + " FILTER (?b1 = 'Unit2') .\n" + " BIND(?bind AS ?n0)\n" - + " ?n0 ex:has ?n1 \n" + " }"; - - List result = QueryResults.asList(conn.prepareTupleQuery(query).evaluate()); - - assertThat(result).isEmpty(); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + Model testData = new ModelBuilder().setNamespace("ex", "http://example.org/") + .subject("ex:unit1") + .add(RDF.TYPE, "ex:Unit") + .add(RDFS.LABEL, "Unit1") + .add("ex:has", "Unit1") + .subject("ex:unit2") + .add(RDF.TYPE, "ex:Unit") + .add(RDFS.LABEL, "Unit2") + .build(); + conn.add(testData); + + String query = "PREFIX ex: \n" + "SELECT * {\n" + " ?bind rdfs:label ?b1 ;\n" + + " a ex:Unit .\n" + " 
FILTER (?b1 = 'Unit2') .\n" + " BIND(?bind AS ?n0)\n" + + " ?n0 ex:has ?n1 \n" + " }"; + + List result = QueryResults.asList(conn.prepareTupleQuery(query).evaluate()); + + assertThat(result).isEmpty(); + } finally { + closeRepository(repo); + } } private void testGH4499BindFilterNotExist1() { - Model testData = new ModelBuilder().setNamespace("ex", "http://example.org/") - .subject("ex:a") - .add("ex:p", "ex:c1") - .add("ex:p", "ex:c2") - .add("ex:p", "ex:c3") - .subject("ex:c1") - .add(RDF.TYPE, "ex:T") - .add("ex:q", "something") - .subject("ex:c2") - .add(RDF.TYPE, "ex:T") - .build(); - conn.add(testData); - - String query = "PREFIX ex: \n" + "SELECT *\n" + " WHERE {\n" - + " BIND ( ex:a AS ?a )\n" + " BIND ( ex:b AS ?b )\n" - + " ?a ex:p* ?c .\n" + " FILTER EXISTS { ?c rdf:type ex:T }\n" - + " FILTER NOT EXISTS { ?c ex:q ?d}\n" + "}"; - - List result = QueryResults.asList(conn.prepareTupleQuery(query).evaluate()); - - assertThat(result).hasSize(1); - - var bs = result.get(0); - - assertThat(bs.getValue("a").stringValue()).isEqualTo("http://example.org/a"); - assertThat(bs.getValue("c").stringValue()).isEqualTo("http://example.org/c2"); - assertThat(bs.getValue("d")).isNull(); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + Model testData = new ModelBuilder().setNamespace("ex", "http://example.org/") + .subject("ex:a") + .add("ex:p", "ex:c1") + .add("ex:p", "ex:c2") + .add("ex:p", "ex:c3") + .subject("ex:c1") + .add(RDF.TYPE, "ex:T") + .add("ex:q", "something") + .subject("ex:c2") + .add(RDF.TYPE, "ex:T") + .build(); + conn.add(testData); + + String query = "PREFIX ex: \n" + "SELECT *\n" + " WHERE {\n" + + " BIND ( ex:a AS ?a )\n" + " BIND ( ex:b AS ?b )\n" + + " ?a ex:p* ?c .\n" + " FILTER EXISTS { ?c rdf:type ex:T }\n" + + " FILTER NOT EXISTS { ?c ex:q ?d}\n" + "}"; + + List result = QueryResults.asList(conn.prepareTupleQuery(query).evaluate()); + + assertThat(result).hasSize(1); + + var bs = result.get(0); + + assertThat(bs.getValue("a").stringValue()).isEqualTo("http://example.org/a"); + assertThat(bs.getValue("c").stringValue()).isEqualTo("http://example.org/c2"); + assertThat(bs.getValue("d")).isNull(); + } finally { + closeRepository(repo); + } } private void testGH4499BindFilterNotExist2() { - Model testData = new ModelBuilder().setNamespace("ex", "http://example.org/") - .subject("ex:a") - .add("ex:p", "ex:c1") - .add("ex:p", "ex:c2") - .add("ex:p", "ex:c3") - .subject("ex:c1") - .add(RDF.TYPE, "ex:T") - .add("ex:q", "something") - .subject("ex:c2") - .add(RDF.TYPE, "ex:T") - .build(); - conn.add(testData); - - String query = "PREFIX ex: \n" + "SELECT *\n" + " WHERE {\n" - + " FILTER EXISTS { ?c rdf:type ex:T }\n" + " FILTER NOT EXISTS { ?c ex:q ?d }\n" - + " BIND ( ex:a AS ?a )\n" + " BIND ( ex:b AS ?b )\n" - + " ?a ex:p* ?c .\n" + "}"; - - List result = QueryResults.asList(conn.prepareTupleQuery(query).evaluate()); - - assertThat(result).hasSize(1); - - var bs = result.get(0); - - assertThat(bs.getValue("a").stringValue()).isEqualTo("http://example.org/a"); - assertThat(bs.getValue("c").stringValue()).isEqualTo("http://example.org/c2"); - assertThat(bs.getValue("d")).isNull(); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + Model testData = new ModelBuilder().setNamespace("ex", "http://example.org/") + .subject("ex:a") + .add("ex:p", "ex:c1") + .add("ex:p", "ex:c2") + .add("ex:p", "ex:c3") + .subject("ex:c1") + .add(RDF.TYPE, "ex:T") + .add("ex:q", "something") + 
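Every refactored test in these classes follows the same open/use/close shape around its body. As a compact reference, that lifecycle reads roughly as follows; this is a sketch only, with a placeholder method name and query, while openRepository, closeRepository and loadTestData are the helpers used throughout this patch:

	private void exampleLifecycle() throws Exception {
		// sketch of the per-test repository lifecycle introduced by this patch
		Repository repo = openRepository();
		try (RepositoryConnection conn = repo.getConnection()) {
			// data loading and queries go through the connection scoped to this one test
			loadTestData("/testdata-query/dataset-query.trig", conn);
			TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, "SELECT * WHERE { ?s ?p ?o }");
			try (TupleQueryResult result = tq.evaluate()) {
				assertNotNull(result);
			}
		} finally {
			// each test releases the repository it opened
			closeRepository(repo);
		}
	}
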
.subject("ex:c2") + .add(RDF.TYPE, "ex:T") + .build(); + conn.add(testData); + + String query = "PREFIX ex: \n" + "SELECT *\n" + " WHERE {\n" + + " FILTER EXISTS { ?c rdf:type ex:T }\n" + + " FILTER NOT EXISTS { ?c ex:q ?d }\n" + " BIND ( ex:a AS ?a )\n" + + " BIND ( ex:b AS ?b )\n" + " ?a ex:p* ?c .\n" + "}"; + + List result = QueryResults.asList(conn.prepareTupleQuery(query).evaluate()); + + assertThat(result).hasSize(1); + + var bs = result.get(0); + + assertThat(bs.getValue("a").stringValue()).isEqualTo("http://example.org/a"); + assertThat(bs.getValue("c").stringValue()).isEqualTo("http://example.org/c2"); + assertThat(bs.getValue("d")).isNull(); + } finally { + closeRepository(repo); + } } diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BuiltinFunctionTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BuiltinFunctionTest.java index 6b71c64aaa7..01dfe7b0831 100644 --- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BuiltinFunctionTest.java +++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BuiltinFunctionTest.java @@ -19,6 +19,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; +import java.util.function.Supplier; import java.util.stream.Stream; import org.eclipse.rdf4j.model.IRI; @@ -31,6 +32,7 @@ import org.eclipse.rdf4j.query.TupleQuery; import org.eclipse.rdf4j.query.TupleQueryResult; import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest; import org.junit.jupiter.api.DynamicTest; @@ -42,7 +44,7 @@ */ public class BuiltinFunctionTest extends AbstractComplianceTest { - public BuiltinFunctionTest(Repository repo) { + public BuiltinFunctionTest(Supplier repo) { super(repo); } @@ -53,13 +55,16 @@ public BuiltinFunctionTest(Repository repo) { private void testSeconds() { String qry = "PREFIX xsd: " + "SELECT (SECONDS(\"2011-01-10T14:45:13\"^^xsd:dateTime) AS ?sec) { }"; - - try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) { - assertNotNull(result); - assertTrue(result.hasNext()); - assertEquals("13", result.next().getValue("sec").stringValue()); - assertFalse(result.hasNext()); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) { + assertNotNull(result); + assertTrue(result.hasNext()); + assertEquals("13", result.next().getValue("sec").stringValue()); + assertFalse(result.hasNext()); + } } + closeRepository(repo); } /** @@ -70,278 +75,341 @@ private void testSecondsMilliseconds() { String qry = "PREFIX xsd: " + "SELECT (SECONDS(\"2011-01-10T14:45:13.815-05:00\"^^xsd:dateTime) AS ?sec) { }"; - try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) { - assertNotNull(result); - assertTrue(result.hasNext()); - assertEquals("13.815", result.next().getValue("sec").stringValue()); - assertFalse(result.hasNext()); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) { + assertNotNull(result); + assertTrue(result.hasNext()); + assertEquals("13.815", result.next().getValue("sec").stringValue()); + assertFalse(result.hasNext()); + } } + 
closeRepository(repo);
}
private void testSES1991NOWEvaluation() throws Exception {
- loadTestData("/testdata-query/defaultgraph.ttl");
- String query = "SELECT ?d WHERE {?s ?p ?o . BIND(NOW() as ?d) } LIMIT 2";
-
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-
- try (TupleQueryResult result = tq.evaluate()) {
- assertNotNull(result);
- assertTrue(result.hasNext());
- Literal d1 = (Literal) result.next().getValue("d");
- assertTrue(result.hasNext());
- Literal d2 = (Literal) result.next().getValue("d");
- assertFalse(result.hasNext());
- assertNotNull(d1);
- assertEquals(d1, d2);
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/defaultgraph.ttl", conn);
+ String query = "SELECT ?d WHERE {?s ?p ?o . BIND(NOW() as ?d) } LIMIT 2";
+
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertNotNull(result);
+ assertTrue(result.hasNext());
+ Literal d1 = (Literal) result.next().getValue("d");
+ assertTrue(result.hasNext());
+ Literal d2 = (Literal) result.next().getValue("d");
+ assertFalse(result.hasNext());
+ assertNotNull(d1);
+ assertEquals(d1, d2);
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
+ }
}
+ closeRepository(repo);
}
private void testSES869ValueOfNow() {
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL,
- "SELECT ?p ( NOW() as ?n ) { BIND (NOW() as ?p ) }");
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL,
+ "SELECT ?p ( NOW() as ?n ) { BIND (NOW() as ?p ) }");
- try (TupleQueryResult result = tq.evaluate()) {
- assertNotNull(result);
- assertTrue(result.hasNext());
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertNotNull(result);
+ assertTrue(result.hasNext());
- BindingSet bs = result.next();
- Value p = bs.getValue("p");
- Value n = bs.getValue("n");
+ BindingSet bs = result.next();
+ Value p = bs.getValue("p");
+ Value n = bs.getValue("n");
- assertNotNull(p);
- assertNotNull(n);
- assertEquals(p, n);
- assertTrue(p == n);
+ assertNotNull(p);
+ assertNotNull(n);
+ assertEquals(p, n);
+ assertTrue(p == n);
+ }
}
+ closeRepository(repo);
}
private void testSES1991UUIDEvaluation() throws Exception {
- loadTestData("/testdata-query/defaultgraph.ttl");
- String query = "SELECT ?uid WHERE {?s ?p ?o .
BIND(UUID() as ?uid) } LIMIT 2"; - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); - IRI uuid1 = (IRI) result.next().getValue("uid"); - IRI uuid2 = (IRI) result.next().getValue("uid"); + IRI uuid1 = (IRI) result.next().getValue("uid"); + IRI uuid2 = (IRI) result.next().getValue("uid"); - assertNotNull(uuid1); - assertNotNull(uuid2); - assertNotEquals(uuid1, uuid2); - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + assertNotNull(uuid1); + assertNotNull(uuid2); + assertNotEquals(uuid1, uuid2); + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); + } } + closeRepository(repo); } private void testSES1991STRUUIDEvaluation() throws Exception { - loadTestData("/testdata-query/defaultgraph.ttl"); - String query = "SELECT ?uid WHERE {?s ?p ?o . BIND(STRUUID() as ?uid) } LIMIT 2"; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/defaultgraph.ttl", conn); + String query = "SELECT ?uid WHERE {?s ?p ?o . BIND(STRUUID() as ?uid) } LIMIT 2"; - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); - Literal uid1 = (Literal) result.next().getValue("uid"); - Literal uid2 = (Literal) result.next().getValue("uid"); + Literal uid1 = (Literal) result.next().getValue("uid"); + Literal uid2 = (Literal) result.next().getValue("uid"); - assertNotNull(uid1); - assertNotEquals(uid1, uid2); - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + assertNotNull(uid1); + assertNotEquals(uid1, uid2); + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); + } } + closeRepository(repo); } private void testSES1991RANDEvaluation() throws Exception { - loadTestData("/testdata-query/defaultgraph.ttl"); - String query = "SELECT ?r WHERE {?s ?p ?o . BIND(RAND() as ?r) } LIMIT 3"; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); - - Literal r1 = (Literal) result.next().getValue("r"); - Literal r2 = (Literal) result.next().getValue("r"); - Literal r3 = (Literal) result.next().getValue("r"); - - assertNotNull(r1); - - // there is a small chance that two successive calls to the random - // number generator will generate the exact same value, so we check - // for - // three successive calls (still theoretically possible to be - // identical, but phenomenally unlikely). - assertFalse(r1.equals(r2) && r1.equals(r3)); - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/defaultgraph.ttl", conn); + String query = "SELECT ?r WHERE {?s ?p ?o . 
BIND(RAND() as ?r) } LIMIT 3"; + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); + + Literal r1 = (Literal) result.next().getValue("r"); + Literal r2 = (Literal) result.next().getValue("r"); + Literal r3 = (Literal) result.next().getValue("r"); + + assertNotNull(r1); + + // there is a small chance that two successive calls to the random + // number generator will generate the exact same value, so we check + // for + // three successive calls (still theoretically possible to be + // identical, but phenomenally unlikely). + assertFalse(r1.equals(r2) && r1.equals(r3)); + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); + } } + closeRepository(repo); } private void testSES2121URIFunction() { - String query = "SELECT (URI(\"foo bar\") as ?uri) WHERE {}"; - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); - assertTrue(result.hasNext()); - BindingSet bs = result.next(); - IRI uri = (IRI) bs.getValue("uri"); - assertNull(uri, "uri result for invalid URI should be unbound"); - } + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String query = "SELECT (URI(\"foo bar\") as ?uri) WHERE {}"; + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); + assertTrue(result.hasNext()); + BindingSet bs = result.next(); + IRI uri = (IRI) bs.getValue("uri"); + assertNull(uri, "uri result for invalid URI should be unbound"); + } - query = "BASE SELECT (URI(\"foo bar\") as ?uri) WHERE {}"; - tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); - assertTrue(result.hasNext()); - BindingSet bs = result.next(); - IRI uri = (IRI) bs.getValue("uri"); - assertNotNull(uri, "uri result for valid URI reference should be bound"); + query = "BASE SELECT (URI(\"foo bar\") as ?uri) WHERE {}"; + tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); + assertTrue(result.hasNext()); + BindingSet bs = result.next(); + IRI uri = (IRI) bs.getValue("uri"); + assertNotNull(uri, "uri result for valid URI reference should be bound"); + } } + closeRepository(repo); } private void test27NormalizeIRIFunction() { - String query = "SELECT (IRI(\"../bar\") as ?Iri) WHERE {}"; - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query, "http://example.com/foo/"); - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); - assertTrue(result.hasNext()); - BindingSet bs = result.next(); - IRI actual = (IRI) bs.getValue("Iri"); - IRI expected = iri("http://example.com/bar"); - assertEquals(expected, actual, "IRI result for relative IRI should be normalized"); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String query = "SELECT (IRI(\"../bar\") as ?Iri) WHERE {}"; + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query, "http://example.com/foo/"); + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); + assertTrue(result.hasNext()); + BindingSet bs = result.next(); + IRI actual = (IRI) bs.getValue("Iri"); + IRI expected = iri("http://example.com/bar"); + assertEquals(expected, actual, "IRI result for relative IRI should 
be normalized"); + } } + closeRepository(repo); } private void testSES2052If1() throws Exception { - loadTestData("/testdata-query/dataset-query.trig"); - String query = "SELECT ?p \n" + "WHERE { \n" + " ?s ?p ?o . \n" - + " FILTER(IF(BOUND(?p), ?p = , false)) \n" - + "}"; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); - while (result.hasNext()) { - BindingSet bs = result.next(); - - IRI p = (IRI) bs.getValue("p"); - assertNotNull(p); - assertEquals(RDF.TYPE, p); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-query.trig", conn); + String query = "SELECT ?p \n" + "WHERE { \n" + " ?s ?p ?o . \n" + + " FILTER(IF(BOUND(?p), ?p = , false)) \n" + + "}"; + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); + while (result.hasNext()) { + BindingSet bs = result.next(); + + IRI p = (IRI) bs.getValue("p"); + assertNotNull(p); + assertEquals(RDF.TYPE, p); + } + } catch (Exception e) { + e.printStackTrace(); + fail(e.getMessage()); } - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } + closeRepository(repo); } private void testSES2052If2() throws Exception { - loadTestData("/testdata-query/dataset-query.trig"); - String query = "SELECT ?p \n" + "WHERE { \n" + " ?s ?p ?o . \n" - + " FILTER(IF(!BOUND(?p), false , ?p = )) \n" - + "}"; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); - while (result.hasNext()) { - BindingSet bs = result.next(); - - IRI p = (IRI) bs.getValue("p"); - assertNotNull(p); - assertEquals(RDF.TYPE, p); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-query.trig", conn); + String query = "SELECT ?p \n" + "WHERE { \n" + " ?s ?p ?o . 
\n" + + " FILTER(IF(!BOUND(?p), false , ?p = )) \n" + + "}"; + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); + while (result.hasNext()) { + BindingSet bs = result.next(); + + IRI p = (IRI) bs.getValue("p"); + assertNotNull(p); + assertEquals(RDF.TYPE, p); + } + } catch (Exception e) { + e.printStackTrace(); + fail(e.getMessage()); } - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } - + closeRepository(repo); } private void testRegexCaseNonAscii() { - String query = "ask {filter (regex(\"Валовой\", \"валовой\", \"i\")) }"; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String query = "ask {filter (regex(\"Валовой\", \"валовой\", \"i\")) }"; - assertTrue(conn.prepareBooleanQuery(query).evaluate(), "case-insensitive match on Cyrillic should succeed"); + assertTrue(conn.prepareBooleanQuery(query).evaluate(), "case-insensitive match on Cyrillic should succeed"); - query = "ask {filter (regex(\"Валовой\", \"валовой\")) }"; + query = "ask {filter (regex(\"Валовой\", \"валовой\")) }"; - assertFalse(conn.prepareBooleanQuery(query).evaluate(), "case-sensitive match on Cyrillic should fail"); + assertFalse(conn.prepareBooleanQuery(query).evaluate(), "case-sensitive match on Cyrillic should fail"); + } + closeRepository(repo); } private void testFilterRegexBoolean() throws Exception { - loadTestData("/testdata-query/dataset-query.trig"); - - // test case for issue SES-1050 - String query = getNamespaceDeclarations() + " SELECT *" + " WHERE { " + " ?x foaf:name ?name ; " - + " foaf:mbox ?mbox . " + " FILTER(EXISTS { " - + " FILTER(REGEX(?name, \"Bo\") && REGEX(?mbox, \"bob\")) " + - // query.append(" FILTER(REGEX(?mbox, \"bob\")) "); - " } )" + " } "; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - - try (Stream result = tq.evaluate().stream()) { - long count = result.count(); - assertEquals(1, count); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-query.trig", conn); + + // test case for issue SES-1050 + String query = getNamespaceDeclarations() + " SELECT *" + " WHERE { " + " ?x foaf:name ?name ; " + + " foaf:mbox ?mbox . 
" + " FILTER(EXISTS { " + + " FILTER(REGEX(?name, \"Bo\") && REGEX(?mbox, \"bob\")) " + + // query.append(" FILTER(REGEX(?mbox, \"bob\")) "); + " } )" + " } "; + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + + try (Stream result = tq.evaluate().stream()) { + long count = result.count(); + assertEquals(1, count); + } } + closeRepository(repo); } private void testDateCastFunction_date() { - String qry = "PREFIX xsd: " - + "SELECT (xsd:date(\"2022-09-09\") AS ?date) { }"; - - try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) { - assertNotNull(result); - assertTrue(result.hasNext()); - assertEquals("2022-09-09", result.next().getValue("date").stringValue()); - assertFalse(result.hasNext()); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String qry = "PREFIX xsd: " + + "SELECT (xsd:date(\"2022-09-09\") AS ?date) { }"; + + try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) { + assertNotNull(result); + assertTrue(result.hasNext()); + assertEquals("2022-09-09", result.next().getValue("date").stringValue()); + assertFalse(result.hasNext()); + } } + closeRepository(repo); } private void testDateCastFunction_date_withTimeZone_utc() { - String qry = "PREFIX xsd: " - + "SELECT (xsd:date(\"2022-09-09Z\") AS ?date) { }"; - - try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) { - assertNotNull(result); - assertTrue(result.hasNext()); - assertEquals("2022-09-09Z", result.next().getValue("date").stringValue()); - assertFalse(result.hasNext()); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String qry = "PREFIX xsd: " + + "SELECT (xsd:date(\"2022-09-09Z\") AS ?date) { }"; + + try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) { + assertNotNull(result); + assertTrue(result.hasNext()); + assertEquals("2022-09-09Z", result.next().getValue("date").stringValue()); + assertFalse(result.hasNext()); + } } + closeRepository(repo); } private void testDateCastFunction_dateTime_withTimeZone_offset() { - String qry = "PREFIX xsd: " - + "SELECT (xsd:date(\"2022-09-09T14:45:13+03:00\") AS ?date) { }"; - - try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) { - assertNotNull(result); - assertTrue(result.hasNext()); - assertEquals("2022-09-09+03:00", result.next().getValue("date").stringValue()); - assertFalse(result.hasNext()); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String qry = "PREFIX xsd: " + + "SELECT (xsd:date(\"2022-09-09T14:45:13+03:00\") AS ?date) { }"; + + try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) { + assertNotNull(result); + assertTrue(result.hasNext()); + assertEquals("2022-09-09+03:00", result.next().getValue("date").stringValue()); + assertFalse(result.hasNext()); + } } + closeRepository(repo); } private void testDateCastFunction_invalidInput() { - String qry = "PREFIX xsd: " - + "SELECT (xsd:date(\"2022-09-xx\") AS ?date) { }"; - - try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) { - assertNotNull(result); - assertTrue(result.hasNext()); - assertFalse(result.next().hasBinding("date"), - "There should be no binding because the cast should have failed."); - assertFalse(result.hasNext()); + Repository repo = openRepository(); 
+ try (RepositoryConnection conn = repo.getConnection()) { + String qry = "PREFIX xsd: " + + "SELECT (xsd:date(\"2022-09-xx\") AS ?date) { }"; + + try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) { + assertNotNull(result); + assertTrue(result.hasNext()); + assertFalse(result.next().hasBinding("date"), + "There should be no binding because the cast should have failed."); + assertFalse(result.hasNext()); + } } + closeRepository(repo); } public Stream tests() { diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ConstructTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ConstructTest.java index 20b1282e6de..d7476c0e8ef 100644 --- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ConstructTest.java +++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ConstructTest.java @@ -20,6 +20,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.StringReader; +import java.util.function.Supplier; import java.util.stream.Stream; import org.eclipse.rdf4j.model.IRI; @@ -32,6 +33,7 @@ import org.eclipse.rdf4j.query.QueryLanguage; import org.eclipse.rdf4j.query.QueryResults; import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; import org.eclipse.rdf4j.rio.RDFFormat; import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest; import org.junit.jupiter.api.DynamicTest; @@ -44,38 +46,42 @@ */ public class ConstructTest extends AbstractComplianceTest { - public ConstructTest(Repository repo) { + public ConstructTest(Supplier repo) { super(repo); } private void testConstructModifiers() throws Exception { - loadTestData("/testdata-query/dataset-construct-modifiers.ttl"); - String qry = "PREFIX foaf: \n" + "PREFIX site: \n" - + "CONSTRUCT { \n" + " ?iri foaf:name ?name . \n" + " ?iri foaf:nick ?nick . \n" + "} \n" - + "WHERE { \n" + " ?iri foaf:name ?name ; \n" + " site:hits ?hits ; \n" + " foaf:nick ?nick . \n" - + "} \n" + "ORDER BY desc(?hits) \n" + "LIMIT 3"; - Statement[] correctResult = { - statement(iri("urn:1"), iri("http://xmlns.com/foaf/0.1/name"), literal("Alice"), null), - statement(iri("urn:1"), iri("http://xmlns.com/foaf/0.1/nick"), literal("Al"), null), - - statement(iri("urn:3"), iri("http://xmlns.com/foaf/0.1/name"), literal("Eve"), null), - statement(iri("urn:3"), iri("http://xmlns.com/foaf/0.1/nick"), literal("Ev"), null), - - statement(iri("urn:2"), iri("http://xmlns.com/foaf/0.1/name"), literal("Bob"), null), - statement(iri("urn:2"), iri("http://xmlns.com/foaf/0.1/nick"), literal("Bo"), null), }; - GraphQuery gq = conn.prepareGraphQuery(qry); - try (GraphQueryResult result = gq.evaluate()) { - assertNotNull(result); - assertTrue(result.hasNext()); - int resultNo = 0; - while (result.hasNext()) { - Statement st = result.next(); - assertThat(resultNo).isLessThan(correctResult.length); - assertEquals(correctResult[resultNo], st); - resultNo++; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-construct-modifiers.ttl", conn); + String qry = "PREFIX foaf: \n" + "PREFIX site: \n" + + "CONSTRUCT { \n" + " ?iri foaf:name ?name . \n" + " ?iri foaf:nick ?nick . \n" + "} \n" + + "WHERE { \n" + " ?iri foaf:name ?name ; \n" + " site:hits ?hits ; \n" + + " foaf:nick ?nick . 
\n" + "} \n" + "ORDER BY desc(?hits) \n" + "LIMIT 3"; + Statement[] correctResult = { + statement(iri("urn:1"), iri("http://xmlns.com/foaf/0.1/name"), literal("Alice"), null), + statement(iri("urn:1"), iri("http://xmlns.com/foaf/0.1/nick"), literal("Al"), null), + + statement(iri("urn:3"), iri("http://xmlns.com/foaf/0.1/name"), literal("Eve"), null), + statement(iri("urn:3"), iri("http://xmlns.com/foaf/0.1/nick"), literal("Ev"), null), + + statement(iri("urn:2"), iri("http://xmlns.com/foaf/0.1/name"), literal("Bob"), null), + statement(iri("urn:2"), iri("http://xmlns.com/foaf/0.1/nick"), literal("Bo"), null), }; + GraphQuery gq = conn.prepareGraphQuery(qry); + try (GraphQueryResult result = gq.evaluate()) { + assertNotNull(result); + assertTrue(result.hasNext()); + int resultNo = 0; + while (result.hasNext()) { + Statement st = result.next(); + assertThat(resultNo).isLessThan(correctResult.length); + assertEquals(correctResult[resultNo], st); + resultNo++; + } + assertEquals(correctResult.length, resultNo); } - assertEquals(correctResult.length, resultNo); } + closeRepository(repo); } /** @@ -83,34 +89,43 @@ private void testConstructModifiers() throws Exception { */ private void testConstruct_CyclicPathWithJoin() { - IRI test = iri("urn:test"), a = iri("urn:a"), b = iri("urn:b"), c = iri("urn:c"); - conn.add(test, RDF.TYPE, DCAT.CATALOG); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + IRI test = iri("urn:test"), a = iri("urn:a"), b = iri("urn:b"), c = iri("urn:c"); + conn.add(test, RDF.TYPE, DCAT.CATALOG); - String query = "PREFIX dcat: \n" + "\n" + "CONSTRUCT {\n" + " ?x .\n" - + " ?x ?x .\n" + "}\n" + "WHERE {\n" + " ?x a dcat:Catalog .\n" + "}"; + String query = "PREFIX dcat: \n" + "\n" + "CONSTRUCT {\n" + + " ?x .\n" + " ?x ?x .\n" + "}\n" + "WHERE {\n" + " ?x a dcat:Catalog .\n" + + "}"; - Model result = QueryResults.asModel(conn.prepareGraphQuery(query).evaluate()); + Model result = QueryResults.asModel(conn.prepareGraphQuery(query).evaluate()); - assertThat(result.contains(a, b, test)).isTrue(); - assertThat(result.contains(test, c, test)).isTrue(); + assertThat(result.contains(a, b, test)).isTrue(); + assertThat(result.contains(test, c, test)).isTrue(); + } + closeRepository(repo); } private void testSES2104ConstructBGPSameURI() throws Exception { - final String queryStr = "PREFIX : CONSTRUCT {:x :p :x } WHERE {} "; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + final String queryStr = "PREFIX : CONSTRUCT {:x :p :x } WHERE {} "; - conn.add(new StringReader("@prefix : . :a :p :b . "), "", RDFFormat.TURTLE); + conn.add(new StringReader("@prefix : . :a :p :b . 
"), "", RDFFormat.TURTLE); - final IRI x = conn.getValueFactory().createIRI("urn:x"); - final IRI p = conn.getValueFactory().createIRI("urn:p"); + final IRI x = conn.getValueFactory().createIRI("urn:x"); + final IRI p = conn.getValueFactory().createIRI("urn:p"); - GraphQuery query = conn.prepareGraphQuery(QueryLanguage.SPARQL, queryStr); - try (GraphQueryResult evaluate = query.evaluate()) { - Model result = QueryResults.asModel(evaluate); + GraphQuery query = conn.prepareGraphQuery(QueryLanguage.SPARQL, queryStr); + try (GraphQueryResult evaluate = query.evaluate()) { + Model result = QueryResults.asModel(evaluate); - assertNotNull(result); - assertFalse(result.isEmpty()); - assertTrue(result.contains(x, p, x)); + assertNotNull(result); + assertFalse(result.isEmpty()); + assertTrue(result.contains(x, p, x)); + } } + closeRepository(repo); } public Stream tests() { diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/DefaultGraphTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/DefaultGraphTest.java index ce746c64f1c..20e86934b89 100644 --- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/DefaultGraphTest.java +++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/DefaultGraphTest.java @@ -16,6 +16,7 @@ import static org.junit.jupiter.api.Assertions.fail; import java.util.List; +import java.util.function.Supplier; import java.util.stream.Stream; import org.eclipse.rdf4j.model.Resource; @@ -26,10 +27,10 @@ import org.eclipse.rdf4j.query.TupleQuery; import org.eclipse.rdf4j.query.TupleQueryResult; import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest; import org.eclipse.rdf4j.testsuite.sparql.vocabulary.EX; import org.junit.jupiter.api.DynamicTest; -import org.junit.jupiter.api.Test; /** * Tests on handling default graph identification (DEFAULT keyword, rf4j:nil). 
@@ -39,89 +40,104 @@ */ public class DefaultGraphTest extends AbstractComplianceTest { - public DefaultGraphTest(Repository repo) { + public DefaultGraphTest(Supplier repo) { super(repo); } private void testNullContext1() throws Exception { - loadTestData("/testdata-query/dataset-query.trig"); - String query = " SELECT * " + " FROM DEFAULT " + " WHERE { ?s ?p ?o } "; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); - - while (result.hasNext()) { - BindingSet bs = result.next(); - assertNotNull(bs); - - Resource s = (Resource) bs.getValue("s"); - - assertNotNull(s); - assertNotEquals(EX.BOB, s); // should not be present in default - // graph - assertNotEquals(EX.ALICE, s); // should not be present in - // default - // graph + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-query.trig", conn); + String query = " SELECT * " + " FROM DEFAULT " + " WHERE { ?s ?p ?o } "; + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); + + while (result.hasNext()) { + BindingSet bs = result.next(); + assertNotNull(bs); + + Resource s = (Resource) bs.getValue("s"); + + assertNotNull(s); + assertNotEquals(EX.BOB, s); // should not be present in default + // graph + assertNotEquals(EX.ALICE, s); // should not be present in + // default + // graph + } + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); } - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); } + closeRepository(repo); } private void testNullContext2() throws Exception { - loadTestData("/testdata-query/dataset-query.trig"); - String query = " SELECT * " + " FROM rdf4j:nil " + " WHERE { ?s ?p ?o } "; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); - - while (result.hasNext()) { - BindingSet bs = result.next(); - assertNotNull(bs); - - Resource s = (Resource) bs.getValue("s"); - - assertNotNull(s); - assertNotEquals(EX.BOB, s); // should not be present in default - // graph - assertNotEquals(EX.ALICE, s); // should not be present in - // default - // graph + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-query.trig", conn); + String query = " SELECT * " + " FROM rdf4j:nil " + " WHERE { ?s ?p ?o } "; + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); + + while (result.hasNext()) { + BindingSet bs = result.next(); + assertNotNull(bs); + + Resource s = (Resource) bs.getValue("s"); + + assertNotNull(s); + assertNotEquals(EX.BOB, s); // should not be present in default + // graph + assertNotEquals(EX.ALICE, s); // should not be present in + // default + // graph + } + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); } - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + } finally { + closeRepository(repo); } } private void testSesameNilAsGraph() throws Exception { - loadTestData("/testdata-query/dataset-query.trig"); - String query = " SELECT * " + " WHERE { GRAPH rdf4j:nil { ?s ?p ?o } } "; + Repository repo = openRepository(); + try 
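For orientation, the private test methods in this class (testNullContext1, testNullContext2, testSesameNilAsGraph) are surfaced as JUnit 5 dynamic tests rather than annotated @Test methods, which is why the org.junit.jupiter.api.Test import is removed above. A minimal sketch of how a tests() factory can wire them up, using the standard DynamicTest.dynamicTest API (the shared helper actually used by this suite may wrap the calls differently):

	public Stream<DynamicTest> tests() {
		// each private test method is registered under a readable display name;
		// JUnit 5 runs every element of the returned stream as a separate test
		return Stream.of(
				DynamicTest.dynamicTest("testNullContext1", this::testNullContext1),
				DynamicTest.dynamicTest("testNullContext2", this::testNullContext2),
				DynamicTest.dynamicTest("testSesameNilAsGraph", this::testSesameNilAsGraph));
	}
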
(RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-query.trig", conn); + String query = " SELECT * " + " WHERE { GRAPH rdf4j:nil { ?s ?p ?o } } "; // query.append(" WHERE { ?s ?p ?o } "); - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - try { - List result = QueryResults.asList(tq.evaluate()); + try { + List result = QueryResults.asList(tq.evaluate()); - // nil graph should not be empty - assertThat(result.size()).isGreaterThan(1); + // nil graph should not be empty + assertThat(result.size()).isGreaterThan(1); - for (BindingSet bs : result) { - Resource s = (Resource) bs.getValue("s"); + for (BindingSet bs : result) { + Resource s = (Resource) bs.getValue("s"); - assertNotNull(s); - assertThat(s).withFailMessage("%s should not be present in nil graph", EX.BOB).isNotEqualTo(EX.BOB); - assertThat(s).withFailMessage("%s should not be present in nil graph", EX.ALICE).isNotEqualTo(EX.ALICE); + assertNotNull(s); + assertThat(s).withFailMessage("%s should not be present in nil graph", EX.BOB).isNotEqualTo(EX.BOB); + assertThat(s).withFailMessage("%s should not be present in nil graph", EX.ALICE) + .isNotEqualTo(EX.ALICE); + } + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); } - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + } finally { + closeRepository(repo); } } diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/DescribeTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/DescribeTest.java index 5cee875421b..562ef051e19 100644 --- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/DescribeTest.java +++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/DescribeTest.java @@ -14,6 +14,7 @@ import static org.assertj.core.api.Assertions.assertThat; import java.util.Set; +import java.util.function.Supplier; import java.util.stream.Stream; import org.eclipse.rdf4j.model.BNode; @@ -28,9 +29,9 @@ import org.eclipse.rdf4j.query.QueryLanguage; import org.eclipse.rdf4j.query.QueryResults; import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest; import org.junit.jupiter.api.DynamicTest; -import org.junit.jupiter.api.Test; /** * Tests on SPARQL DESCRIBE queries @@ -39,264 +40,304 @@ */ public class DescribeTest extends AbstractComplianceTest { - public DescribeTest(Repository repo) { + public DescribeTest(Supplier repo) { super(repo); } private void testDescribeA() throws Exception { - loadTestData("/testdata-query/dataset-describe.trig"); - String query = getNamespaceDeclarations() + "DESCRIBE ex:a"; - - GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query); - - ValueFactory f = conn.getValueFactory(); - IRI a = f.createIRI("http://example.org/a"); - IRI p = f.createIRI("http://example.org/p"); - try (GraphQueryResult evaluate = gq.evaluate()) { - Model result = QueryResults.asModel(evaluate); - Set objects = result.filter(a, p, null).objects(); - assertThat(objects).isNotNull(); - for (Value object : objects) { - if (object instanceof BNode) { - assertThat(result.contains((Resource) object, null, null)).isTrue(); - assertThat(result.filter((Resource) object, null, null)).hasSize(2); + Repository repo = openRepository(); + try (RepositoryConnection 
conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-describe.trig", conn); + String query = getNamespaceDeclarations() + "DESCRIBE ex:a"; + + GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query); + + ValueFactory f = conn.getValueFactory(); + IRI a = f.createIRI("http://example.org/a"); + IRI p = f.createIRI("http://example.org/p"); + try (GraphQueryResult evaluate = gq.evaluate()) { + Model result = QueryResults.asModel(evaluate); + Set objects = result.filter(a, p, null).objects(); + assertThat(objects).isNotNull(); + for (Value object : objects) { + if (object instanceof BNode) { + assertThat(result.contains((Resource) object, null, null)).isTrue(); + assertThat(result.filter((Resource) object, null, null)).hasSize(2); + } } } } + closeRepository(repo); } private void testDescribeAWhere() throws Exception { - loadTestData("/testdata-query/dataset-describe.trig"); - String query = getNamespaceDeclarations() + "DESCRIBE ?x WHERE {?x rdfs:label \"a\". } "; - - GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query); - - ValueFactory f = conn.getValueFactory(); - IRI a = f.createIRI("http://example.org/a"); - IRI p = f.createIRI("http://example.org/p"); - try (GraphQueryResult evaluate = gq.evaluate()) { - Model result = QueryResults.asModel(evaluate); - Set objects = result.filter(a, p, null).objects(); - assertThat(objects).isNotNull(); - for (Value object : objects) { - if (object instanceof BNode) { - assertThat(result.contains((Resource) object, null, null)).isTrue(); - assertThat(result.filter((Resource) object, null, null)).hasSize(2); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-describe.trig", conn); + String query = getNamespaceDeclarations() + "DESCRIBE ?x WHERE {?x rdfs:label \"a\". } "; + + GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query); + + ValueFactory f = conn.getValueFactory(); + IRI a = f.createIRI("http://example.org/a"); + IRI p = f.createIRI("http://example.org/p"); + try (GraphQueryResult evaluate = gq.evaluate()) { + Model result = QueryResults.asModel(evaluate); + Set objects = result.filter(a, p, null).objects(); + assertThat(objects).isNotNull(); + for (Value object : objects) { + if (object instanceof BNode) { + assertThat(result.contains((Resource) object, null, null)).isTrue(); + assertThat(result.filter((Resource) object, null, null)).hasSize(2); + } } } } + closeRepository(repo); } private void testDescribeWhere() throws Exception { - loadTestData("/testdata-query/dataset-describe.trig"); - String query = getNamespaceDeclarations() + "DESCRIBE ?x WHERE {?x rdfs:label ?y . 
} "; - - GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query); - - ValueFactory vf = conn.getValueFactory(); - IRI a = vf.createIRI("http://example.org/a"); - IRI b = vf.createIRI("http://example.org/b"); - IRI c = vf.createIRI("http://example.org/c"); - IRI e = vf.createIRI("http://example.org/e"); - IRI f = vf.createIRI("http://example.org/f"); - IRI p = vf.createIRI("http://example.org/p"); - - try (GraphQueryResult evaluate = gq.evaluate()) { - Model result = QueryResults.asModel(evaluate); - assertThat(result.contains(a, p, null)).isTrue(); - assertThat(result.contains(b, RDFS.LABEL, null)).isTrue(); - assertThat(result.contains(c, RDFS.LABEL, null)).isTrue(); - assertThat(result.contains(null, p, b)).isTrue(); - assertThat(result.contains(e, RDFS.LABEL, null)).isTrue(); - assertThat(result.contains(null, p, e)).isTrue(); - assertThat(result.contains(f, null, null)).isFalse(); - Set objects = result.filter(a, p, null).objects(); - assertThat(objects).isNotNull(); - for (Value object : objects) { - if (object instanceof BNode) { - assertThat(result.contains((Resource) object, null, null)).isTrue(); - assertThat(result.filter((Resource) object, null, null)).hasSize(2); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-describe.trig", conn); + String query = getNamespaceDeclarations() + "DESCRIBE ?x WHERE {?x rdfs:label ?y . } "; + + GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query); + + ValueFactory vf = conn.getValueFactory(); + IRI a = vf.createIRI("http://example.org/a"); + IRI b = vf.createIRI("http://example.org/b"); + IRI c = vf.createIRI("http://example.org/c"); + IRI e = vf.createIRI("http://example.org/e"); + IRI f = vf.createIRI("http://example.org/f"); + IRI p = vf.createIRI("http://example.org/p"); + + try (GraphQueryResult evaluate = gq.evaluate()) { + Model result = QueryResults.asModel(evaluate); + assertThat(result.contains(a, p, null)).isTrue(); + assertThat(result.contains(b, RDFS.LABEL, null)).isTrue(); + assertThat(result.contains(c, RDFS.LABEL, null)).isTrue(); + assertThat(result.contains(null, p, b)).isTrue(); + assertThat(result.contains(e, RDFS.LABEL, null)).isTrue(); + assertThat(result.contains(null, p, e)).isTrue(); + assertThat(result.contains(f, null, null)).isFalse(); + Set objects = result.filter(a, p, null).objects(); + assertThat(objects).isNotNull(); + for (Value object : objects) { + if (object instanceof BNode) { + assertThat(result.contains((Resource) object, null, null)).isTrue(); + assertThat(result.filter((Resource) object, null, null)).hasSize(2); + } } } } + closeRepository(repo); } private void testDescribeB() throws Exception { - loadTestData("/testdata-query/dataset-describe.trig"); - String query = getNamespaceDeclarations() + "DESCRIBE ex:b"; - - GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query); - - ValueFactory f = conn.getValueFactory(); - IRI b = f.createIRI("http://example.org/b"); - IRI p = f.createIRI("http://example.org/p"); - try (GraphQueryResult evaluate = gq.evaluate()) { - Model result = QueryResults.asModel(evaluate); - Set subjects = result.filter(null, p, b).subjects(); - assertThat(subjects).isNotNull(); - for (Value subject : subjects) { - if (subject instanceof BNode) { - assertThat(result.contains(null, null, subject)).isTrue(); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-describe.trig", 
conn); + String query = getNamespaceDeclarations() + "DESCRIBE ex:b"; + + GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query); + + ValueFactory f = conn.getValueFactory(); + IRI b = f.createIRI("http://example.org/b"); + IRI p = f.createIRI("http://example.org/p"); + try (GraphQueryResult evaluate = gq.evaluate()) { + Model result = QueryResults.asModel(evaluate); + Set subjects = result.filter(null, p, b).subjects(); + assertThat(subjects).isNotNull(); + for (Value subject : subjects) { + if (subject instanceof BNode) { + assertThat(result.contains(null, null, subject)).isTrue(); + } } } } + closeRepository(repo); } private void testDescribeD() throws Exception { - loadTestData("/testdata-query/dataset-describe.trig"); - String query = getNamespaceDeclarations() + "DESCRIBE ex:d"; - - GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query); - - ValueFactory f = conn.getValueFactory(); - IRI d = f.createIRI("http://example.org/d"); - IRI p = f.createIRI("http://example.org/p"); - IRI e = f.createIRI("http://example.org/e"); - try (GraphQueryResult evaluate = gq.evaluate()) { - Model result = QueryResults.asModel(evaluate); - - assertThat(result.contains(null, p, e)).isTrue(); - assertThat(result.contains(e, null, null)).isFalse(); - - Set objects = result.filter(d, p, null).objects(); - assertThat(objects).isNotNull(); - for (Value object : objects) { - if (object instanceof BNode) { - Set childObjects = result.filter((BNode) object, null, null).objects(); - assertThat(childObjects).isNotEmpty(); - for (Value childObject : childObjects) { - if (childObject instanceof BNode) { - assertThat(result.contains((BNode) childObject, null, null)).isTrue(); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-describe.trig", conn); + String query = getNamespaceDeclarations() + "DESCRIBE ex:d"; + + GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query); + + ValueFactory f = conn.getValueFactory(); + IRI d = f.createIRI("http://example.org/d"); + IRI p = f.createIRI("http://example.org/p"); + IRI e = f.createIRI("http://example.org/e"); + try (GraphQueryResult evaluate = gq.evaluate()) { + Model result = QueryResults.asModel(evaluate); + + assertThat(result.contains(null, p, e)).isTrue(); + assertThat(result.contains(e, null, null)).isFalse(); + + Set objects = result.filter(d, p, null).objects(); + assertThat(objects).isNotNull(); + for (Value object : objects) { + if (object instanceof BNode) { + Set childObjects = result.filter((BNode) object, null, null).objects(); + assertThat(childObjects).isNotEmpty(); + for (Value childObject : childObjects) { + if (childObject instanceof BNode) { + assertThat(result.contains((BNode) childObject, null, null)).isTrue(); + } } } } } } + closeRepository(repo); } private void testDescribeF() throws Exception { - loadTestData("/testdata-query/dataset-describe.trig"); - String query = getNamespaceDeclarations() + "DESCRIBE ex:f"; - - GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query); - - ValueFactory vf = conn.getValueFactory(); - IRI f = vf.createIRI("http://example.org/f"); - IRI p = vf.createIRI("http://example.org/p"); - try (GraphQueryResult evaluate = gq.evaluate()) { - Model result = QueryResults.asModel(evaluate); - - assertThat(result).isNotNull().hasSize(4); - - Set objects = result.filter(f, p, null).objects(); - for (Value object : objects) { - if (object instanceof BNode) { - Set childObjects = 
result.filter((BNode) object, null, null).objects(); - assertThat(childObjects).isNotEmpty(); - for (Value childObject : childObjects) { - if (childObject instanceof BNode) { - assertThat(result.contains((BNode) childObject, null, null)).isTrue(); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-describe.trig", conn); + String query = getNamespaceDeclarations() + "DESCRIBE ex:f"; + + GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query); + + ValueFactory vf = conn.getValueFactory(); + IRI f = vf.createIRI("http://example.org/f"); + IRI p = vf.createIRI("http://example.org/p"); + try (GraphQueryResult evaluate = gq.evaluate()) { + Model result = QueryResults.asModel(evaluate); + + assertThat(result).isNotNull().hasSize(4); + + Set objects = result.filter(f, p, null).objects(); + for (Value object : objects) { + if (object instanceof BNode) { + Set childObjects = result.filter((BNode) object, null, null).objects(); + assertThat(childObjects).isNotEmpty(); + for (Value childObject : childObjects) { + if (childObject instanceof BNode) { + assertThat(result.contains((BNode) childObject, null, null)).isTrue(); + } } } } } } + closeRepository(repo); } private void testDescribeMultipleA() { - String update = "insert data { . [] . . } "; - conn.prepareUpdate(QueryLanguage.SPARQL, update).execute(); - - String query = getNamespaceDeclarations() + "DESCRIBE "; - - GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query); - - ValueFactory vf = conn.getValueFactory(); - IRI urn1 = vf.createIRI("urn:1"); - IRI p1 = vf.createIRI("urn:p1"); - IRI p2 = vf.createIRI("urn:p2"); - IRI urn2 = vf.createIRI("urn:2"); - IRI blank = vf.createIRI("urn:blank"); - - try (GraphQueryResult evaluate = gq.evaluate()) { - Model result = QueryResults.asModel(evaluate); - assertThat(result.contains(urn1, p1, null)).isTrue(); - assertThat(result.contains(null, blank, urn1)).isTrue(); - assertThat(result.contains(urn2, p2, null)).isTrue(); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String update = "insert data { . [] . . } "; + conn.prepareUpdate(QueryLanguage.SPARQL, update).execute(); + + String query = getNamespaceDeclarations() + "DESCRIBE "; + + GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query); + + ValueFactory vf = conn.getValueFactory(); + IRI urn1 = vf.createIRI("urn:1"); + IRI p1 = vf.createIRI("urn:p1"); + IRI p2 = vf.createIRI("urn:p2"); + IRI urn2 = vf.createIRI("urn:2"); + IRI blank = vf.createIRI("urn:blank"); + + try (GraphQueryResult evaluate = gq.evaluate()) { + Model result = QueryResults.asModel(evaluate); + assertThat(result.contains(urn1, p1, null)).isTrue(); + assertThat(result.contains(null, blank, urn1)).isTrue(); + assertThat(result.contains(urn2, p2, null)).isTrue(); + } } + closeRepository(repo); } private void testDescribeMultipleB() { - String update = "insert data { . [] . . 
} "; - conn.prepareUpdate(QueryLanguage.SPARQL, update).execute(); - - String query = getNamespaceDeclarations() + "DESCRIBE "; - - GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query); - - ValueFactory vf = conn.getValueFactory(); - IRI urn1 = vf.createIRI("urn:1"); - IRI p1 = vf.createIRI("urn:p1"); - IRI p2 = vf.createIRI("urn:p2"); - IRI urn2 = vf.createIRI("urn:2"); - IRI blank = vf.createIRI("urn:blank"); - try (GraphQueryResult evaluate = gq.evaluate()) { - Model result = QueryResults.asModel(evaluate); - - assertThat(result.contains(urn1, p1, null)).isTrue(); - assertThat(result.contains(urn1, blank, null)).isTrue(); - assertThat(result.contains(urn2, p2, null)).isTrue(); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String update = "insert data { . [] . . } "; + conn.prepareUpdate(QueryLanguage.SPARQL, update).execute(); + + String query = getNamespaceDeclarations() + "DESCRIBE "; + + GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query); + + ValueFactory vf = conn.getValueFactory(); + IRI urn1 = vf.createIRI("urn:1"); + IRI p1 = vf.createIRI("urn:p1"); + IRI p2 = vf.createIRI("urn:p2"); + IRI urn2 = vf.createIRI("urn:2"); + IRI blank = vf.createIRI("urn:blank"); + try (GraphQueryResult evaluate = gq.evaluate()) { + Model result = QueryResults.asModel(evaluate); + + assertThat(result.contains(urn1, p1, null)).isTrue(); + assertThat(result.contains(urn1, blank, null)).isTrue(); + assertThat(result.contains(urn2, p2, null)).isTrue(); + } } + closeRepository(repo); } private void testDescribeMultipleC() { - String update = "insert data { . [] . [] . . } "; - conn.prepareUpdate(QueryLanguage.SPARQL, update).execute(); - - String query = getNamespaceDeclarations() + "DESCRIBE "; - - GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query); - - ValueFactory vf = conn.getValueFactory(); - IRI urn1 = vf.createIRI("urn:1"); - IRI p1 = vf.createIRI("urn:p1"); - IRI p2 = vf.createIRI("urn:p2"); - IRI urn2 = vf.createIRI("urn:2"); - IRI blank = vf.createIRI("urn:blank"); - try (GraphQueryResult evaluate = gq.evaluate()) { - Model result = QueryResults.asModel(evaluate); - - assertThat(result.contains(urn1, p1, null)).isTrue(); - assertThat(result.contains(urn1, blank, null)).isTrue(); - assertThat(result.contains(null, blank, urn1)).isTrue(); - assertThat(result.contains(urn2, p2, null)).isTrue(); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String update = "insert data { . [] . [] . . } "; + conn.prepareUpdate(QueryLanguage.SPARQL, update).execute(); + + String query = getNamespaceDeclarations() + "DESCRIBE "; + + GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query); + + ValueFactory vf = conn.getValueFactory(); + IRI urn1 = vf.createIRI("urn:1"); + IRI p1 = vf.createIRI("urn:p1"); + IRI p2 = vf.createIRI("urn:p2"); + IRI urn2 = vf.createIRI("urn:2"); + IRI blank = vf.createIRI("urn:blank"); + try (GraphQueryResult evaluate = gq.evaluate()) { + Model result = QueryResults.asModel(evaluate); + + assertThat(result.contains(urn1, p1, null)).isTrue(); + assertThat(result.contains(urn1, blank, null)).isTrue(); + assertThat(result.contains(null, blank, urn1)).isTrue(); + assertThat(result.contains(urn2, p2, null)).isTrue(); + } } + closeRepository(repo); } private void testDescribeMultipleD() { - String update = "insert data { . [] . . [] . . 
[] .} "; - conn.prepareUpdate(QueryLanguage.SPARQL, update).execute(); - - String query = getNamespaceDeclarations() + "DESCRIBE "; - - GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query); - - ValueFactory vf = conn.getValueFactory(); - IRI urn1 = vf.createIRI("urn:1"); - IRI p1 = vf.createIRI("urn:p1"); - IRI p2 = vf.createIRI("urn:p2"); - IRI urn2 = vf.createIRI("urn:2"); - IRI urn4 = vf.createIRI("urn:4"); - IRI blank = vf.createIRI("urn:blank"); - try (GraphQueryResult evaluate = gq.evaluate()) { - Model result = QueryResults.asModel(evaluate); - - assertThat(result.contains(urn1, p1, null)).isTrue(); - assertThat(result.contains(null, blank, urn1)).isTrue(); - assertThat(result.contains(urn2, p2, null)).isTrue(); - assertThat(result.contains(urn4, p2, null)).isTrue(); - assertThat(result.contains(urn4, blank, null)).isTrue(); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String update = "insert data { . [] . . [] . . [] .} "; + conn.prepareUpdate(QueryLanguage.SPARQL, update).execute(); + + String query = getNamespaceDeclarations() + "DESCRIBE "; + + GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query); + + ValueFactory vf = conn.getValueFactory(); + IRI urn1 = vf.createIRI("urn:1"); + IRI p1 = vf.createIRI("urn:p1"); + IRI p2 = vf.createIRI("urn:p2"); + IRI urn2 = vf.createIRI("urn:2"); + IRI urn4 = vf.createIRI("urn:4"); + IRI blank = vf.createIRI("urn:blank"); + try (GraphQueryResult evaluate = gq.evaluate()) { + Model result = QueryResults.asModel(evaluate); + + assertThat(result.contains(urn1, p1, null)).isTrue(); + assertThat(result.contains(null, blank, urn1)).isTrue(); + assertThat(result.contains(urn2, p2, null)).isTrue(); + assertThat(result.contains(urn4, p2, null)).isTrue(); + assertThat(result.contains(urn4, blank, null)).isTrue(); + } } + closeRepository(repo); } public Stream tests() { diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ExistsTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ExistsTest.java index 8cc086b2cc9..dc0808f7b06 100644 --- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ExistsTest.java +++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ExistsTest.java @@ -13,6 +13,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.List; +import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -21,9 +22,9 @@ import org.eclipse.rdf4j.query.BindingSet; import org.eclipse.rdf4j.query.TupleQuery; import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest; import org.junit.jupiter.api.DynamicTest; -import org.junit.jupiter.api.Test; /** * Test for queries using EXISTS @@ -32,37 +33,40 @@ */ public class ExistsTest extends AbstractComplianceTest { - public ExistsTest(Repository repo) { + public ExistsTest(Supplier repo) { super(repo); } private void testFilterNotExistsBindingToCurrentSolutionMapping() { - - String ex = "http://example/"; - IRI a1 = Values.iri(ex, "a1"); - IRI a2 = Values.iri(ex, "a2"); - - IRI both = Values.iri(ex, "both"); - - IRI predicate1 = Values.iri(ex, "predicate1"); - IRI predicate2 = Values.iri(ex, "predicate2"); - - conn.add(a1, predicate1, both); - conn.add(a1, predicate2, both); - - conn.add(a2, predicate1, both); - conn.add(a2, predicate2, 
Values.bnode()); - - TupleQuery tupleQuery = conn.prepareTupleQuery("PREFIX : \n" + "SELECT * WHERE {\n" - + " ?a :predicate1 ?p1\n" + " FILTER NOT EXISTS {\n" + " ?a :predicate2 ?p2 .\n" - + " FILTER(?p2 = ?p1)\n" + " }\n" + "}\n"); - - try (Stream stream = tupleQuery.evaluate().stream()) { - List collect = stream.collect(Collectors.toList()); - assertEquals(1, collect.size()); - assertEquals(a2, collect.get(0).getValue("a")); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String ex = "http://example/"; + IRI a1 = Values.iri(ex, "a1"); + IRI a2 = Values.iri(ex, "a2"); + + IRI both = Values.iri(ex, "both"); + + IRI predicate1 = Values.iri(ex, "predicate1"); + IRI predicate2 = Values.iri(ex, "predicate2"); + + conn.add(a1, predicate1, both); + conn.add(a1, predicate2, both); + + conn.add(a2, predicate1, both); + conn.add(a2, predicate2, Values.bnode()); + + TupleQuery tupleQuery = conn.prepareTupleQuery("PREFIX : \n" + "SELECT * WHERE {\n" + + " ?a :predicate1 ?p1\n" + " FILTER NOT EXISTS {\n" + " ?a :predicate2 ?p2 .\n" + + " FILTER(?p2 = ?p1)\n" + " }\n" + "}\n"); + + try (Stream stream = tupleQuery.evaluate().stream()) { + List collect = stream.collect(Collectors.toList()); + assertEquals(1, collect.size()); + assertEquals(a2, collect.get(0).getValue("a")); + } + } finally { + closeRepository(repo); } - } public Stream tests() { diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/GroupByTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/GroupByTest.java index ab60e476490..c5913728458 100644 --- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/GroupByTest.java +++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/GroupByTest.java @@ -13,14 +13,15 @@ import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; +import java.util.function.Supplier; import java.util.stream.Stream; import org.eclipse.rdf4j.query.TupleQuery; import org.eclipse.rdf4j.query.TupleQueryResult; import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest; import org.junit.jupiter.api.DynamicTest; -import org.junit.jupiter.api.Test; /** * Tests on SPARQL GROUP BY @@ -29,18 +30,23 @@ */ public class GroupByTest extends AbstractComplianceTest { - public GroupByTest(Repository repo) { + public GroupByTest(Supplier repo) { super(repo); } private void testGroupByEmpty() { - // see issue https://github.com/eclipse/rdf4j/issues/573 - String query = "select ?x where {?x ?p ?o} group by ?x"; - - TupleQuery tq = conn.prepareTupleQuery(query); - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); - assertFalse(result.hasNext()); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + // see issue https://github.com/eclipse/rdf4j/issues/573 + String query = "select ?x where {?x ?p ?o} group by ?x"; + + TupleQuery tq = conn.prepareTupleQuery(query); + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); + assertFalse(result.hasNext()); + } + } finally { + closeRepository(repo); } } diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/InTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/InTest.java index fc4f14f282a..e139d5f869f 100644 --- 
a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/InTest.java
+++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/InTest.java
@@ -16,6 +16,7 @@
 import static org.junit.jupiter.api.Assertions.assertNotNull;
 import static org.junit.jupiter.api.Assertions.assertTrue;

+import java.util.function.Supplier;
 import java.util.stream.Stream;

 import org.eclipse.rdf4j.model.Literal;
@@ -27,9 +28,9 @@
 import org.eclipse.rdf4j.query.TupleQuery;
 import org.eclipse.rdf4j.query.TupleQueryResult;
 import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.RepositoryConnection;
 import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest;
 import org.junit.jupiter.api.DynamicTest;
-import org.junit.jupiter.api.Test;

 /**
  * Tests on the IN operator.
@@ -39,58 +40,73 @@
  */
 public class InTest extends AbstractComplianceTest {

-	public InTest(Repository repo) {
+	public InTest(Supplier<Repository> repo) {
 		super(repo);
 	}

 	private void testInComparison1() throws Exception {
-		loadTestData("/testdata-query/dataset-ses1913.trig");
-		String query = " PREFIX : \n"
-				+ " SELECT ?y WHERE { :a :p ?y. FILTER(?y in (:c, :d, 1/0 , 1)) } ";
-
-		TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-
-		try (TupleQueryResult result = tq.evaluate()) {
-			assertNotNull(result);
-			assertTrue(result.hasNext());
-
-			BindingSet bs = result.next();
-			Value y = bs.getValue("y");
-			assertNotNull(y);
-			assertTrue(y instanceof Literal);
-			assertEquals(literal("1", CoreDatatype.XSD.INTEGER), y);
+		Repository repo = openRepository();
+		try (RepositoryConnection conn = repo.getConnection()) {
+			loadTestData("/testdata-query/dataset-ses1913.trig", conn);
+			String query = " PREFIX : \n"
+					+ " SELECT ?y WHERE { :a :p ?y. FILTER(?y in (:c, :d, 1/0 , 1)) } ";
+
+			TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+
+			try (TupleQueryResult result = tq.evaluate()) {
+				assertNotNull(result);
+				assertTrue(result.hasNext());
+
+				BindingSet bs = result.next();
+				Value y = bs.getValue("y");
+				assertNotNull(y);
+				assertTrue(y instanceof Literal);
+				assertEquals(literal("1", CoreDatatype.XSD.INTEGER), y);
+			}
+		} finally {
+			closeRepository(repo);
 		}
 	}

 	private void testInComparison2() throws Exception {
-		loadTestData("/testdata-query/dataset-ses1913.trig");
-		String query = " PREFIX : \n"
-				+ " SELECT ?y WHERE { :a :p ?y. FILTER(?y in (:c, :d, 1/0)) } ";
-
-		TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-
-		try (TupleQueryResult result = tq.evaluate()) {
-			assertNotNull(result);
-			assertFalse(result.hasNext());
+		Repository repo = openRepository();
+		try (RepositoryConnection conn = repo.getConnection()) {
+			loadTestData("/testdata-query/dataset-ses1913.trig", conn);
+			String query = " PREFIX : \n"
+					+ " SELECT ?y WHERE { :a :p ?y. FILTER(?y in (:c, :d, 1/0)) } ";
+
+			TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+
+			try (TupleQueryResult result = tq.evaluate()) {
+				assertNotNull(result);
+				assertFalse(result.hasNext());
+			}
+		} finally {
+			closeRepository(repo);
 		}
 	}

 	private void testInComparison3() throws Exception {
-		loadTestData("/testdata-query/dataset-ses1913.trig");
-		String query = " PREFIX : \n"
-				+ " SELECT ?y WHERE { :a :p ?y. 
FILTER(?y in (:c, :d, 1, 1/0)) } "; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); - assertTrue(result.hasNext()); - - BindingSet bs = result.next(); - Value y = bs.getValue("y"); - assertNotNull(y); - assertTrue(y instanceof Literal); - assertEquals(literal("1", XSD.INTEGER), y); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-ses1913.trig", conn); + String query = " PREFIX : \n" + + " SELECT ?y WHERE { :a :p ?y. FILTER(?y in (:c, :d, 1, 1/0)) } "; + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); + assertTrue(result.hasNext()); + + BindingSet bs = result.next(); + Value y = bs.getValue("y"); + assertNotNull(y); + assertTrue(y instanceof Literal); + assertEquals(literal("1", XSD.INTEGER), y); + } + } finally { + closeRepository(repo); } } diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/MinusTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/MinusTest.java index 0a6226fd39a..58c8ebf1b25 100644 --- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/MinusTest.java +++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/MinusTest.java @@ -13,6 +13,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.List; +import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -22,6 +23,7 @@ import org.eclipse.rdf4j.query.BindingSet; import org.eclipse.rdf4j.query.TupleQuery; import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest; import org.junit.jupiter.api.DynamicTest; @@ -32,41 +34,44 @@ */ public class MinusTest extends AbstractComplianceTest { - public MinusTest(Repository repo) { + public MinusTest(Supplier repo) { super(repo); } private void testScopingOfFilterInMinus() { + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String ex = "http://example/"; + IRI a1 = Values.iri(ex, "a1"); + IRI a2 = Values.iri(ex, "a2"); - String ex = "http://example/"; - IRI a1 = Values.iri(ex, "a1"); - IRI a2 = Values.iri(ex, "a2"); + IRI both = Values.iri(ex, "both"); - IRI both = Values.iri(ex, "both"); + IRI predicate1 = Values.iri(ex, "predicate1"); + IRI predicate2 = Values.iri(ex, "predicate2"); - IRI predicate1 = Values.iri(ex, "predicate1"); - IRI predicate2 = Values.iri(ex, "predicate2"); + conn.add(a1, predicate1, both); + conn.add(a1, predicate2, both); - conn.add(a1, predicate1, both); - conn.add(a1, predicate2, both); + conn.add(a2, predicate1, both); + conn.add(a2, predicate2, Values.bnode()); - conn.add(a2, predicate1, both); - conn.add(a2, predicate2, Values.bnode()); + TupleQuery tupleQuery = conn.prepareTupleQuery( + "PREFIX : \n" + "SELECT * WHERE {\n" + " ?a :predicate1 ?p1\n" + " MINUS {\n" + + " ?a :predicate2 ?p2 .\n" + " FILTER(?p2 = ?p1)\n" + " }\n" + "} ORDER BY ?a\n"); - TupleQuery tupleQuery = conn.prepareTupleQuery( - "PREFIX : \n" + "SELECT * WHERE {\n" + " ?a :predicate1 ?p1\n" + " MINUS {\n" - + " ?a :predicate2 ?p2 .\n" + " FILTER(?p2 = ?p1)\n" + " }\n" + "} ORDER BY ?a\n"); + try (Stream stream = tupleQuery.evaluate().stream()) { + List 
collect = stream.collect(Collectors.toList()); + assertEquals(2, collect.size()); - try (Stream stream = tupleQuery.evaluate().stream()) { - List collect = stream.collect(Collectors.toList()); - assertEquals(2, collect.size()); + List expectedValues = List.of(a1, a2); + List actualValues = collect.stream().map(b -> b.getValue("a")).collect(Collectors.toList()); - List expectedValues = List.of(a1, a2); - List actualValues = collect.stream().map(b -> b.getValue("a")).collect(Collectors.toList()); - - assertEquals(expectedValues, actualValues); + assertEquals(expectedValues, actualValues); + } + } finally { + closeRepository(repo); } - } public Stream tests() { diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/OptionalTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/OptionalTest.java index 14e6684db78..9d0e4a57da8 100644 --- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/OptionalTest.java +++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/OptionalTest.java @@ -19,6 +19,7 @@ import java.io.StringReader; import java.util.List; +import java.util.function.Supplier; import java.util.stream.Stream; import org.eclipse.rdf4j.model.Literal; @@ -30,10 +31,10 @@ import org.eclipse.rdf4j.query.TupleQuery; import org.eclipse.rdf4j.query.TupleQueryResult; import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; import org.eclipse.rdf4j.rio.RDFFormat; import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest; import org.junit.jupiter.api.DynamicTest; -import org.junit.jupiter.api.Test; /** * Tests on OPTIONAL clause behavior. @@ -43,99 +44,112 @@ */ public class OptionalTest extends AbstractComplianceTest { - public OptionalTest(Repository repo) { + public OptionalTest(Supplier repo) { super(repo); } private void testSES1898LeftJoinSemantics1() throws Exception { - loadTestData("/testdata-query/dataset-ses1898.trig"); - String query = " PREFIX : " + " SELECT * WHERE { " + " ?s :p1 ?v1 . " - + " OPTIONAL {?s :p2 ?v2 } ." + " ?s :p3 ?v2 . " + " } "; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - try (Stream result = tq.evaluate().stream()) { - long count = result.count(); - assertEquals(0, count); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-ses1898.trig", conn); + String query = " PREFIX : " + " SELECT * WHERE { " + " ?s :p1 ?v1 . " + + " OPTIONAL {?s :p2 ?v2 } ." + " ?s :p3 ?v2 . " + " } "; + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + try (Stream result = tq.evaluate().stream()) { + long count = result.count(); + assertEquals(0, count); + } + } finally { + closeRepository(repo); } } private void testSES1121VarNamesInOptionals() throws Exception { // Verifying that variable names have no influence on order of optionals // in query. See SES-1121. + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-ses1121.trig", conn); - loadTestData("/testdata-query/dataset-ses1121.trig"); - - String query1 = getNamespaceDeclarations() + " SELECT DISTINCT *\n" + " WHERE { GRAPH ?g { \n" - + " OPTIONAL { ?var35 ex:p ?b . } \n " + " OPTIONAL { ?b ex:q ?c . } \n " - + " } \n" + " } \n"; + String query1 = getNamespaceDeclarations() + " SELECT DISTINCT *\n" + " WHERE { GRAPH ?g { \n" + + " OPTIONAL { ?var35 ex:p ?b . 
} \n " + " OPTIONAL { ?b ex:q ?c . } \n " + + " } \n" + " } \n"; - String query2 = getNamespaceDeclarations() + " SELECT DISTINCT *\n" + " WHERE { GRAPH ?g { \n" - + " OPTIONAL { ?var35 ex:p ?b . } \n " + " OPTIONAL { ?b ex:q ?var2 . } \n " - + " } \n" + " } \n"; + String query2 = getNamespaceDeclarations() + " SELECT DISTINCT *\n" + " WHERE { GRAPH ?g { \n" + + " OPTIONAL { ?var35 ex:p ?b . } \n " + " OPTIONAL { ?b ex:q ?var2 . } \n " + + " } \n" + " } \n"; - TupleQuery tq1 = conn.prepareTupleQuery(QueryLanguage.SPARQL, query1); - TupleQuery tq2 = conn.prepareTupleQuery(QueryLanguage.SPARQL, query2); + TupleQuery tq1 = conn.prepareTupleQuery(QueryLanguage.SPARQL, query1); + TupleQuery tq2 = conn.prepareTupleQuery(QueryLanguage.SPARQL, query2); - try (TupleQueryResult result1 = tq1.evaluate(); TupleQueryResult result2 = tq2.evaluate()) { - assertNotNull(result1); - assertNotNull(result2); + try (TupleQueryResult result1 = tq1.evaluate(); TupleQueryResult result2 = tq2.evaluate()) { + assertNotNull(result1); + assertNotNull(result2); - List qr1 = QueryResults.asList(result1); - List qr2 = QueryResults.asList(result2); + List qr1 = QueryResults.asList(result1); + List qr2 = QueryResults.asList(result2); - // System.out.println(qr1); - // System.out.println(qr2); + // System.out.println(qr1); + // System.out.println(qr2); - // if optionals are not kept in same order, query results will be - // different size. - assertEquals(qr1.size(), qr2.size()); + // if optionals are not kept in same order, query results will be + // different size. + assertEquals(qr1.size(), qr2.size()); - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); + } + } finally { + closeRepository(repo); } } private void testSameTermRepeatInOptional() throws Exception { - loadTestData("/testdata-query/dataset-query.trig"); - String query = getNamespaceDeclarations() + " SELECT ?l ?opt1 ?opt2 " + " FROM ex:optional-sameterm-graph " - + " WHERE { " + " ?s ex:p ex:A ; " + " { " + " { " - + " ?s ?p ?l ." + " FILTER(?p = rdfs:label) " + " } " - + " OPTIONAL { " + " ?s ?p ?opt1 . " - + " FILTER (?p = ex:prop1) " + " } " + " OPTIONAL { " - + " ?s ?p ?opt2 . " + " FILTER (?p = ex:prop2) " + " } " - + " }" + " } "; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); - - int count = 0; - while (result.hasNext()) { - BindingSet bs = result.next(); - count++; - assertNotNull(bs); - - // System.out.println(bs); - - Value l = bs.getValue("l"); - assertTrue(l instanceof Literal); - assertEquals("label", ((Literal) l).getLabel()); - - Value opt1 = bs.getValue("opt1"); - assertNull(opt1); - - Value opt2 = bs.getValue("opt2"); - assertNull(opt2); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-query.trig", conn); + String query = getNamespaceDeclarations() + " SELECT ?l ?opt1 ?opt2 " + " FROM ex:optional-sameterm-graph " + + " WHERE { " + " ?s ex:p ex:A ; " + " { " + " { " + + " ?s ?p ?l ." + " FILTER(?p = rdfs:label) " + " } " + + " OPTIONAL { " + " ?s ?p ?opt1 . " + + " FILTER (?p = ex:prop1) " + " } " + " OPTIONAL { " + + " ?s ?p ?opt2 . 
" + " FILTER (?p = ex:prop2) " + + " } " + " }" + " } "; + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); + + int count = 0; + while (result.hasNext()) { + BindingSet bs = result.next(); + count++; + assertNotNull(bs); + + // System.out.println(bs); + + Value l = bs.getValue("l"); + assertTrue(l instanceof Literal); + assertEquals("label", ((Literal) l).getLabel()); + + Value opt1 = bs.getValue("opt1"); + assertNull(opt1); + + Value opt2 = bs.getValue("opt2"); + assertNull(opt2); + } + assertEquals(1, count); + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); } - assertEquals(1, count); - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + } finally { + closeRepository(repo); } - } /** @@ -143,17 +157,22 @@ private void testSameTermRepeatInOptional() throws Exception { * */ private void testValuesAfterOptional() throws Exception { - String data = "@prefix rdfs: . \n" + "@prefix : . \n" - + ":r1 a rdfs:Resource . \n" + ":r2 a rdfs:Resource ; rdfs:label \"a label\" . \n"; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String data = "@prefix rdfs: . \n" + "@prefix : . \n" + + ":r1 a rdfs:Resource . \n" + ":r2 a rdfs:Resource ; rdfs:label \"a label\" . \n"; - String query = "" + "prefix rdfs: \n" + "prefix : \n" - + "\n" + "select ?resource ?label where { \n" + " ?resource a rdfs:Resource . \n" - + " optional { ?resource rdfs:label ?label } \n" + " values ?label { undef } \n" + "}"; + String query = "" + "prefix rdfs: \n" + "prefix : \n" + + "\n" + "select ?resource ?label where { \n" + " ?resource a rdfs:Resource . \n" + + " optional { ?resource rdfs:label ?label } \n" + " values ?label { undef } \n" + "}"; - conn.add(new StringReader(data), RDFFormat.TURTLE); + conn.add(new StringReader(data), RDFFormat.TURTLE); - List result = QueryResults.asList(conn.prepareTupleQuery(query).evaluate()); - assertThat(result).hasSize(2); + List result = QueryResults.asList(conn.prepareTupleQuery(query).evaluate()); + assertThat(result).hasSize(2); + } finally { + closeRepository(repo); + } } public Stream tests() { diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/OrderByTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/OrderByTest.java index 8c642ebf98e..771501f54bc 100644 --- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/OrderByTest.java +++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/OrderByTest.java @@ -13,62 +13,76 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.StringReader; +import java.util.function.Supplier; import java.util.stream.Stream; import org.eclipse.rdf4j.query.BindingSet; import org.eclipse.rdf4j.query.QueryLanguage; import org.eclipse.rdf4j.query.TupleQuery; import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; import org.eclipse.rdf4j.rio.RDFFormat; import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest; import org.junit.jupiter.api.DynamicTest; -import org.junit.jupiter.api.Test; public class OrderByTest extends AbstractComplianceTest { - public OrderByTest(Repository repo) { + public OrderByTest(Supplier repo) { super(repo); } private void testDistinctOptionalOrderBy() throws Exception { - - conn.add(new StringReader("[] a .\n" + "[] a ; 123 
."), "", RDFFormat.TURTLE); - - String query = "select distinct ?o ?nr { ?o a optional { ?o ?nr } } order by ?nr"; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - try (Stream result = tq.evaluate().stream()) { - long count = result.count(); - assertEquals(2, count); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + conn.add(new StringReader("[] a .\n" + "[] a ; 123 ."), "", + RDFFormat.TURTLE); + + String query = "select distinct ?o ?nr { ?o a optional { ?o ?nr } } order by ?nr"; + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + try (Stream result = tq.evaluate().stream()) { + long count = result.count(); + assertEquals(2, count); + } + } finally { + closeRepository(repo); } } private void testOrderByVariableNotInUse() throws Exception { - - conn.add(new StringReader( - "_:bob1 a ; rdfs:label \"Bob1\" .\n" + "_:bob2 a ; rdfs:label \"Bob2\" ."), - "", RDFFormat.TURTLE); - - String query = "SELECT * WHERE { ?person a } ORDER BY ?score\n"; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - try (Stream result = tq.evaluate().stream()) { - // use distinct because the issue is that the query produces duplicates - long count = result.distinct().count(); - assertEquals(2, count); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + conn.add(new StringReader("_:bob1 a ; rdfs:label \"Bob1\" .\n" + + "_:bob2 a ; rdfs:label \"Bob2\" ."), "", RDFFormat.TURTLE); + + String query = "SELECT * WHERE { ?person a } ORDER BY ?score\n"; + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + try (Stream result = tq.evaluate().stream()) { + // use distinct because the issue is that the query produces duplicates + long count = result.distinct().count(); + assertEquals(2, count); + } + } finally { + closeRepository(repo); } } private void testDistinctOptionalOrderByMath() throws Exception { - - conn.add(new StringReader("[] a .\n" + "[] a ; 123 ."), "", RDFFormat.TURTLE); - - String query = "select distinct ?o ?nr { ?o a optional { ?o ?nr } } order by (?nr + STRLEN(?o))"; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - try (Stream result = tq.evaluate().stream()) { - long count = result.count(); - assertEquals(2, count); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + conn.add(new StringReader("[] a .\n" + "[] a ; 123 ."), "", + RDFFormat.TURTLE); + + String query = "select distinct ?o ?nr { ?o a optional { ?o ?nr } } order by (?nr + STRLEN(?o))"; + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + try (Stream result = tq.evaluate().stream()) { + long count = result.count(); + assertEquals(2, count); + } + } finally { + closeRepository(repo); } } diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/PropertyPathTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/PropertyPathTest.java index 0608609295c..70e44098537 100644 --- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/PropertyPathTest.java +++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/PropertyPathTest.java @@ -21,6 +21,7 @@ import java.io.StringReader; import java.util.List; +import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -42,11 +43,11 @@ import org.eclipse.rdf4j.query.impl.MapBindingSet; 
import org.eclipse.rdf4j.query.impl.SimpleBinding; import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; import org.eclipse.rdf4j.rio.RDFFormat; import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest; import org.eclipse.rdf4j.testsuite.sparql.vocabulary.EX; import org.junit.jupiter.api.DynamicTest; -import org.junit.jupiter.api.Test; /** * Tests on SPARQL property paths. @@ -57,145 +58,176 @@ */ public class PropertyPathTest extends AbstractComplianceTest { - public PropertyPathTest(Repository repo) { + public PropertyPathTest(Supplier repo) { super(repo); } private void testSES2147PropertyPathsWithIdenticalSubsPreds() throws Exception { - String data = " .\n" + " .\n" - + " .\n" + " .\n" + " .\n"; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String data = " .\n" + " .\n" + + " .\n" + " .\n" + + " .\n"; - conn.begin(); - conn.add(new StringReader(data), "", RDFFormat.NTRIPLES); - conn.commit(); + conn.begin(); + conn.add(new StringReader(data), "", RDFFormat.NTRIPLES); + conn.commit(); - String query = getNamespaceDeclarations() + "SELECT ?x \n" + "WHERE { ?x */ . \n" - + " ?x */ . \n" + "} \n"; + String query = getNamespaceDeclarations() + "SELECT ?x \n" + "WHERE { ?x */ . \n" + + " ?x */ . \n" + "} \n"; - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - try (TupleQueryResult result = tq.evaluate()) { + try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); - assertTrue(result.hasNext()); + assertNotNull(result); + assertTrue(result.hasNext()); - Value x = result.next().getValue("x"); - assertNotNull(x); - assertTrue(x instanceof IRI); - assertEquals("urn:s1", x.stringValue()); - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + Value x = result.next().getValue("x"); + assertNotNull(x); + assertTrue(x instanceof IRI); + assertEquals("urn:s1", x.stringValue()); + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); + } + } finally { + closeRepository(repo); } } private void testSES2024PropertyPathAnonVarSharing() throws Exception { - loadTestData("/testdata-query/dataset-ses2024.trig"); - String query = "PREFIX : SELECT * WHERE { ?x1 :p/:lit ?l1 . ?x1 :diff ?x2 . ?x2 :p/:lit ?l2 . }"; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); - - BindingSet bs = result.next(); - Literal l1 = (Literal) bs.getValue("l1"); - Literal l2 = (Literal) bs.getValue("l2"); - - assertNotNull(l1); - assertNotEquals(l1, l2); - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-ses2024.trig", conn); + String query = "PREFIX : SELECT * WHERE { ?x1 :p/:lit ?l1 . ?x1 :diff ?x2 . ?x2 :p/:lit ?l2 . 
}"; + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); + + BindingSet bs = result.next(); + Literal l1 = (Literal) bs.getValue("l1"); + Literal l2 = (Literal) bs.getValue("l2"); + + assertNotNull(l1); + assertNotEquals(l1, l2); + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); + } + } finally { + closeRepository(repo); } } private void testPropertyPathNegationInversion() throws Exception { - String data = "@prefix : .\n" + ":Mary :parentOf :Jim.\n" + ":Jim :knows :Jane.\n" - + ":Jane :worksFor :IBM."; - - conn.add(new StringReader(data), "", RDFFormat.TURTLE); - String query1 = "prefix : ASK WHERE { :IBM ^(:|!:) :Jane } "; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String data = "@prefix : .\n" + ":Mary :parentOf :Jim.\n" + ":Jim :knows :Jane.\n" + + ":Jane :worksFor :IBM."; - assertTrue(conn.prepareBooleanQuery(query1).evaluate()); + conn.add(new StringReader(data), "", RDFFormat.TURTLE); + String query1 = "prefix : ASK WHERE { :IBM ^(:|!:) :Jane } "; - String query2 = "prefix : ASK WHERE { :IBM ^(:|!:) ?a } "; - assertTrue(conn.prepareBooleanQuery(query2).evaluate()); + assertTrue(conn.prepareBooleanQuery(query1).evaluate()); - String query3 = "prefix : ASK WHERE { :IBM (^(:|!:))* :Mary } "; - assertTrue(conn.prepareBooleanQuery(query3).evaluate()); + String query2 = "prefix : ASK WHERE { :IBM ^(:|!:) ?a } "; + assertTrue(conn.prepareBooleanQuery(query2).evaluate()); + String query3 = "prefix : ASK WHERE { :IBM (^(:|!:))* :Mary } "; + assertTrue(conn.prepareBooleanQuery(query3).evaluate()); + } finally { + closeRepository(repo); + } } private void testSES2336NegatedPropertyPathMod() throws Exception { - loadTestData("/testdata-query/dataset-ses2336.trig"); - String query = "prefix : select * where { ?s a :Test ; !:p? ?o . }"; - - ValueFactory vf = conn.getValueFactory(); - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - - try (TupleQueryResult evaluate = tq.evaluate()) { - List result = QueryResults.asList(evaluate); - assertNotNull(result); - - IRI a = vf.createIRI(EX.NAMESPACE, "a"); - IRI b = vf.createIRI(EX.NAMESPACE, "b"); - IRI c = vf.createIRI(EX.NAMESPACE, "c"); - IRI d = vf.createIRI(EX.NAMESPACE, "d"); - IRI e = vf.createIRI(EX.NAMESPACE, "e"); - IRI test = vf.createIRI(EX.NAMESPACE, "Test"); - - assertTrue(containsSolution(result, new SimpleBinding("s", a), new SimpleBinding("o", a))); - assertTrue(containsSolution(result, new SimpleBinding("s", a), new SimpleBinding("o", test))); - assertTrue(containsSolution(result, new SimpleBinding("s", a), new SimpleBinding("o", c))); - assertTrue(containsSolution(result, new SimpleBinding("s", d), new SimpleBinding("o", d))); - assertTrue(containsSolution(result, new SimpleBinding("s", d), new SimpleBinding("o", e))); - assertTrue(containsSolution(result, new SimpleBinding("s", d), new SimpleBinding("o", test))); - - assertFalse(containsSolution(result, new SimpleBinding("s", a), new SimpleBinding("o", b))); - - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); - } + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + + loadTestData("/testdata-query/dataset-ses2336.trig", conn); + String query = "prefix : select * where { ?s a :Test ; !:p? ?o . 
}"; + + ValueFactory vf = conn.getValueFactory(); + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + + try (TupleQueryResult evaluate = tq.evaluate()) { + List result = QueryResults.asList(evaluate); + assertNotNull(result); + + IRI a = vf.createIRI(EX.NAMESPACE, "a"); + IRI b = vf.createIRI(EX.NAMESPACE, "b"); + IRI c = vf.createIRI(EX.NAMESPACE, "c"); + IRI d = vf.createIRI(EX.NAMESPACE, "d"); + IRI e = vf.createIRI(EX.NAMESPACE, "e"); + IRI test = vf.createIRI(EX.NAMESPACE, "Test"); + + assertTrue(containsSolution(result, new SimpleBinding("s", a), new SimpleBinding("o", a))); + assertTrue(containsSolution(result, new SimpleBinding("s", a), new SimpleBinding("o", test))); + assertTrue(containsSolution(result, new SimpleBinding("s", a), new SimpleBinding("o", c))); + assertTrue(containsSolution(result, new SimpleBinding("s", d), new SimpleBinding("o", d))); + assertTrue(containsSolution(result, new SimpleBinding("s", d), new SimpleBinding("o", e))); + assertTrue(containsSolution(result, new SimpleBinding("s", d), new SimpleBinding("o", test))); + + assertFalse(containsSolution(result, new SimpleBinding("s", a), new SimpleBinding("o", b))); + + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); + } + } finally { + closeRepository(repo); + } } private void testSES1685propPathSameVar() throws Exception { - final String queryStr = "PREFIX : SELECT ?x WHERE {?x :p+ ?x}"; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + final String queryStr = "PREFIX : SELECT ?x WHERE {?x :p+ ?x}"; - conn.add(new StringReader("@prefix : . :a :p :b . :b :p :a ."), "", RDFFormat.TURTLE); + conn.add(new StringReader("@prefix : . :a :p :b . :b :p :a ."), "", RDFFormat.TURTLE); - TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryStr); + TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryStr); - try (Stream result = query.evaluate().stream()) { - long count = result.count(); - assertEquals(2, count); + try (Stream result = query.evaluate().stream()) { + long count = result.count(); + assertEquals(2, count); + } + } finally { + closeRepository(repo); } } private void testSES1073InverseSymmetricPattern() { - IRI a = iri("http://example.org/a"); - IRI b1 = iri("http://example.org/b1"); - IRI b2 = iri("http://example.org/b2"); - IRI c1 = iri("http://example.org/c1"); - IRI c2 = iri("http://example.org/c2"); - IRI a2b = iri("http://example.org/a2b"); - IRI b2c = iri("http://example.org/b2c"); - conn.add(a, a2b, b1); - conn.add(a, a2b, b2); - conn.add(b1, b2c, c1); - conn.add(b2, b2c, c2); - String query = "select * "; - query += "where{ "; - query += "?c1 ^/^// ?c2 . "; - query += " } "; - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - - try (Stream result = tq.evaluate().stream()) { - long count = result.count(); - assertEquals(4, count); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + IRI a = iri("http://example.org/a"); + IRI b1 = iri("http://example.org/b1"); + IRI b2 = iri("http://example.org/b2"); + IRI c1 = iri("http://example.org/c1"); + IRI c2 = iri("http://example.org/c2"); + IRI a2b = iri("http://example.org/a2b"); + IRI b2c = iri("http://example.org/b2c"); + conn.add(a, a2b, b1); + conn.add(a, a2b, b2); + conn.add(b1, b2c, c1); + conn.add(b2, b2c, c2); + String query = "select * "; + query += "where{ "; + query += "?c1 ^/^// ?c2 . 
"; + query += " } "; + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + + try (Stream result = tq.evaluate().stream()) { + long count = result.count(); + assertEquals(4, count); + } + } finally { + closeRepository(repo); } } @@ -204,46 +236,63 @@ private void testSES1073InverseSymmetricPattern() { */ private void testNestedInversePropertyPathWithZeroLength() { - String insert = "insert data {\n" + " .\n" - + " .\n" + " .\n" - + " .\n" + " .\n" - + " .\n" + "}"; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String insert = "insert data {\n" + " .\n" + + " .\n" + " .\n" + + " .\n" + " .\n" + + " .\n" + "}"; - String query = "select * where { \n" + " (^)? ?o .\n" + "}"; + String query = "select * where { \n" + " (^)? ?o .\n" + "}"; - conn.prepareUpdate(insert).execute(); + conn.prepareUpdate(insert).execute(); - TupleQuery tq = conn.prepareTupleQuery(query); + TupleQuery tq = conn.prepareTupleQuery(query); - List result = QueryResults.asList(tq.evaluate()); - assertThat(result).hasSize(4); + try (final TupleQueryResult evaluate = tq.evaluate()) { + List result = QueryResults.asList(evaluate); + assertThat(result).hasSize(4); + } + } finally { + closeRepository(repo); + } } private void testComplexPath() { - conn.add(Values.bnode(), SKOS.BROADER, Values.bnode()); - conn.add(Values.bnode(), SKOS.TOP_CONCEPT_OF, Values.bnode()); - - TupleQuery tupleQuery = conn.prepareTupleQuery( - "PREFIX skos: \r\n" + " SELECT * " + " WHERE {\r\n" - + " ?s (skos:broader|^skos:narrower|skos:topConceptOf|^skos:hasTopConcept) ?o.\r\n" + " }"); - try (TupleQueryResult evaluate = tupleQuery.evaluate()) { - List collect = evaluate.stream().collect(Collectors.toList()); - assertEquals(2, collect.size()); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + conn.add(Values.bnode(), SKOS.BROADER, Values.bnode()); + conn.add(Values.bnode(), SKOS.TOP_CONCEPT_OF, Values.bnode()); + + TupleQuery tupleQuery = conn.prepareTupleQuery("PREFIX skos: \r\n" + + " SELECT * " + " WHERE {\r\n" + + " ?s (skos:broader|^skos:narrower|skos:topConceptOf|^skos:hasTopConcept) ?o.\r\n" + " }"); + try (TupleQueryResult evaluate = tupleQuery.evaluate()) { + List collect = evaluate.stream().collect(Collectors.toList()); + assertEquals(2, collect.size()); + } + } finally { + closeRepository(repo); } } private void testInversePath() { - BNode bnode1 = Values.bnode("bnode1"); - - conn.add(Values.bnode(), FOAF.KNOWS, bnode1); - conn.add(Values.bnode(), FOAF.KNOWS, bnode1); - - TupleQuery tupleQuery = conn.prepareTupleQuery("PREFIX foaf: <" + FOAF.NAMESPACE + ">\n" + "SELECT * WHERE {\n" - + " ?x foaf:knows/^foaf:knows ?y . \n" + " FILTER(?x != ?y)\n" + "}"); - - try (TupleQueryResult evaluate = tupleQuery.evaluate()) { - List collect = evaluate.stream().collect(Collectors.toList()); - assertEquals(2, collect.size()); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + BNode bnode1 = Values.bnode("bnode1"); + + conn.add(Values.bnode(), FOAF.KNOWS, bnode1); + conn.add(Values.bnode(), FOAF.KNOWS, bnode1); + + TupleQuery tupleQuery = conn.prepareTupleQuery("PREFIX foaf: <" + FOAF.NAMESPACE + ">\n" + + "SELECT * WHERE {\n" + " ?x foaf:knows/^foaf:knows ?y . 
\n" + " FILTER(?x != ?y)\n" + "}"); + + try (TupleQueryResult evaluate = tupleQuery.evaluate()) { + List collect = evaluate.stream().collect(Collectors.toList()); + assertEquals(2, collect.size()); + } + } finally { + closeRepository(repo); } } diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/SubselectTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/SubselectTest.java index a6947459928..c835192e332 100644 --- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/SubselectTest.java +++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/SubselectTest.java @@ -14,6 +14,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.List; +import java.util.function.Supplier; import java.util.stream.Stream; import org.eclipse.rdf4j.model.Literal; @@ -23,9 +24,9 @@ import org.eclipse.rdf4j.query.TupleQuery; import org.eclipse.rdf4j.query.TupleQueryResult; import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest; import org.junit.jupiter.api.DynamicTest; -import org.junit.jupiter.api.Test; /** * Tests on SPARQL nested SELECT query handling. @@ -34,60 +35,70 @@ */ public class SubselectTest extends AbstractComplianceTest { - public SubselectTest(Repository repo) { + public SubselectTest(Supplier repo) { super(repo); } private void testSES2373SubselectOptional() { - conn.prepareUpdate(QueryLanguage.SPARQL, - "insert data {" + " ." + " 1 ." + " ." - + " ." + " 2 ." + " ." - + " ." + " 3 ." + " ." - + " ." + " 4 ." + " ." - + " ." + " 5 ." + " ." + "}") - .execute(); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + conn.prepareUpdate(QueryLanguage.SPARQL, + "insert data {" + " ." + " 1 ." + " ." + + " ." + " 2 ." + " ." + + " ." + " 3 ." + " ." + + " ." + " 4 ." + " ." + + " ." + " 5 ." + " ." + "}") + .execute(); - String qb = "select ?x { \n" + " { select ?v { ?v filter (?v = ) } }.\n" - + " optional { select ?val { ?v ?val .} }\n" + " ?v ?x \n" + "}\n"; + String qb = "select ?x { \n" + " { select ?v { ?v filter (?v = ) } }.\n" + + " optional { select ?val { ?v ?val .} }\n" + " ?v ?x \n" + "}\n"; - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, qb); - try (TupleQueryResult result = tq.evaluate()) { - assertTrue(result.hasNext(), "The query should return a result"); - BindingSet b = result.next(); - assertTrue(b.hasBinding("x"), "?x is from the mandatory part of the query and should be bound"); + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, qb); + try (TupleQueryResult result = tq.evaluate()) { + assertTrue(result.hasNext(), "The query should return a result"); + BindingSet b = result.next(); + assertTrue(b.hasBinding("x"), "?x is from the mandatory part of the query and should be bound"); + } + } finally { + closeRepository(repo); } } private void testSES2154SubselectOptional() { + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String ub = "insert data { \n" + " a . \n" + " a . \n" + + " a . \n" + " a . \n" + " a . \n" + + " a . \n" + " a . \n" + " a . \n" + + " a . \n" + " a . \n" + " a . \n" + + " a . \n" + " \"01\" . \n" + " \"02\" . \n" + + " \"03\" . \n" + " \"04\" . \n" + + " \"05\" . \n" + " \"06\" . \n" + + " \"07\" . \n" + " \"08\" . \n" + + " \"09\" . \n" + " \"10\" . \n" + + " \"11\" . \n" + " \"12\" . 
\n" + "} \n"; - String ub = "insert data { \n" + " a . \n" + " a . \n" - + " a . \n" + " a . \n" + " a . \n" - + " a . \n" + " a . \n" + " a . \n" - + " a . \n" + " a . \n" + " a . \n" - + " a . \n" + " \"01\" . \n" + " \"02\" . \n" - + " \"03\" . \n" + " \"04\" . \n" + " \"05\" . \n" - + " \"06\" . \n" + " \"07\" . \n" + " \"08\" . \n" - + " \"09\" . \n" + " \"10\" . \n" - + " \"11\" . \n" + " \"12\" . \n" + "} \n"; - - conn.prepareUpdate(QueryLanguage.SPARQL, ub).execute(); + conn.prepareUpdate(QueryLanguage.SPARQL, ub).execute(); - String qb = "SELECT ?s ?label\n" + "WHERE { \n" + " ?s a \n .\n" - + " OPTIONAL { {SELECT ?label WHERE { \n" + " ?s ?label . \n" - + " } ORDER BY ?label LIMIT 2 \n" + " }\n" + " }\n" + "}\n" + "ORDER BY ?s\n" - + "LIMIT 10 \n"; + String qb = "SELECT ?s ?label\n" + "WHERE { \n" + " ?s a \n .\n" + + " OPTIONAL { {SELECT ?label WHERE { \n" + " ?s ?label . \n" + + " } ORDER BY ?label LIMIT 2 \n" + " }\n" + " }\n" + "}\n" + + "ORDER BY ?s\n" + "LIMIT 10 \n"; - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, qb); - try (TupleQueryResult evaluate = tq.evaluate()) { - assertTrue(evaluate.hasNext(), "The query should return a result"); + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, qb); + try (TupleQueryResult evaluate = tq.evaluate()) { + assertTrue(evaluate.hasNext(), "The query should return a result"); - List result = QueryResults.asList(evaluate); - assertEquals(10, result.size()); - for (BindingSet bs : result) { - Literal label = (Literal) bs.getValue("label"); - assertTrue(label.stringValue().equals("01") || label.stringValue().equals("02"), - "wrong label value (expected '01' or '02', but got '" + label.stringValue() + "')"); + List result = QueryResults.asList(evaluate); + assertEquals(10, result.size()); + for (BindingSet bs : result) { + Literal label = (Literal) bs.getValue("label"); + assertTrue(label.stringValue().equals("01") || label.stringValue().equals("02"), + "wrong label value (expected '01' or '02', but got '" + label.stringValue() + "')"); + } } + } finally { + closeRepository(repo); } } diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/UnionTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/UnionTest.java index 910da3d7b2c..804a7123676 100644 --- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/UnionTest.java +++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/UnionTest.java @@ -17,6 +17,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; +import java.util.function.Supplier; import java.util.stream.Stream; import org.eclipse.rdf4j.model.IRI; @@ -28,9 +29,9 @@ import org.eclipse.rdf4j.query.TupleQuery; import org.eclipse.rdf4j.query.TupleQueryResult; import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest; import org.junit.jupiter.api.DynamicTest; -import org.junit.jupiter.api.Test; /** * Tests on SPRQL UNION clauses. @@ -40,96 +41,109 @@ */ public class UnionTest extends AbstractComplianceTest { - public UnionTest(Repository repo) { + public UnionTest(Supplier repo) { super(repo); } private void testEmptyUnion() { - String query = "PREFIX : " + "SELECT ?visibility WHERE {" - + "OPTIONAL { SELECT ?var WHERE { :s a :MyType . BIND (:s as ?var ) .} } ." 
- + "BIND (IF(BOUND(?var), 'VISIBLE', 'HIDDEN') as ?visibility)" + "}"; - try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { - assertNotNull(result); - assertFalse(result.hasNext()); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String query = "PREFIX : " + "SELECT ?visibility WHERE {" + + "OPTIONAL { SELECT ?var WHERE { :s a :MyType . BIND (:s as ?var ) .} } ." + + "BIND (IF(BOUND(?var), 'VISIBLE', 'HIDDEN') as ?visibility)" + "}"; + try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) { + assertNotNull(result); + assertFalse(result.hasNext()); + } + } finally { + closeRepository(repo); } } private void testSameTermRepeatInUnion() throws Exception { - loadTestData("/testdata-query/dataset-query.trig"); - String query = "PREFIX foaf:\n" + "SELECT * {\n" + " {\n" - + " ?sameTerm foaf:mbox ?mbox\n" + " FILTER sameTerm(?sameTerm,$william)\n" - + " } UNION {\n" + " ?x foaf:knows ?sameTerm\n" - + " FILTER sameTerm(?sameTerm,$william)\n" + " }\n" + "}"; + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-query.trig", conn); + String query = "PREFIX foaf:\n" + "SELECT * {\n" + " {\n" + + " ?sameTerm foaf:mbox ?mbox\n" + " FILTER sameTerm(?sameTerm,$william)\n" + + " } UNION {\n" + " ?x foaf:knows ?sameTerm\n" + + " FILTER sameTerm(?sameTerm,$william)\n" + " }\n" + "}"; - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tq.setBinding("william", conn.getValueFactory().createIRI("http://example.org/william")); + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + tq.setBinding("william", conn.getValueFactory().createIRI("http://example.org/william")); - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); - int count = 0; - while (result.hasNext()) { - BindingSet bs = result.next(); - count++; - assertNotNull(bs); + int count = 0; + while (result.hasNext()) { + BindingSet bs = result.next(); + count++; + assertNotNull(bs); - // System.out.println(bs); + // System.out.println(bs); - Value mbox = bs.getValue("mbox"); - Value x = bs.getValue("x"); + Value mbox = bs.getValue("mbox"); + Value x = bs.getValue("x"); - assertTrue(mbox instanceof Literal || x instanceof IRI); + assertTrue(mbox instanceof Literal || x instanceof IRI); + } + assertEquals(3, count); + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); } - assertEquals(3, count); - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + } finally { + closeRepository(repo); } - } private void testSameTermRepeatInUnionAndOptional() throws Exception { - loadTestData("/testdata-query/dataset-query.trig"); - - String query = getNamespaceDeclarations() + "SELECT * {\n" + " {\n" + " ex:a ?p ?prop1\n" - + " FILTER (?p = ex:prop1)\n" + " } UNION {\n" + " ?s ex:p ex:A ; " + " { " - + " { " + " ?s ?p ?l ." + " FILTER(?p = rdfs:label) " - + " } " + " OPTIONAL { " + " ?s ?p ?opt1 . " - + " FILTER (?p = ex:prop1) " + " } " + " OPTIONAL { " - + " ?s ?p ?opt2 . 
" + " FILTER (?p = ex:prop2) " + " } " - + " }" + " }\n" + "}"; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); - - int count = 0; - while (result.hasNext()) { - BindingSet bs = result.next(); - count++; - assertNotNull(bs); - - // System.out.println(bs); - - Value prop1 = bs.getValue("prop1"); - Value l = bs.getValue("l"); - - assertTrue(prop1 instanceof Literal || l instanceof Literal); - if (l instanceof Literal) { - Value opt1 = bs.getValue("opt1"); - assertNull(opt1); - - Value opt2 = bs.getValue("opt2"); - assertNull(opt2); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-query.trig", conn); + + String query = getNamespaceDeclarations() + "SELECT * {\n" + " {\n" + " ex:a ?p ?prop1\n" + + " FILTER (?p = ex:prop1)\n" + " } UNION {\n" + " ?s ex:p ex:A ; " + + " { " + " { " + " ?s ?p ?l ." + + " FILTER(?p = rdfs:label) " + " } " + " OPTIONAL { " + + " ?s ?p ?opt1 . " + " FILTER (?p = ex:prop1) " + + " } " + " OPTIONAL { " + " ?s ?p ?opt2 . " + + " FILTER (?p = ex:prop2) " + " } " + " }" + " }\n" + "}"; + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); + + int count = 0; + while (result.hasNext()) { + BindingSet bs = result.next(); + count++; + assertNotNull(bs); + + // System.out.println(bs); + + Value prop1 = bs.getValue("prop1"); + Value l = bs.getValue("l"); + + assertTrue(prop1 instanceof Literal || l instanceof Literal); + if (l instanceof Literal) { + Value opt1 = bs.getValue("opt1"); + assertNull(opt1); + + Value opt2 = bs.getValue("opt2"); + assertNull(opt2); + } } + assertEquals(2, count); + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); } - assertEquals(2, count); - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + } finally { + closeRepository(repo); } - } public Stream tests() { diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ValuesTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ValuesTest.java index ec75c54a67c..dc4bf759f77 100644 --- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ValuesTest.java +++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ValuesTest.java @@ -21,6 +21,7 @@ import java.io.StringReader; import java.util.List; +import java.util.function.Supplier; import java.util.stream.Stream; import org.eclipse.rdf4j.model.IRI; @@ -36,10 +37,10 @@ import org.eclipse.rdf4j.query.TupleQueryResult; import org.eclipse.rdf4j.query.Update; import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; import org.eclipse.rdf4j.rio.RDFFormat; import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest; import org.junit.jupiter.api.DynamicTest; -import org.junit.jupiter.api.Test; /** * Tests on SPARQL VALUES clauses. @@ -48,123 +49,147 @@ */ public class ValuesTest extends AbstractComplianceTest { - public ValuesTest(Repository repo) { + public ValuesTest(Supplier repo) { super(repo); } private void testValuesInOptional() throws Exception { - loadTestData("/testdata-query/dataset-ses1692.trig"); - String query = " PREFIX : \n" - + " SELECT DISTINCT ?a ?name ?isX WHERE { ?b :p1 ?a . ?a :name ?name. OPTIONAL { ?a a :X . 
VALUES(?isX) { (:X) } } } "; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); - assertTrue(result.hasNext()); - - int count = 0; - while (result.hasNext()) { - count++; - BindingSet bs = result.next(); - // System.out.println(bs); - IRI a = (IRI) bs.getValue("a"); - assertNotNull(a); - Value isX = bs.getValue("isX"); - Literal name = (Literal) bs.getValue("name"); - assertNotNull(name); - if (a.stringValue().endsWith("a1")) { - assertNotNull(isX); - } else if (a.stringValue().endsWith(("a2"))) { - assertNull(isX); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-ses1692.trig", conn); + String query = " PREFIX : \n" + + " SELECT DISTINCT ?a ?name ?isX WHERE { ?b :p1 ?a . ?a :name ?name. OPTIONAL { ?a a :X . VALUES(?isX) { (:X) } } } "; + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); + assertTrue(result.hasNext()); + + int count = 0; + while (result.hasNext()) { + count++; + BindingSet bs = result.next(); + // System.out.println(bs); + IRI a = (IRI) bs.getValue("a"); + assertNotNull(a); + Value isX = bs.getValue("isX"); + Literal name = (Literal) bs.getValue("name"); + assertNotNull(name); + if (a.stringValue().endsWith("a1")) { + assertNotNull(isX); + } else if (a.stringValue().endsWith(("a2"))) { + assertNull(isX); + } } + assertEquals(2, count); } - assertEquals(2, count); + } finally { + closeRepository(repo); } } private void testValuesClauseNamedGraph() throws Exception { - String ex = "http://example.org/"; - String data = "@prefix foaf: <" + FOAF.NAMESPACE + "> .\n" + "@prefix ex: <" + ex + "> .\n" + "ex:graph1 {\n" - + " ex:Person1 rdf:type foaf:Person ;\n" - + " foaf:name \"Person 1\" . ex:Person2 rdf:type foaf:Person ;\n" - + " foaf:name \"Person 2\" . ex:Person3 rdf:type foaf:Person ;\n" - + " foaf:name \"Person 3\" .\n" + "}"; - - conn.add(new StringReader(data), "", RDFFormat.TRIG); - - String query = "SELECT ?person ?name ?__index \n" + "WHERE { " - + " VALUES (?person ?name ?__index) { \n" - + " ( UNDEF \"0\") \n" - + " ( UNDEF \"2\") } \n" - + " GRAPH { ?person ?name . } }"; - - TupleQuery q = conn.prepareTupleQuery(query); - - List result = QueryResults.asList(q.evaluate()); - assertThat(result).hasSize(2); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String ex = "http://example.org/"; + String data = "@prefix foaf: <" + FOAF.NAMESPACE + "> .\n" + "@prefix ex: <" + ex + "> .\n" + + "ex:graph1 {\n" + " ex:Person1 rdf:type foaf:Person ;\n" + + " foaf:name \"Person 1\" . ex:Person2 rdf:type foaf:Person ;\n" + + " foaf:name \"Person 2\" . ex:Person3 rdf:type foaf:Person ;\n" + + " foaf:name \"Person 3\" .\n" + "}"; + + conn.add(new StringReader(data), "", RDFFormat.TRIG); + + String query = "SELECT ?person ?name ?__index \n" + "WHERE { " + + " VALUES (?person ?name ?__index) { \n" + + " ( UNDEF \"0\") \n" + + " ( UNDEF \"2\") } \n" + + " GRAPH { ?person ?name . 
} }"; + + TupleQuery q = conn.prepareTupleQuery(query); + + List result = QueryResults.asList(q.evaluate()); + assertThat(result).hasSize(2); + } finally { + closeRepository(repo); + } } private void testValuesCartesianProduct() { - final String queryString = "" + "select ?x ?y where { " + " values ?x { undef 67 } " - + " values ?y { undef 42 } " + "}"; - final TupleQuery tupleQuery = conn.prepareTupleQuery(queryString); - - List bindingSets = QueryResults.asList(tupleQuery.evaluate()); - assertThat(bindingSets).hasSize(4); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + final String queryString = "" + "select ?x ?y where { " + " values ?x { undef 67 } " + + " values ?y { undef 42 } " + "}"; + final TupleQuery tupleQuery = conn.prepareTupleQuery(queryString); + + List bindingSets = QueryResults.asList(tupleQuery.evaluate()); + assertThat(bindingSets).hasSize(4); + } finally { + closeRepository(repo); + } } private void testSES1081SameTermWithValues() throws Exception { - loadTestData("/testdata-query/dataset-ses1081.trig"); - String query = "PREFIX ex: \n" + " SELECT * \n" + " WHERE { \n " - + " ?s ex:p ?a . \n" + " FILTER sameTerm(?a, ?e) \n " - + " VALUES ?e { ex:b } \n " + " } "; - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); - - int count = 0; - while (result.hasNext()) { - BindingSet bs = result.next(); - count++; - assertNotNull(bs); - - Value s = bs.getValue("s"); - Value a = bs.getValue("a"); - - assertNotNull(s); - assertNotNull(a); - assertEquals(iri("http://example.org/a"), s); - assertEquals(iri("http://example.org/b"), a); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testdata-query/dataset-ses1081.trig", conn); + String query = "PREFIX ex: \n" + " SELECT * \n" + " WHERE { \n " + + " ?s ex:p ?a . \n" + " FILTER sameTerm(?a, ?e) \n " + + " VALUES ?e { ex:b } \n " + " } "; + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); + + int count = 0; + while (result.hasNext()) { + BindingSet bs = result.next(); + count++; + assertNotNull(bs); + + Value s = bs.getValue("s"); + Value a = bs.getValue("a"); + + assertNotNull(s); + assertNotNull(a); + assertEquals(iri("http://example.org/a"), s); + assertEquals(iri("http://example.org/b"), a); + } + assertEquals(1, count); + } catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); } - assertEquals(1, count); - } catch (QueryEvaluationException e) { - e.printStackTrace(); - fail(e.getMessage()); + } finally { + closeRepository(repo); } - } private void testSES2136() throws Exception { - loadTestData("/testcases-sparql-1.1-w3c/bindings/data02.ttl"); - String query = "PREFIX : \n" + "SELECT ?s ?o { \n" + " { SELECT * WHERE { ?s ?p ?o . 
} }\n" - + " VALUES (?o) { (:b) }\n" + "}\n"; - - ValueFactory vf = conn.getValueFactory(); - final IRI a = vf.createIRI("http://example.org/a"); - final IRI b = vf.createIRI("http://example.org/b"); - - TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - - try (TupleQueryResult result = tq.evaluate()) { - assertNotNull(result); - assertTrue(result.hasNext()); - BindingSet bs = result.next(); - assertFalse(result.hasNext(), "only one result expected"); - assertEquals(a, bs.getValue("s")); - assertEquals(b, bs.getValue("o")); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + loadTestData("/testcases-sparql-1.1-w3c/bindings/data02.ttl", conn); + String query = "PREFIX : \n" + "SELECT ?s ?o { \n" + + " { SELECT * WHERE { ?s ?p ?o . } }\n" + " VALUES (?o) { (:b) }\n" + "}\n"; + + ValueFactory vf = conn.getValueFactory(); + final IRI a = vf.createIRI("http://example.org/a"); + final IRI b = vf.createIRI("http://example.org/b"); + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + + try (TupleQueryResult result = tq.evaluate()) { + assertNotNull(result); + assertTrue(result.hasNext()); + BindingSet bs = result.next(); + assertFalse(result.hasNext(), "only one result expected"); + assertEquals(a, bs.getValue("s")); + assertEquals(b, bs.getValue("o")); + } + } finally { + closeRepository(repo); } } @@ -173,34 +198,38 @@ private void testSES2136() throws Exception { */ private void testFilterExistsExternalValuesClause() { - String ub = "insert data {\n" + " a .\n" + " a .\n" - + " .\n" - + " .\n" + "}"; - conn.prepareUpdate(QueryLanguage.SPARQL, ub).execute(); - - String query = "select ?s {\n" + " ?s a* .\n" - + " FILTER EXISTS {?s ?o}\n" + "} limit 100 values ?o {}"; - - TupleQuery tq = conn.prepareTupleQuery(query); - - List result = QueryResults.asList(tq.evaluate()); - assertEquals(1, result.size(), "single result expected"); - assertEquals("http://subj1", result.get(0).getValue("s").stringValue()); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + String ub = "insert data {\n" + " a .\n" + + " a .\n" + " .\n" + + " .\n" + "}"; + conn.prepareUpdate(QueryLanguage.SPARQL, ub).execute(); + + String query = "select ?s {\n" + " ?s a* .\n" + + " FILTER EXISTS {?s ?o}\n" + "} limit 100 values ?o {}"; + + TupleQuery tq = conn.prepareTupleQuery(query); + + List result = QueryResults.asList(tq.evaluate()); + assertEquals(1, result.size(), "single result expected"); + assertEquals("http://subj1", result.get(0).getValue("s").stringValue()); + } finally { + closeRepository(repo); + } } public void testMultipleValuesClauses() { - Update update = conn.prepareUpdate("PREFIX ex: \n" + - "\n" + - "INSERT DATA { ex:sub ex:somePred \"value\" . };\n" + - "\n" + - "INSERT { ?s ?newPred ?newObj }\n" + - "WHERE {\n" + - " # If one combines these into a single VALUES clause then it also works\n" + - " VALUES ?newPred { ex:somePred2 }\n" + - " VALUES ?newObj { \"value2\" }\n" + - " ?s ex:somePred [] .\n" + - "}"); - update.execute(); + Repository repo = openRepository(); + try (RepositoryConnection conn = repo.getConnection()) { + Update update = conn.prepareUpdate("PREFIX ex: \n" + "\n" + + "INSERT DATA { ex:sub ex:somePred \"value\" . 
};\n" + "\n" + "INSERT { ?s ?newPred ?newObj }\n" + + "WHERE {\n" + " # If one combines these into a single VALUES clause then it also works\n" + + " VALUES ?newPred { ex:somePred2 }\n" + " VALUES ?newObj { \"value2\" }\n" + + " ?s ex:somePred [] .\n" + "}"); + update.execute(); + } finally { + closeRepository(repo); + } } public Stream tests() {