diff --git a/core/sail/elasticsearch-store/pom.xml b/core/sail/elasticsearch-store/pom.xml
index 57aeb65c505..667b043b147 100644
--- a/core/sail/elasticsearch-store/pom.xml
+++ b/core/sail/elasticsearch-store/pom.xml
@@ -193,7 +193,7 @@
9200
false
- ${java.sec.mgr}
+ ${java.sec.mgr} -Xmx512m
1
diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/AbstractComplianceTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/AbstractComplianceTest.java
index 7f9fa95172e..10255c775b7 100644
--- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/AbstractComplianceTest.java
+++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/AbstractComplianceTest.java
@@ -15,6 +15,7 @@
import java.io.InputStream;
import java.io.Reader;
import java.net.URL;
+import java.util.function.Supplier;
import org.eclipse.rdf4j.common.iteration.CloseableIteration;
import org.eclipse.rdf4j.common.transaction.IsolationLevel;
@@ -60,36 +61,36 @@ public abstract class AbstractComplianceTest {
protected DynamicTest makeTest(String name, Executable x) {
return DynamicTest.dynamicTest(name, () -> {
- setUp();
x.execute();
- tearDown();
});
}
- protected final Logger logger = LoggerFactory.getLogger(this.getClass());
-
- protected final Repository repo;
- protected RepositoryConnection conn;
-
- public AbstractComplianceTest(Repository repo) {
- this.repo = repo;
+ protected Repository openRepository() {
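+ // obtain a fresh repository from the supplier and initialize it for the current test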
+ Repository r = repo.get();
+ r.init();
+ return r;
}
- public void setUp() {
- repo.init();
- conn = new RepositoryConnectionWrapper(repo.getConnection());
+ protected RepositoryConnection openConnection(Repository r) {
+ return new RepositoryConnectionWrapper(r.getConnection());
}
- public void tearDown() {
- try {
+ protected void closeRepository(Repository r) {
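+ // clear all data through a short-lived connection before shutting the repository down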
+ try (RepositoryConnection conn = r.getConnection()) {
conn.clear();
- conn.close();
- } finally {
- repo.shutDown();
}
+ r.shutDown();
+ }
+
+ protected final Logger logger = LoggerFactory.getLogger(this.getClass());
+
+ protected final Supplier<Repository> repo;
+
+ public AbstractComplianceTest(Supplier<Repository> repo) {
+ this.repo = repo;
}
- protected void loadTestData(String dataFile, Resource... contexts)
+ protected void loadTestData(String dataFile, RepositoryConnection conn, Resource... contexts)
throws RDFParseException, RepositoryException, IOException {
logger.debug("loading dataset {}", dataFile);
try (InputStream dataset = this.getClass().getResourceAsStream(dataFile)) {
diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/RepositorySPARQLComplianceTestSuite.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/RepositorySPARQLComplianceTestSuite.java
index 50539b84b9c..d362b020ef6 100644
--- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/RepositorySPARQLComplianceTestSuite.java
+++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/RepositorySPARQLComplianceTestSuite.java
@@ -10,6 +10,8 @@
*******************************************************************************/
package org.eclipse.rdf4j.testsuite.sparql;
+import static org.junit.jupiter.api.Assertions.fail;
+
import java.io.File;
import java.io.IOException;
import java.util.stream.Stream;
@@ -56,92 +58,92 @@ public abstract class RepositorySPARQLComplianceTestSuite {
@TestFactory
Stream<DynamicTest> aggregate() throws RDF4JException, IOException {
- return new AggregateTest(getEmptyInitializedRepository()).tests();
+ return new AggregateTest(this::getEmptyInitializedRepository).tests();
}
@TestFactory
Stream<DynamicTest> arbitraryLengthPath() throws RDF4JException, IOException {
- return new ArbitraryLengthPathTest(getEmptyInitializedRepository()).tests();
+ return new ArbitraryLengthPathTest(this::getEmptyInitializedRepository).tests();
}
@TestFactory
Stream<DynamicTest> basic() throws RDF4JException, IOException {
- return new BasicTest(getEmptyInitializedRepository()).tests();
+ return new BasicTest(this::getEmptyInitializedRepository).tests();
}
@TestFactory
Stream<DynamicTest> bind() throws RDF4JException, IOException {
- return new BindTest(getEmptyInitializedRepository()).tests();
+ return new BindTest(this::getEmptyInitializedRepository).tests();
}
@TestFactory
Stream<DynamicTest> builtinFunction() throws RDF4JException, IOException {
- return new BuiltinFunctionTest(getEmptyInitializedRepository()).tests();
+ return new BuiltinFunctionTest(this::getEmptyInitializedRepository).tests();
}
@TestFactory
Stream<DynamicTest> construct() throws RDF4JException, IOException {
- return new ConstructTest(getEmptyInitializedRepository()).tests();
+ return new ConstructTest(this::getEmptyInitializedRepository).tests();
}
@TestFactory
Stream<DynamicTest> defaultGraph() throws RDF4JException, IOException {
- return new DefaultGraphTest(getEmptyInitializedRepository()).tests();
+ return new DefaultGraphTest(this::getEmptyInitializedRepository).tests();
}
@TestFactory
Stream<DynamicTest> describe() throws RDF4JException, IOException {
- return new DescribeTest(getEmptyInitializedRepository()).tests();
+ return new DescribeTest(this::getEmptyInitializedRepository).tests();
}
@TestFactory
Stream<DynamicTest> groupBy() throws RDF4JException, IOException {
- return new GroupByTest(getEmptyInitializedRepository()).tests();
+ return new GroupByTest(this::getEmptyInitializedRepository).tests();
}
@TestFactory
Stream<DynamicTest> in() throws RDF4JException, IOException {
- return new InTest(getEmptyInitializedRepository()).tests();
+ return new InTest(this::getEmptyInitializedRepository).tests();
}
@TestFactory
Stream<DynamicTest> optional() throws RDF4JException, IOException {
- return new OptionalTest(getEmptyInitializedRepository()).tests();
+ return new OptionalTest(this::getEmptyInitializedRepository).tests();
}
@TestFactory
Stream<DynamicTest> propertyPath() throws RDF4JException, IOException {
- return new PropertyPathTest(getEmptyInitializedRepository()).tests();
+ return new PropertyPathTest(this::getEmptyInitializedRepository).tests();
}
@TestFactory
Stream<DynamicTest> subselect() throws RDF4JException, IOException {
- return new SubselectTest(getEmptyInitializedRepository()).tests();
+ return new SubselectTest(this::getEmptyInitializedRepository).tests();
}
@TestFactory
Stream<DynamicTest> union() throws RDF4JException, IOException {
- return new UnionTest(getEmptyInitializedRepository()).tests();
+ return new UnionTest(this::getEmptyInitializedRepository).tests();
}
@TestFactory
Stream<DynamicTest> values() throws RDF4JException, IOException {
- return new ValuesTest(getEmptyInitializedRepository()).tests();
+ return new ValuesTest(this::getEmptyInitializedRepository).tests();
}
@TestFactory
Stream<DynamicTest> orderBy() throws RDF4JException, IOException {
- return new OrderByTest(getEmptyInitializedRepository()).tests();
+ return new OrderByTest(this::getEmptyInitializedRepository).tests();
}
@TestFactory
Stream<DynamicTest> exists() throws RDF4JException, IOException {
- return new ExistsTest(getEmptyInitializedRepository()).tests();
+ return new ExistsTest(this::getEmptyInitializedRepository).tests();
}
@TestFactory
Stream<DynamicTest> minus() throws RDF4JException, IOException {
- return new MinusTest(getEmptyInitializedRepository()).tests();
+ return new MinusTest(this::getEmptyInitializedRepository).tests();
}
@BeforeAll
@@ -164,13 +166,19 @@ public RepositorySPARQLComplianceTestSuite(RepositoryFactory factory) {
this.factory = factory;
}
- public Repository getEmptyInitializedRepository() throws RDF4JException, IOException {
- Repository repository = factory.getRepository(factory.getConfig());
- repository.setDataDir(dataDir);
- try (RepositoryConnection con = repository.getConnection()) {
- con.clear();
- con.clearNamespaces();
+ public Repository getEmptyInitializedRepository() {
+ try {
+ Repository repository = factory.getRepository(factory.getConfig());
+ repository.setDataDir(dataDir);
+ try (RepositoryConnection con = repository.getConnection()) {
+ con.clear();
+ con.clearNamespaces();
+ }
+ return repository;
+
+ } catch (RDF4JException e) {
+ fail(e);
+ return null;
}
- return repository;
}
}
diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/AggregateTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/AggregateTest.java
index 814e6fe91f0..12fcbc1df0a 100644
--- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/AggregateTest.java
+++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/AggregateTest.java
@@ -21,6 +21,7 @@
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.List;
+import java.util.function.Supplier;
import java.util.stream.Stream;
import org.eclipse.rdf4j.model.BNode;
@@ -36,6 +37,7 @@
import org.eclipse.rdf4j.query.TupleQuery;
import org.eclipse.rdf4j.query.TupleQueryResult;
import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest;
import org.junit.jupiter.api.DynamicTest;
@@ -46,13 +48,12 @@
*/
public class AggregateTest extends AbstractComplianceTest {
- public AggregateTest(Repository repo) {
+ public AggregateTest(Supplier<Repository> repo) {
super(repo);
}
public Stream<DynamicTest> tests() {
- return Stream.of(
- makeTest("MaxAggregateWithGroupEmptyResult", this::testMaxAggregateWithGroupEmptyResult),
+ return Stream.of(makeTest("MaxAggregateWithGroupEmptyResult", this::testMaxAggregateWithGroupEmptyResult),
makeTest("MaxAggregateWithoutGroupEmptySolution", this::testMaxAggregateWithoutGroupEmptySolution),
makeTest("MinAggregateWithGroupEmptyResult", this::testMinAggregateWithGroupEmptyResult),
makeTest("MinAggregateWithoutGroupEmptySolution", this::testMinAggregateWithoutGroupEmptySolution),
@@ -61,12 +62,9 @@ public Stream<DynamicTest> tests() {
this::testSampleAggregateWithoutGroupEmptySolution),
makeTest("SES2361UndefMin", this::testSES2361UndefMin),
makeTest("CountOrderBy_ImplicitGroup", this::testCountOrderBy_ImplicitGroup),
- makeTest("DistinctMax", this::testDistinctMax),
- makeTest("Max", this::testMax),
- makeTest("DistinctAvg", this::testDistinctAvg),
- makeTest("Avg", this::testAvg),
- makeTest("DistinctSum", this::testDistinctSum),
- makeTest("Sum", this::testSum),
+ makeTest("DistinctMax", this::testDistinctMax), makeTest("Max", this::testMax),
+ makeTest("DistinctAvg", this::testDistinctAvg), makeTest("Avg", this::testAvg),
+ makeTest("DistinctSum", this::testDistinctSum), makeTest("Sum", this::testSum),
makeTest("CountHaving", this::testCountHaving),
makeTest("SES1970CountDistinctWildcard", this::testSES1970CountDistinctWildcard),
makeTest("GroupConcatNonDistinct", this::testGroupConcatNonDistinct),
@@ -75,18 +73,22 @@ public Stream<DynamicTest> tests() {
makeTest("SES2361UndefSum", this::testSES2361UndefSum),
makeTest("SES2361UndefCountWildcard", this::testSES2361UndefCountWildcard),
makeTest("SES2361UndefCount", this::testSES2361UndefCount),
- makeTest("SES2361UndefMax", this::testSES2361UndefMax)
- );
+ makeTest("SES2361UndefMax", this::testSES2361UndefMax));
}
/**
* See https://github.com/eclipse/rdf4j/issues/1978
*/
private void testMaxAggregateWithGroupEmptyResult() {
- String query = "select ?s (max(?o) as ?omax) {\n" + " ?s ?p ?o .\n" + " }\n" + " group by ?s\n";
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String query = "select ?s (max(?o) as ?omax) {\n" + " ?s ?p ?o .\n" + " }\n" + " group by ?s\n";
- try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) {
- assertThat(result.hasNext()).isFalse();
+ try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) {
+ assertThat(result.hasNext()).isFalse();
+ }
+ } finally {
+ closeRepository(repo);
}
}
@@ -94,10 +96,15 @@ private void testMaxAggregateWithGroupEmptyResult() {
* See https://github.com/eclipse/rdf4j/issues/1978
*/
private void testMaxAggregateWithoutGroupEmptySolution() {
- String query = "select (max(?o) as ?omax) {\n" + " ?s ?p ?o .\n" + " }\n";
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String query = "select (max(?o) as ?omax) {\n" + " ?s ?p ?o .\n" + " }\n";
- try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) {
- assertThat(result.next()).isEmpty();
+ try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) {
+ assertThat(result.next()).isEmpty();
+ }
+ } finally {
+ closeRepository(repo);
}
}
@@ -106,10 +113,15 @@ private void testMaxAggregateWithoutGroupEmptySolution() {
*/
private void testMinAggregateWithGroupEmptyResult() {
- String query = "select ?s (min(?o) as ?omin) {\n" + " ?s ?p ?o .\n" + " }\n" + " group by ?s\n";
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String query = "select ?s (min(?o) as ?omin) {\n" + " ?s ?p ?o .\n" + " }\n" + " group by ?s\n";
- try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) {
- assertThat(result.hasNext()).isFalse();
+ try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) {
+ assertThat(result.hasNext()).isFalse();
+ }
+ } finally {
+ closeRepository(repo);
}
}
@@ -117,10 +129,15 @@ private void testMinAggregateWithGroupEmptyResult() {
* See https://github.com/eclipse/rdf4j/issues/1978
*/
private void testMinAggregateWithoutGroupEmptySolution() {
- String query = "select (min(?o) as ?omin) {\n" + " ?s ?p ?o .\n" + " }\n";
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String query = "select (min(?o) as ?omin) {\n" + " ?s ?p ?o .\n" + " }\n";
- try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) {
- assertThat(result.next()).isEmpty();
+ try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) {
+ assertThat(result.next()).isEmpty();
+ }
+ } finally {
+ closeRepository(repo);
}
}
@@ -128,10 +145,15 @@ private void testMinAggregateWithoutGroupEmptySolution() {
* See https://github.com/eclipse/rdf4j/issues/1978
*/
private void testSampleAggregateWithGroupEmptyResult() {
- String query = "select ?s (sample(?o) as ?osample) {\n" + " ?s ?p ?o .\n" + " }\n" + " group by ?s\n";
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String query = "select ?s (sample(?o) as ?osample) {\n" + " ?s ?p ?o .\n" + " }\n" + " group by ?s\n";
- try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) {
- assertThat(result.hasNext()).isFalse();
+ try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) {
+ assertThat(result.hasNext()).isFalse();
+ }
+ } finally {
+ closeRepository(repo);
}
}
@@ -139,317 +161,405 @@ private void testSampleAggregateWithGroupEmptyResult() {
* See https://github.com/eclipse/rdf4j/issues/1978
*/
private void testSampleAggregateWithoutGroupEmptySolution() {
- String query = "select (sample(?o) as ?osample) {\n" + " ?s ?p ?o .\n" + " }\n";
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String query = "select (sample(?o) as ?osample) {\n" + " ?s ?p ?o .\n" + " }\n";
- try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) {
- assertThat(result.next()).isEmpty();
+ try (TupleQueryResult result = conn.prepareTupleQuery(query).evaluate()) {
+ assertThat(result.next()).isEmpty();
+ }
+ } finally {
+ closeRepository(repo);
}
}
private void testSES2361UndefMin() {
- String query = "SELECT (MIN(?v) as ?min) WHERE { VALUES ?v { 1 2 undef 3 4 }}";
- try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
- assertThat((Iterable<?>) result).isNotNull();
- assertThat(result.hasNext()).isTrue();
- assertThat(result.next().getValue("min").stringValue()).isEqualTo("1");
- assertThat(result.hasNext()).isFalse();
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String query = "SELECT (MIN(?v) as ?min) WHERE { VALUES ?v { 1 2 undef 3 4 }}";
+ try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
+ assertThat((Iterable<?>) result).isNotNull();
+ assertThat(result.hasNext()).isTrue();
+ assertThat(result.next().getValue("min").stringValue()).isEqualTo("1");
+ assertThat(result.hasNext()).isFalse();
+ }
+ } finally {
+ closeRepository(repo);
}
}
private void testSES2361UndefMax() {
- String query = "SELECT (MAX(?v) as ?max) WHERE { VALUES ?v { 1 2 7 undef 3 4 }}";
- try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
- assertThat((Iterable<?>) result).isNotNull();
- assertThat(result.hasNext()).isTrue();
- assertThat(result.next().getValue("max").stringValue()).isEqualTo("7");
- assertThat((Iterable<?>) result).isEmpty();
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String query = "SELECT (MAX(?v) as ?max) WHERE { VALUES ?v { 1 2 7 undef 3 4 }}";
+ try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
+ assertThat((Iterable<?>) result).isNotNull();
+ assertThat(result.hasNext()).isTrue();
+ assertThat(result.next().getValue("max").stringValue()).isEqualTo("7");
+ assertThat((Iterable<?>) result).isEmpty();
+ }
+ } finally {
+ closeRepository(repo);
}
}
private void testSES2361UndefCount() {
String query = "SELECT (COUNT(?v) as ?c) WHERE { VALUES ?v { 1 2 undef 3 4 }}";
- try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
- assertThat((Iterable<?>) result).isNotNull();
- assertThat(result.hasNext()).isTrue();
- assertThat(result.next().getValue("c").stringValue()).isEqualTo("4");
- assertThat((Iterable<?>) result).isEmpty();
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
+ assertThat((Iterable<?>) result).isNotNull();
+ assertThat(result.hasNext()).isTrue();
+ assertThat(result.next().getValue("c").stringValue()).isEqualTo("4");
+ assertThat((Iterable<?>) result).isEmpty();
+ }
+ } finally {
+ closeRepository(repo);
}
}
private void testSES2361UndefCountWildcard() {
String query = "SELECT (COUNT(*) as ?c) WHERE { VALUES ?v { 1 2 undef 3 4 }}";
- try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
- assertThat((Iterable<?>) result).isNotNull();
- assertThat(result.hasNext()).isTrue();
- assertThat(result.next().getValue("c").stringValue()).isEqualTo("4");
- assertThat((Iterable<?>) result).isEmpty();
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
+ assertThat((Iterable<?>) result).isNotNull();
+ assertThat(result.hasNext()).isTrue();
+ assertThat(result.next().getValue("c").stringValue()).isEqualTo("4");
+ assertThat((Iterable<?>) result).isEmpty();
+ }
+ } finally {
+ closeRepository(repo);
}
}
private void testSES2361UndefSum() {
- String query = "SELECT (SUM(?v) as ?s) WHERE { VALUES ?v { 1 2 undef 3 4 }}";
- try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
- assertThat((Iterable<?>) result).isNotNull();
- assertThat(result.hasNext()).isTrue();
- assertThat(result.next().getValue("s").stringValue()).isEqualTo("10");
- assertThat((Iterable<?>) result).isEmpty();
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String query = "SELECT (SUM(?v) as ?s) WHERE { VALUES ?v { 1 2 undef 3 4 }}";
+ try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
+ assertThat((Iterable<?>) result).isNotNull();
+ assertThat(result.hasNext()).isTrue();
+ assertThat(result.next().getValue("s").stringValue()).isEqualTo("10");
+ assertThat((Iterable<?>) result).isEmpty();
+ }
+ } finally {
+ closeRepository(repo);
}
}
private void testSES1979MinMaxInf() throws Exception {
- loadTestData("/testdata-query/dataset-ses1979.trig");
- String query = "prefix : select (min(?o) as ?min) (max(?o) as ?max) where { ?s :float ?o }";
-
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-
- try (TupleQueryResult evaluate = tq.evaluate()) {
- List<BindingSet> result = QueryResults.asList(evaluate);
- assertThat((Iterable<?>) result).isNotNull().hasSize(1);
- assertThat(result.get(0).getValue("min")).isEqualTo(literal(Float.NEGATIVE_INFINITY));
- assertThat(result.get(0).getValue("max")).isEqualTo(literal(Float.POSITIVE_INFINITY));
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/dataset-ses1979.trig", conn);
+ String query = "prefix : select (min(?o) as ?min) (max(?o) as ?max) where { ?s :float ?o }";
+
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+
+ try (TupleQueryResult evaluate = tq.evaluate()) {
+ List<BindingSet> result = QueryResults.asList(evaluate);
+ assertThat((Iterable<?>) result).isNotNull().hasSize(1);
+ assertThat(result.get(0).getValue("min")).isEqualTo(literal(Float.NEGATIVE_INFINITY));
+ assertThat(result.get(0).getValue("max")).isEqualTo(literal(Float.POSITIVE_INFINITY));
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
+ }
+ } finally {
+ closeRepository(repo);
}
}
private void testGroupConcatDistinct() throws Exception {
- loadTestData("/testdata-query/dataset-query.trig");
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/dataset-query.trig", conn);
- String query = getNamespaceDeclarations() + "SELECT (GROUP_CONCAT(DISTINCT ?l) AS ?concat)"
- + "WHERE { ex:groupconcat-test ?p ?l . }";
+ String query = getNamespaceDeclarations() + "SELECT (GROUP_CONCAT(DISTINCT ?l) AS ?concat)"
+ + "WHERE { ex:groupconcat-test ?p ?l . }";
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- try (TupleQueryResult result = tq.evaluate()) {
- assertThat((Iterable<?>) result).isNotNull();
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertThat((Iterable<?>) result).isNotNull();
- while (result.hasNext()) {
- BindingSet bs = result.next();
- assertThat(bs).isNotNull();
+ while (result.hasNext()) {
+ BindingSet bs = result.next();
+ assertThat(bs).isNotNull();
- Value concat = bs.getValue("concat");
+ Value concat = bs.getValue("concat");
- assertThat(concat).isInstanceOf(Literal.class);
+ assertThat(concat).isInstanceOf(Literal.class);
- String lexValue = ((Literal) concat).getLabel();
+ String lexValue = ((Literal) concat).getLabel();
- int occ = countCharOccurrences(lexValue, 'a');
- assertThat(occ).isEqualTo(1);
- occ = countCharOccurrences(lexValue, 'b');
- assertThat(occ).isEqualTo(1);
- occ = countCharOccurrences(lexValue, 'c');
- assertThat(occ).isEqualTo(1);
- occ = countCharOccurrences(lexValue, 'd');
- assertThat(occ).isEqualTo(1);
+ int occ = countCharOccurrences(lexValue, 'a');
+ assertThat(occ).isEqualTo(1);
+ occ = countCharOccurrences(lexValue, 'b');
+ assertThat(occ).isEqualTo(1);
+ occ = countCharOccurrences(lexValue, 'c');
+ assertThat(occ).isEqualTo(1);
+ occ = countCharOccurrences(lexValue, 'd');
+ assertThat(occ).isEqualTo(1);
+ }
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
}
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ } finally {
+ closeRepository(repo);
}
}
private void testGroupConcatNonDistinct() throws Exception {
- loadTestData("/testdata-query/dataset-query.trig");
- String query = getNamespaceDeclarations() + "SELECT (GROUP_CONCAT(?l) AS ?concat)"
- + "WHERE { ex:groupconcat-test ?p ?l . }";
-
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-
- try (TupleQueryResult result = tq.evaluate()) {
- assertThat((Iterable<?>) result).isNotNull();
-
- while (result.hasNext()) {
- BindingSet bs = result.next();
- assertThat(bs).isNotNull();
-
- Value concat = bs.getValue("concat");
-
- assertThat(concat).isInstanceOf(Literal.class);
-
- String lexValue = ((Literal) concat).getLabel();
-
- int occ = countCharOccurrences(lexValue, 'a');
- assertThat(occ).isEqualTo(1);
- occ = countCharOccurrences(lexValue, 'b');
- assertThat(occ).isEqualTo(2);
- occ = countCharOccurrences(lexValue, 'c');
- assertThat(occ).isEqualTo(2);
- occ = countCharOccurrences(lexValue, 'd');
- assertThat(occ).isEqualTo(1);
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/dataset-query.trig", conn);
+ String query = getNamespaceDeclarations() + "SELECT (GROUP_CONCAT(?l) AS ?concat)"
+ + "WHERE { ex:groupconcat-test ?p ?l . }";
+
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertThat((Iterable<?>) result).isNotNull();
+
+ while (result.hasNext()) {
+ BindingSet bs = result.next();
+ assertThat(bs).isNotNull();
+
+ Value concat = bs.getValue("concat");
+
+ assertThat(concat).isInstanceOf(Literal.class);
+
+ String lexValue = ((Literal) concat).getLabel();
+
+ int occ = countCharOccurrences(lexValue, 'a');
+ assertThat(occ).isEqualTo(1);
+ occ = countCharOccurrences(lexValue, 'b');
+ assertThat(occ).isEqualTo(2);
+ occ = countCharOccurrences(lexValue, 'c');
+ assertThat(occ).isEqualTo(2);
+ occ = countCharOccurrences(lexValue, 'd');
+ assertThat(occ).isEqualTo(1);
+ }
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
}
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ } finally {
+ closeRepository(repo);
}
}
private void testSES1970CountDistinctWildcard() throws Exception {
- loadTestData("/testdata-query/dataset-ses1970.trig");
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/dataset-ses1970.trig", conn);
- String query = "SELECT (COUNT(DISTINCT *) AS ?c) {?s ?p ?o }";
+ String query = "SELECT (COUNT(DISTINCT *) AS ?c) {?s ?p ?o }";
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- try (TupleQueryResult result = tq.evaluate()) {
- assertThat((Iterable<?>) result).isNotNull();
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertThat((Iterable<?>) result).isNotNull();
- assertThat(result.hasNext()).isTrue();
- BindingSet s = result.next();
- assertThat(getIntValue(s.getValue("c"), 0)).isEqualTo(3);
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ assertThat(result.hasNext()).isTrue();
+ BindingSet s = result.next();
+ assertThat(getIntValue(s.getValue("c"), 0)).isEqualTo(3);
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
+ }
+ } finally {
+ closeRepository(repo);
}
}
private void testCountHaving() {
- BNode bnode1 = bnode();
- BNode bnode2 = bnode();
- BNode bnode3 = bnode();
-
- conn.add(bnode3, FOAF.KNOWS, bnode());
- conn.add(bnode1, FOAF.KNOWS, bnode());
- conn.add(bnode1, FOAF.KNOWS, bnode());
- conn.add(bnode2, FOAF.KNOWS, bnode());
- conn.add(bnode3, FOAF.KNOWS, bnode());
- conn.add(bnode3, FOAF.KNOWS, bnode());
- conn.add(bnode1, FOAF.KNOWS, bnode());
-
- String query = "SELECT ?a WHERE { ?a ?b ?c } GROUP BY ?a HAVING( (COUNT(?c) > 1 ) && ( COUNT(?c) != 0 ) ) ";
- try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
- List<BindingSet> collect = QueryResults.asList(result);
- assertThat(collect).hasSize(2);
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ BNode bnode1 = bnode();
+ BNode bnode2 = bnode();
+ BNode bnode3 = bnode();
+
+ conn.add(bnode3, FOAF.KNOWS, bnode());
+ conn.add(bnode1, FOAF.KNOWS, bnode());
+ conn.add(bnode1, FOAF.KNOWS, bnode());
+ conn.add(bnode2, FOAF.KNOWS, bnode());
+ conn.add(bnode3, FOAF.KNOWS, bnode());
+ conn.add(bnode3, FOAF.KNOWS, bnode());
+ conn.add(bnode1, FOAF.KNOWS, bnode());
+
+ String query = "SELECT ?a WHERE { ?a ?b ?c } GROUP BY ?a HAVING( (COUNT(?c) > 1 ) && ( COUNT(?c) != 0 ) ) ";
+ try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
+ List<BindingSet> collect = QueryResults.asList(result);
+ assertThat(collect).hasSize(2);
+ }
+ } finally {
+ closeRepository(repo);
}
}
private void testSum() {
- mixedDataForNumericAggregates();
-
- String query = "SELECT ?a (SUM(?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate ";
- try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
- List<BindingSet> collect = QueryResults.asList(result);
- int i = 0;
- assertThat(collect.get(i++).getValue("aggregate")).isNull();
- assertThat(collect.get(i++).getValue("aggregate")).isNull();
- assertThat(collect.get(i++).getValue("aggregate")).isNull();
- assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(30.11));
- assertThat(collect.get(i++).getValue("aggregate"))
- .isEqualTo(literal(new BigDecimal("89.4786576482391284723864721567342354783275234")));
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ mixedDataForNumericAggregates(conn);
+ String query = "SELECT ?a (SUM(?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate ";
+ try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
+ List<BindingSet> collect = QueryResults.asList(result);
+ int i = 0;
+ assertThat(collect.get(i++).getValue("aggregate")).isNull();
+ assertThat(collect.get(i++).getValue("aggregate")).isNull();
+ assertThat(collect.get(i++).getValue("aggregate")).isNull();
+ assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(30.11));
+ assertThat(collect.get(i++).getValue("aggregate"))
+ .isEqualTo(literal(new BigDecimal("89.4786576482391284723864721567342354783275234")));
- }
+ }
+ } finally {
+ closeRepository(repo);
+ }
}
private void testDistinctSum() {
- mixedDataForNumericAggregates();
-
- String query = "SELECT ?a (SUM(DISTINCT ?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate ";
- try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
- List<BindingSet> collect = QueryResults.asList(result);
- int i = 0;
- assertThat(collect.get(i++).getValue("aggregate")).isNull();
- assertThat(collect.get(i++).getValue("aggregate")).isNull();
- assertThat(collect.get(i++).getValue("aggregate")).isNull();
- assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(30.11));
- assertThat(collect.get(i++).getValue("aggregate"))
- .isEqualTo(literal(new BigDecimal("55.4786576482391284723864721567342354783275234")));
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ mixedDataForNumericAggregates(conn);
+
+ String query = "SELECT ?a (SUM(DISTINCT ?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate ";
+ try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
+ List<BindingSet> collect = QueryResults.asList(result);
+ int i = 0;
+ assertThat(collect.get(i++).getValue("aggregate")).isNull();
+ assertThat(collect.get(i++).getValue("aggregate")).isNull();
+ assertThat(collect.get(i++).getValue("aggregate")).isNull();
+ assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(30.11));
+ assertThat(collect.get(i++).getValue("aggregate"))
+ .isEqualTo(literal(new BigDecimal("55.4786576482391284723864721567342354783275234")));
+ }
+ } finally {
+ closeRepository(repo);
}
}
private void testAvg() {
- mixedDataForNumericAggregates();
-
- String query = "SELECT ?a (AVG(?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate ";
- try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
- List<BindingSet> collect = QueryResults.asList(result);
- int i = 0;
- assertThat(collect.get(i++).getValue("aggregate")).isNull();
- assertThat(collect.get(i++).getValue("aggregate")).isNull();
- assertThat(collect.get(i++).getValue("aggregate")).isNull();
- assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(15.055));
- assertThat(collect.get(i++).getValue("aggregate"))
- .isEqualTo(literal(new BigDecimal("17.89573152964782569447729443134684709566550468")));
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ mixedDataForNumericAggregates(conn);
+
+ String query = "SELECT ?a (AVG(?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate ";
+ try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
+ List<BindingSet> collect = QueryResults.asList(result);
+ int i = 0;
+ assertThat(collect.get(i++).getValue("aggregate")).isNull();
+ assertThat(collect.get(i++).getValue("aggregate")).isNull();
+ assertThat(collect.get(i++).getValue("aggregate")).isNull();
+ assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(15.055));
+ assertThat(collect.get(i++).getValue("aggregate"))
+ .isEqualTo(literal(new BigDecimal("17.89573152964782569447729443134684709566550468")));
+ }
+ } finally {
+ closeRepository(repo);
}
}
private void testDistinctAvg() {
- mixedDataForNumericAggregates();
-
- String query = "SELECT ?a (AVG(DISTINCT ?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate ";
- try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
- List<BindingSet> collect = QueryResults.asList(result);
- int i = 0;
- assertThat(collect.get(i++).getValue("aggregate")).isNull();
- assertThat(collect.get(i++).getValue("aggregate")).isNull();
- assertThat(collect.get(i++).getValue("aggregate")).isNull();
-
- assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(15.055));
- assertThat(collect.get(i++).getValue("aggregate"))
- .isEqualTo(literal(new BigDecimal("18.492885882746376157462157")));
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ mixedDataForNumericAggregates(conn);
+
+ String query = "SELECT ?a (AVG(DISTINCT ?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate ";
+ try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
+ List<BindingSet> collect = QueryResults.asList(result);
+ int i = 0;
+ assertThat(collect.get(i++).getValue("aggregate")).isNull();
+ assertThat(collect.get(i++).getValue("aggregate")).isNull();
+ assertThat(collect.get(i++).getValue("aggregate")).isNull();
+
+ assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(15.055));
+ assertThat(collect.get(i++).getValue("aggregate"))
+ .isEqualTo(literal(new BigDecimal("18.492885882746376157462157")));
+ }
+ } finally {
+ closeRepository(repo);
}
}
private void testMax() {
- mixedDataForNumericAggregates();
-
- String query = "SELECT ?a (MAX(?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate ";
- try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
- List<BindingSet> collect = QueryResults.asList(result);
- int i = 0;
- assertThat(collect.get(i++).getValue("aggregate"))
- .isEqualTo(literal(new BigDecimal("19.4786576482391284723864721567342354783275234")));
- assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(23));
- assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(23));
- assertThat(collect.get(i++).getValue("aggregate"))
- .isEqualTo(literal("2022-01-01T01:01:01.000000001Z", CoreDatatype.XSD.DATETIME));
- assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal("3"));
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ mixedDataForNumericAggregates(conn);
+
+ String query = "SELECT ?a (MAX(?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate ";
+ try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
+ List<BindingSet> collect = QueryResults.asList(result);
+ int i = 0;
+ assertThat(collect.get(i++).getValue("aggregate"))
+ .isEqualTo(literal(new BigDecimal("19.4786576482391284723864721567342354783275234")));
+ assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(23));
+ assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(23));
+ assertThat(collect.get(i++).getValue("aggregate"))
+ .isEqualTo(literal("2022-01-01T01:01:01.000000001Z", CoreDatatype.XSD.DATETIME));
+ assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal("3"));
+ }
+ } finally {
+ closeRepository(repo);
}
-
}
private void testDistinctMax() {
- mixedDataForNumericAggregates();
-
- String query = "SELECT ?a (MAX(DISTINCT ?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate ";
- try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
- List<BindingSet> collect = QueryResults.asList(result);
- int i = 0;
- assertThat(collect.get(i++).getValue("aggregate"))
- .isEqualTo(literal(new BigDecimal("19.4786576482391284723864721567342354783275234")));
- assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(23));
- assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(23));
- assertThat(collect.get(i++).getValue("aggregate"))
- .isEqualTo(literal("2022-01-01T01:01:01.000000001Z", CoreDatatype.XSD.DATETIME));
- assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal("3"));
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ mixedDataForNumericAggregates(conn);
+
+ String query = "SELECT ?a (MAX(DISTINCT ?c) as ?aggregate) WHERE { ?a ?b ?c } GROUP BY ?a ORDER BY ?aggregate ";
+ try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate()) {
+ List<BindingSet> collect = QueryResults.asList(result);
+ int i = 0;
+ assertThat(collect.get(i++).getValue("aggregate"))
+ .isEqualTo(literal(new BigDecimal("19.4786576482391284723864721567342354783275234")));
+ assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(23));
+ assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal(23));
+ assertThat(collect.get(i++).getValue("aggregate"))
+ .isEqualTo(literal("2022-01-01T01:01:01.000000001Z", CoreDatatype.XSD.DATETIME));
+ assertThat(collect.get(i++).getValue("aggregate")).isEqualTo(literal("3"));
+ }
+ } finally {
+ closeRepository(repo);
}
}
/**
- * @see https://github.com/eclipse/rdf4j/issues/4290
+ * @see https://github.com/eclipse/rdf4j/issues/4290
*/
private void testCountOrderBy_ImplicitGroup() {
- mixedDataForNumericAggregates();
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ mixedDataForNumericAggregates(conn);
- String query = "select (count(*) as ?c) where { \n" + " ?s ?p ?o .\n" + "} \n" + "order by (?s)";
+ String query = "select (count(*) as ?c) where { \n" + " ?s ?p ?o .\n" + "} \n" + "order by (?s)";
- TupleQuery preparedQuery = conn.prepareTupleQuery(query);
+ TupleQuery preparedQuery = conn.prepareTupleQuery(query);
- List<BindingSet> result = QueryResults.asList(preparedQuery.evaluate());
- assertThat(result).hasSize(1);
+ List<BindingSet> result = QueryResults.asList(preparedQuery.evaluate());
+ assertThat(result).hasSize(1);
- BindingSet bs = result.get(0);
- assertThat(bs.size()).isEqualTo(1);
- assertThat(getIntValue(bs.getValue("c"), 0)).isEqualTo(19);
+ BindingSet bs = result.get(0);
+ assertThat(bs.size()).isEqualTo(1);
+ assertThat(getIntValue(bs.getValue("c"), 0)).isEqualTo(19);
+ } finally {
+ closeRepository(repo);
+ }
}
// private methods
- private void mixedDataForNumericAggregates() {
+ private void mixedDataForNumericAggregates(RepositoryConnection conn) {
IRI node1 = iri("http://example.com/1");
IRI node2 = iri("http://example.com/2");
IRI node3 = iri("http://example.com/3");
diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ArbitraryLengthPathTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ArbitraryLengthPathTest.java
index 66fbe513183..b8da5ce7734 100644
--- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ArbitraryLengthPathTest.java
+++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ArbitraryLengthPathTest.java
@@ -15,6 +15,7 @@
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
+import java.util.function.Supplier;
import java.util.stream.Stream;
import org.eclipse.rdf4j.model.vocabulary.OWL;
@@ -25,10 +26,10 @@
import org.eclipse.rdf4j.query.TupleQueryResult;
import org.eclipse.rdf4j.query.impl.SimpleDataset;
import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest;
import org.eclipse.rdf4j.testsuite.sparql.vocabulary.EX;
import org.junit.jupiter.api.DynamicTest;
-import org.junit.jupiter.api.Test;
/**
* Tests on SPARQL property paths involving * or + operators (arbitrary length paths).
@@ -39,7 +40,7 @@
*/
public class ArbitraryLengthPathTest extends AbstractComplianceTest {
- public ArbitraryLengthPathTest(Repository repo) {
+ public ArbitraryLengthPathTest(Supplier<Repository> repo) {
super(repo);
}
@@ -64,45 +65,49 @@ public Stream<DynamicTest> tests() {
*/
private void testArbitraryLengthPathWithBinding1() throws Exception {
- loadTestData("/testdata-query/alp-testdata.ttl");
- String query = getNamespaceDeclarations() + "SELECT ?parent ?child "
- + "WHERE { ?child a owl:Class . ?child rdfs:subClassOf+ ?parent . }";
-
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-
- try (TupleQueryResult result = tq.evaluate()) {
- // first execute without binding
- assertNotNull(result);
-
- int count = 0;
- while (result.hasNext()) {
- count++;
- BindingSet bs = result.next();
- assertTrue(bs.hasBinding("child"));
- assertTrue(bs.hasBinding("parent"));
- }
- assertEquals(7, count);
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/alp-testdata.ttl", conn);
+ String query = getNamespaceDeclarations() + "SELECT ?parent ?child "
+ + "WHERE { ?child a owl:Class . ?child rdfs:subClassOf+ ?parent . }";
- // execute again, but this time setting a binding
- tq.setBinding("parent", OWL.THING);
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- try (TupleQueryResult result2 = tq.evaluate()) {
- assertNotNull(result2);
+ try (TupleQueryResult result = tq.evaluate()) {
+ // first execute without binding
+ assertNotNull(result);
- count = 0;
- while (result2.hasNext()) {
+ int count = 0;
+ while (result.hasNext()) {
count++;
- BindingSet bs = result2.next();
+ BindingSet bs = result.next();
assertTrue(bs.hasBinding("child"));
assertTrue(bs.hasBinding("parent"));
}
- assertEquals(4, count);
+ assertEquals(7, count);
+
+ // execute again, but this time setting a binding
+ tq.setBinding("parent", OWL.THING);
+
+ try (TupleQueryResult result2 = tq.evaluate()) {
+ assertNotNull(result2);
+
+ count = 0;
+ while (result2.hasNext()) {
+ count++;
+ BindingSet bs = result2.next();
+ assertTrue(bs.hasBinding("child"));
+ assertTrue(bs.hasBinding("parent"));
+ }
+ assertEquals(4, count);
+ }
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
}
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ } finally {
+ closeRepository(repo);
}
-
}
/**
@@ -111,47 +116,51 @@ private void testArbitraryLengthPathWithBinding1() throws Exception {
*/
private void testArbitraryLengthPathWithBinding2() throws Exception {
- loadTestData("/testdata-query/alp-testdata.ttl");
-
- // query without initializing ?child first.
- String query = getNamespaceDeclarations() + "SELECT ?parent ?child "
- + "WHERE { ?child rdfs:subClassOf+ ?parent . }";
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/alp-testdata.ttl", conn);
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ // query without initializing ?child first.
+ String query = getNamespaceDeclarations() + "SELECT ?parent ?child "
+ + "WHERE { ?child rdfs:subClassOf+ ?parent . }";
- try (TupleQueryResult result = tq.evaluate()) {
- // first execute without binding
- assertNotNull(result);
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- int count = 0;
- while (result.hasNext()) {
- count++;
- BindingSet bs = result.next();
- assertTrue(bs.hasBinding("child"));
- assertTrue(bs.hasBinding("parent"));
- }
- assertEquals(7, count);
-
- // execute again, but this time setting a binding
- tq.setBinding("parent", OWL.THING);
-
- try (TupleQueryResult result2 = tq.evaluate()) {
- assertNotNull(result2);
+ try (TupleQueryResult result = tq.evaluate()) {
+ // first execute without binding
+ assertNotNull(result);
- count = 0;
- while (result2.hasNext()) {
+ int count = 0;
+ while (result.hasNext()) {
count++;
- BindingSet bs = result2.next();
+ BindingSet bs = result.next();
assertTrue(bs.hasBinding("child"));
assertTrue(bs.hasBinding("parent"));
}
- assertEquals(4, count);
+ assertEquals(7, count);
+
+ // execute again, but this time setting a binding
+ tq.setBinding("parent", OWL.THING);
+
+ try (TupleQueryResult result2 = tq.evaluate()) {
+ assertNotNull(result2);
+
+ count = 0;
+ while (result2.hasNext()) {
+ count++;
+ BindingSet bs = result2.next();
+ assertTrue(bs.hasBinding("child"));
+ assertTrue(bs.hasBinding("parent"));
+ }
+ assertEquals(4, count);
+ }
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
}
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ } finally {
+ closeRepository(repo);
}
-
}
/**
@@ -160,47 +169,51 @@ private void testArbitraryLengthPathWithBinding2() throws Exception {
*/
private void testArbitraryLengthPathWithBinding3() throws Exception {
- loadTestData("/testdata-query/alp-testdata.ttl");
-
- // binding on child instead of parent.
- String query = getNamespaceDeclarations() + "SELECT ?parent ?child "
- + "WHERE { ?child rdfs:subClassOf+ ?parent . }";
-
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/alp-testdata.ttl", conn);
- try (TupleQueryResult result = tq.evaluate()) {
- // first execute without binding
- assertNotNull(result);
-
- int count = 0;
- while (result.hasNext()) {
- count++;
- BindingSet bs = result.next();
- assertTrue(bs.hasBinding("child"));
- assertTrue(bs.hasBinding("parent"));
- }
- assertEquals(7, count);
+ // binding on child instead of parent.
+ String query = getNamespaceDeclarations() + "SELECT ?parent ?child "
+ + "WHERE { ?child rdfs:subClassOf+ ?parent . }";
- // execute again, but this time setting a binding
- tq.setBinding("child", EX.C);
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- try (TupleQueryResult result2 = tq.evaluate()) {
- assertNotNull(result2);
+ try (TupleQueryResult result = tq.evaluate()) {
+ // first execute without binding
+ assertNotNull(result);
- count = 0;
- while (result2.hasNext()) {
+ int count = 0;
+ while (result.hasNext()) {
count++;
- BindingSet bs = result2.next();
+ BindingSet bs = result.next();
assertTrue(bs.hasBinding("child"));
assertTrue(bs.hasBinding("parent"));
}
- assertEquals(2, count);
+ assertEquals(7, count);
+
+ // execute again, but this time setting a binding
+ tq.setBinding("child", EX.C);
+
+ try (TupleQueryResult result2 = tq.evaluate()) {
+ assertNotNull(result2);
+
+ count = 0;
+ while (result2.hasNext()) {
+ count++;
+ BindingSet bs = result2.next();
+ assertTrue(bs.hasBinding("child"));
+ assertTrue(bs.hasBinding("parent"));
+ }
+ assertEquals(2, count);
+ }
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
}
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ } finally {
+ closeRepository(repo);
}
-
}
/**
@@ -209,47 +222,51 @@ private void testArbitraryLengthPathWithBinding3() throws Exception {
*/
private void testArbitraryLengthPathWithBinding4() throws Exception {
- loadTestData("/testdata-query/alp-testdata.ttl", EX.ALICE);
-
- // binding on child instead of parent.
- String query = getNamespaceDeclarations() + "SELECT ?parent ?child "
- + "WHERE { ?child rdfs:subClassOf+ ?parent . }";
-
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/alp-testdata.ttl", conn, EX.ALICE);
- try (TupleQueryResult result = tq.evaluate()) {
- // first execute without binding
- assertNotNull(result);
+ // binding on child instead of parent.
+ String query = getNamespaceDeclarations() + "SELECT ?parent ?child "
+ + "WHERE { ?child rdfs:subClassOf+ ?parent . }";
- int count = 0;
- while (result.hasNext()) {
- count++;
- BindingSet bs = result.next();
- assertTrue(bs.hasBinding("child"));
- assertTrue(bs.hasBinding("parent"));
- }
- assertEquals(7, count);
-
- // execute again, but this time setting a binding
- tq.setBinding("child", EX.C);
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- try (TupleQueryResult result2 = tq.evaluate()) {
- assertNotNull(result2);
+ try (TupleQueryResult result = tq.evaluate()) {
+ // first execute without binding
+ assertNotNull(result);
- count = 0;
- while (result2.hasNext()) {
+ int count = 0;
+ while (result.hasNext()) {
count++;
- BindingSet bs = result2.next();
+ BindingSet bs = result.next();
assertTrue(bs.hasBinding("child"));
assertTrue(bs.hasBinding("parent"));
}
- assertEquals(2, count);
+ assertEquals(7, count);
+
+ // execute again, but this time setting a binding
+ tq.setBinding("child", EX.C);
+
+ try (TupleQueryResult result2 = tq.evaluate()) {
+ assertNotNull(result2);
+
+ count = 0;
+ while (result2.hasNext()) {
+ count++;
+ BindingSet bs = result2.next();
+ assertTrue(bs.hasBinding("child"));
+ assertTrue(bs.hasBinding("parent"));
+ }
+ assertEquals(2, count);
+ }
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
}
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ } finally {
+ closeRepository(repo);
}
-
}
/**
@@ -258,53 +275,57 @@ private void testArbitraryLengthPathWithBinding4() throws Exception {
*/
private void testArbitraryLengthPathWithBinding5() throws Exception {
- loadTestData("/testdata-query/alp-testdata.ttl", EX.ALICE, EX.BOB);
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/alp-testdata.ttl", conn, EX.ALICE, EX.BOB);
- // binding on child instead of parent.
- String query = getNamespaceDeclarations() + "SELECT ?parent ?child "
- + "WHERE { ?child rdfs:subClassOf+ ?parent . }";
+ // binding on child instead of parent.
+ String query = getNamespaceDeclarations() + "SELECT ?parent ?child "
+ + "WHERE { ?child rdfs:subClassOf+ ?parent . }";
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- try (TupleQueryResult result = tq.evaluate()) {
- // first execute without binding
- assertNotNull(result);
+ try (TupleQueryResult result = tq.evaluate()) {
+ // first execute without binding
+ assertNotNull(result);
- // System.out.println("--- testArbitraryLengthPathWithBinding5
- // ---");
+ // System.out.println("--- testArbitraryLengthPathWithBinding5
+ // ---");
- int count = 0;
- while (result.hasNext()) {
- count++;
- BindingSet bs = result.next();
-
- // System.out.println(bs);
-
- assertTrue(bs.hasBinding("child"));
- assertTrue(bs.hasBinding("parent"));
- }
- assertEquals(7, count);
-
- // execute again, but this time setting a binding
- tq.setBinding("child", EX.C);
+ int count = 0;
+ while (result.hasNext()) {
+ count++;
+ BindingSet bs = result.next();
- try (TupleQueryResult result2 = tq.evaluate()) {
- assertNotNull(result2);
+ // System.out.println(bs);
- count = 0;
- while (result2.hasNext()) {
- count++;
- BindingSet bs = result2.next();
assertTrue(bs.hasBinding("child"));
assertTrue(bs.hasBinding("parent"));
}
- assertEquals(2, count);
+ assertEquals(7, count);
+
+ // execute again, but this time setting a binding
+ tq.setBinding("child", EX.C);
+
+ try (TupleQueryResult result2 = tq.evaluate()) {
+ assertNotNull(result2);
+
+ count = 0;
+ while (result2.hasNext()) {
+ count++;
+ BindingSet bs = result2.next();
+ assertTrue(bs.hasBinding("child"));
+ assertTrue(bs.hasBinding("parent"));
+ }
+ assertEquals(2, count);
+ }
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
}
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ } finally {
+ closeRepository(repo);
}
-
}
/**
@@ -313,53 +334,57 @@ private void testArbitraryLengthPathWithBinding5() throws Exception {
*/
private void testArbitraryLengthPathWithBinding6() throws Exception {
- loadTestData("/testdata-query/alp-testdata.ttl", EX.ALICE, EX.BOB, EX.MARY);
-
- // binding on child instead of parent.
- String query = getNamespaceDeclarations() + "SELECT ?parent ?child "
- + "WHERE { ?child rdfs:subClassOf+ ?parent . }";
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/alp-testdata.ttl", conn, EX.ALICE, EX.BOB, EX.MARY);
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ // binding on child instead of parent.
+ String query = getNamespaceDeclarations() + "SELECT ?parent ?child "
+ + "WHERE { ?child rdfs:subClassOf+ ?parent . }";
- try (TupleQueryResult result = tq.evaluate()) {
- // first execute without binding
- assertNotNull(result);
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- // System.out.println("--- testArbitraryLengthPathWithBinding6
- // ---");
+ try (TupleQueryResult result = tq.evaluate()) {
+ // first execute without binding
+ assertNotNull(result);
- int count = 0;
- while (result.hasNext()) {
- count++;
- BindingSet bs = result.next();
+ // System.out.println("--- testArbitraryLengthPathWithBinding6
+ // ---");
- // System.out.println(bs);
-
- assertTrue(bs.hasBinding("child"));
- assertTrue(bs.hasBinding("parent"));
- }
- assertEquals(7, count);
-
- // execute again, but this time setting a binding
- tq.setBinding("child", EX.C);
+ int count = 0;
+ while (result.hasNext()) {
+ count++;
+ BindingSet bs = result.next();
- try (TupleQueryResult result2 = tq.evaluate()) {
- assertNotNull(result2);
+ // System.out.println(bs);
- count = 0;
- while (result2.hasNext()) {
- count++;
- BindingSet bs = result2.next();
assertTrue(bs.hasBinding("child"));
assertTrue(bs.hasBinding("parent"));
}
- assertEquals(2, count);
+ assertEquals(7, count);
+
+ // execute again, but this time setting a binding
+ tq.setBinding("child", EX.C);
+
+ try (TupleQueryResult result2 = tq.evaluate()) {
+ assertNotNull(result2);
+
+ count = 0;
+ while (result2.hasNext()) {
+ count++;
+ BindingSet bs = result2.next();
+ assertTrue(bs.hasBinding("child"));
+ assertTrue(bs.hasBinding("parent"));
+ }
+ assertEquals(2, count);
+ }
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
}
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ } finally {
+ closeRepository(repo);
}
-
}
/**
@@ -368,56 +393,60 @@ private void testArbitraryLengthPathWithBinding6() throws Exception {
*/
private void testArbitraryLengthPathWithBinding7() throws Exception {
- loadTestData("/testdata-query/alp-testdata.ttl", EX.ALICE, EX.BOB, EX.MARY);
-
- // binding on child instead of parent.
- String query = getNamespaceDeclarations() + "SELECT ?parent ?child "
- + "WHERE { ?child rdfs:subClassOf+ ?parent . }";
-
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- SimpleDataset dt = new SimpleDataset();
- dt.addDefaultGraph(EX.ALICE);
- tq.setDataset(dt);
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/alp-testdata.ttl", conn, EX.ALICE, EX.BOB, EX.MARY);
- try (TupleQueryResult result = tq.evaluate()) {
- // first execute without binding
- assertNotNull(result);
+ // binding on child instead of parent.
+ String query = getNamespaceDeclarations() + "SELECT ?parent ?child "
+ + "WHERE { ?child rdfs:subClassOf+ ?parent . }";
- // System.out.println("--- testArbitraryLengthPathWithBinding7
- // ---");
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ SimpleDataset dt = new SimpleDataset();
+ dt.addDefaultGraph(EX.ALICE);
+ tq.setDataset(dt);
- int count = 0;
- while (result.hasNext()) {
- count++;
- BindingSet bs = result.next();
+ try (TupleQueryResult result = tq.evaluate()) {
+ // first execute without binding
+ assertNotNull(result);
- // System.out.println(bs);
+ // System.out.println("--- testArbitraryLengthPathWithBinding7
+ // ---");
- assertTrue(bs.hasBinding("child"));
- assertTrue(bs.hasBinding("parent"));
- }
- assertEquals(7, count);
-
- // execute again, but this time setting a binding
- tq.setBinding("child", EX.C);
+ int count = 0;
+ while (result.hasNext()) {
+ count++;
+ BindingSet bs = result.next();
- try (TupleQueryResult result2 = tq.evaluate()) {
- assertNotNull(result2);
+ // System.out.println(bs);
- count = 0;
- while (result2.hasNext()) {
- count++;
- BindingSet bs = result2.next();
assertTrue(bs.hasBinding("child"));
assertTrue(bs.hasBinding("parent"));
}
- assertEquals(2, count);
+ assertEquals(7, count);
+
+ // execute again, but this time setting a binding
+ tq.setBinding("child", EX.C);
+
+ try (TupleQueryResult result2 = tq.evaluate()) {
+ assertNotNull(result2);
+
+ count = 0;
+ while (result2.hasNext()) {
+ count++;
+ BindingSet bs = result2.next();
+ assertTrue(bs.hasBinding("child"));
+ assertTrue(bs.hasBinding("parent"));
+ }
+ assertEquals(2, count);
+ }
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
}
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ } finally {
+ closeRepository(repo);
}
-
}
/**
@@ -426,55 +455,59 @@ private void testArbitraryLengthPathWithBinding7() throws Exception {
*/
private void testArbitraryLengthPathWithBinding8() throws Exception {
- loadTestData("/testdata-query/alp-testdata.ttl", EX.ALICE, EX.BOB, EX.MARY);
-
- // binding on child instead of parent.
- String query = getNamespaceDeclarations() + "SELECT ?parent ?child "
- + "WHERE { ?child rdfs:subClassOf+ ?parent . }";
-
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- SimpleDataset dt = new SimpleDataset();
- dt.addDefaultGraph(EX.ALICE);
- dt.addDefaultGraph(EX.BOB);
- tq.setDataset(dt);
-
- try (TupleQueryResult result = tq.evaluate()) {
- // first execute without binding
- assertNotNull(result);
- // System.out.println("--- testArbitraryLengthPathWithBinding8
- // ---");
- int count = 0;
- while (result.hasNext()) {
- count++;
- BindingSet bs = result.next();
-
- // System.out.println(bs);
-
- assertTrue(bs.hasBinding("child"));
- assertTrue(bs.hasBinding("parent"));
- }
- assertEquals(7, count);
-
- // execute again, but this time setting a binding
- tq.setBinding("child", EX.C);
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/alp-testdata.ttl", conn, EX.ALICE, EX.BOB, EX.MARY);
+
+ // binding on child instead of parent.
+ String query = getNamespaceDeclarations() + "SELECT ?parent ?child "
+ + "WHERE { ?child rdfs:subClassOf+ ?parent . }";
+
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ SimpleDataset dt = new SimpleDataset();
+ dt.addDefaultGraph(EX.ALICE);
+ dt.addDefaultGraph(EX.BOB);
+ tq.setDataset(dt);
+
+ try (TupleQueryResult result = tq.evaluate()) {
+ // first execute without binding
+ assertNotNull(result);
+ // System.out.println("--- testArbitraryLengthPathWithBinding8
+ // ---");
+ int count = 0;
+ while (result.hasNext()) {
+ count++;
+ BindingSet bs = result.next();
- try (TupleQueryResult result2 = tq.evaluate()) {
- assertNotNull(result2);
+ // System.out.println(bs);
- count = 0;
- while (result2.hasNext()) {
- count++;
- BindingSet bs = result2.next();
assertTrue(bs.hasBinding("child"));
assertTrue(bs.hasBinding("parent"));
}
- assertEquals(2, count);
+ assertEquals(7, count);
+
+ // execute again, but this time setting a binding
+ tq.setBinding("child", EX.C);
+
+ try (TupleQueryResult result2 = tq.evaluate()) {
+ assertNotNull(result2);
+
+ count = 0;
+ while (result2.hasNext()) {
+ count++;
+ BindingSet bs = result2.next();
+ assertTrue(bs.hasBinding("child"));
+ assertTrue(bs.hasBinding("parent"));
+ }
+ assertEquals(2, count);
+ }
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
}
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ } finally {
+ closeRepository(repo);
}
-
}
/**
@@ -483,28 +516,32 @@ private void testArbitraryLengthPathWithBinding8() throws Exception {
*/
private void testArbitraryLengthPathWithFilter1() throws Exception {
- loadTestData("/testdata-query/alp-testdata.ttl");
- String query = getNamespaceDeclarations() + "SELECT ?parent ?child "
- + "WHERE { ?child a owl:Class . ?child rdfs:subClassOf+ ?parent . FILTER (?parent = owl:Thing) }";
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/alp-testdata.ttl", conn);
+ String query = getNamespaceDeclarations() + "SELECT ?parent ?child "
+ + "WHERE { ?child a owl:Class . ?child rdfs:subClassOf+ ?parent . FILTER (?parent = owl:Thing) }";
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- try (TupleQueryResult result = tq.evaluate()) {
- assertNotNull(result);
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertNotNull(result);
- int count = 0;
- while (result.hasNext()) {
- count++;
- BindingSet bs = result.next();
- assertTrue(bs.hasBinding("child"));
- assertTrue(bs.hasBinding("parent"));
+ int count = 0;
+ while (result.hasNext()) {
+ count++;
+ BindingSet bs = result.next();
+ assertTrue(bs.hasBinding("child"));
+ assertTrue(bs.hasBinding("parent"));
+ }
+ assertEquals(4, count);
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
}
- assertEquals(4, count);
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ } finally {
+ closeRepository(repo);
}
-
}
/**
@@ -513,28 +550,32 @@ private void testArbitraryLengthPathWithFilter1() throws Exception {
*/
private void testArbitraryLengthPathWithFilter2() throws Exception {
- loadTestData("/testdata-query/alp-testdata.ttl");
- String query = getNamespaceDeclarations() + "SELECT ?parent ?child "
- + "WHERE { ?child rdfs:subClassOf+ ?parent . FILTER (?parent = owl:Thing) }";
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/alp-testdata.ttl", conn);
+ String query = getNamespaceDeclarations() + "SELECT ?parent ?child "
+ + "WHERE { ?child rdfs:subClassOf+ ?parent . FILTER (?parent = owl:Thing) }";
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- try (TupleQueryResult result = tq.evaluate()) {
- assertNotNull(result);
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertNotNull(result);
- int count = 0;
- while (result.hasNext()) {
- count++;
- BindingSet bs = result.next();
- assertTrue(bs.hasBinding("child"));
- assertTrue(bs.hasBinding("parent"));
+ int count = 0;
+ while (result.hasNext()) {
+ count++;
+ BindingSet bs = result.next();
+ assertTrue(bs.hasBinding("child"));
+ assertTrue(bs.hasBinding("parent"));
+ }
+ assertEquals(4, count);
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
}
- assertEquals(4, count);
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ } finally {
+ closeRepository(repo);
}
-
}
/**
@@ -543,52 +584,60 @@ private void testArbitraryLengthPathWithFilter2() throws Exception {
*/
private void testArbitraryLengthPathWithFilter3() throws Exception {
- loadTestData("/testdata-query/alp-testdata.ttl");
- String query = getNamespaceDeclarations() + "SELECT ?parent ?child "
- + "WHERE { ?child rdfs:subClassOf+ ?parent . FILTER (?child = ) }";
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/alp-testdata.ttl", conn);
+ String query = getNamespaceDeclarations() + "SELECT ?parent ?child "
+ + "WHERE { ?child rdfs:subClassOf+ ?parent . FILTER (?child = ) }";
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- try (TupleQueryResult result = tq.evaluate()) {
- assertNotNull(result);
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertNotNull(result);
- int count = 0;
- while (result.hasNext()) {
- count++;
- BindingSet bs = result.next();
- assertTrue(bs.hasBinding("child"));
- assertTrue(bs.hasBinding("parent"));
+ int count = 0;
+ while (result.hasNext()) {
+ count++;
+ BindingSet bs = result.next();
+ assertTrue(bs.hasBinding("child"));
+ assertTrue(bs.hasBinding("parent"));
+ }
+ assertEquals(2, count);
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
}
- assertEquals(2, count);
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ } finally {
+ closeRepository(repo);
}
-
}
private void testPropertyPathInTree() throws Exception {
- loadTestData("/testdata-query/dataset-query.trig");
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/dataset-query.trig", conn);
- String query = getNamespaceDeclarations() + " SELECT ?node ?name " + " FROM ex:tree-graph "
- + " WHERE { ?node ex:hasParent+ ex:b . ?node ex:name ?name . }";
+ String query = getNamespaceDeclarations() + " SELECT ?node ?name " + " FROM ex:tree-graph "
+ + " WHERE { ?node ex:hasParent+ ex:b . ?node ex:name ?name . }";
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- try (TupleQueryResult result = tq.evaluate()) {
- assertNotNull(result);
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertNotNull(result);
- while (result.hasNext()) {
- BindingSet bs = result.next();
- assertNotNull(bs);
+ while (result.hasNext()) {
+ BindingSet bs = result.next();
+ assertNotNull(bs);
- // System.out.println(bs);
+ // System.out.println(bs);
+ }
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
}
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ } finally {
+ closeRepository(repo);
}
-
}
}
diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BasicTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BasicTest.java
index 4fc374278a0..f558459abf4 100644
--- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BasicTest.java
+++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BasicTest.java
@@ -14,6 +14,7 @@
import static org.junit.jupiter.api.Assertions.fail;
import java.util.List;
+import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@@ -25,6 +26,7 @@
import org.eclipse.rdf4j.query.BindingSet;
import org.eclipse.rdf4j.query.QueryLanguage;
import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest;
import org.eclipse.rdf4j.testsuite.sparql.vocabulary.EX;
import org.junit.jupiter.api.DynamicTest;
@@ -38,23 +40,27 @@
*/
public class BasicTest extends AbstractComplianceTest {
- public BasicTest(Repository repo) {
+ public BasicTest(Supplier<Repository> repo) {
super(repo);
}
private void testIdenticalVariablesInStatementPattern() {
- conn.add(EX.ALICE, DC.PUBLISHER, EX.BOB);
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ conn.add(EX.ALICE, DC.PUBLISHER, EX.BOB);
- String queryBuilder = "SELECT ?publisher "
- + "{ ?publisher ?publisher }";
+ String queryBuilder = "SELECT ?publisher "
+ + "{ ?publisher ?publisher }";
- conn.prepareTupleQuery(QueryLanguage.SPARQL, queryBuilder).evaluate(new AbstractTupleQueryResultHandler() {
+ conn.prepareTupleQuery(QueryLanguage.SPARQL, queryBuilder).evaluate(new AbstractTupleQueryResultHandler() {
- @Override
- public void handleSolution(BindingSet bindingSet) {
- fail("nobody is self published");
- }
- });
+ @Override
+ public void handleSolution(BindingSet bindingSet) {
+ fail("nobody is self published");
+ }
+ });
+ }
+ closeRepository(repo);
}
public Stream<DynamicTest> tests() {
@@ -64,26 +70,26 @@ public Stream<DynamicTest> tests() {
@Test
public void testIdenticalVariablesSubjectContextInStatementPattern() {
- conn.add(EX.ALICE, FOAF.KNOWS, EX.BOB, EX.ALICE);
- conn.add(EX.ALICE, RDF.TYPE, FOAF.PERSON, EX.ALICE);
- conn.add(EX.ALICE, FOAF.KNOWS, EX.A, EX.BOB);
- conn.add(EX.ALICE, FOAF.KNOWS, EX.B, EX.BOB);
- conn.add(EX.ALICE, FOAF.KNOWS, EX.C, EX.BOB);
- conn.add(EX.ALICE, FOAF.KNOWS, EX.MARY, EX.BOB);
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ conn.add(EX.ALICE, FOAF.KNOWS, EX.BOB, EX.ALICE);
+ conn.add(EX.ALICE, RDF.TYPE, FOAF.PERSON, EX.ALICE);
+ conn.add(EX.ALICE, FOAF.KNOWS, EX.A, EX.BOB);
+ conn.add(EX.ALICE, FOAF.KNOWS, EX.B, EX.BOB);
+ conn.add(EX.ALICE, FOAF.KNOWS, EX.C, EX.BOB);
+ conn.add(EX.ALICE, FOAF.KNOWS, EX.MARY, EX.BOB);
- String queryBuilder = "SELECT ?knows { " +
- " graph ?alice {" +
- " ?alice a <" + FOAF.PERSON + ">; " +
- " <" + FOAF.KNOWS + "> ?knows ." +
- " }" +
- "}";
+ String queryBuilder = "SELECT ?knows { " + " graph ?alice {" + " ?alice a <" + FOAF.PERSON + ">; "
+ + " <" + FOAF.KNOWS + "> ?knows ." + " }" + "}";
- try (Stream<BindingSet> stream = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryBuilder)
- .evaluate()
- .stream()) {
- List<Value> knows = stream.map(b -> b.getValue("knows")).collect(Collectors.toList());
- assertEquals(List.of(EX.BOB), knows);
+ try (Stream<BindingSet> stream = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryBuilder)
+ .evaluate()
+ .stream()) {
+ List<Value> knows = stream.map(b -> b.getValue("knows")).collect(Collectors.toList());
+ assertEquals(List.of(EX.BOB), knows);
+ }
}
+ closeRepository(repo);
}
}
diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BindTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BindTest.java
index fcbfe95fb32..2d37d2067fd 100644
--- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BindTest.java
+++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BindTest.java
@@ -17,6 +17,7 @@
import static org.junit.jupiter.api.Assertions.assertNull;
import java.util.List;
+import java.util.function.Supplier;
import java.util.stream.Stream;
import org.eclipse.rdf4j.model.IRI;
@@ -33,6 +34,7 @@
import org.eclipse.rdf4j.query.TupleQuery;
import org.eclipse.rdf4j.query.TupleQueryResult;
import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest;
import org.junit.jupiter.api.DynamicTest;
@@ -44,7 +46,7 @@
*/
public class BindTest extends AbstractComplianceTest {
- public BindTest(Repository repo) {
+ public BindTest(Supplier<Repository> repo) {
super(repo);
}
@@ -53,16 +55,21 @@ public BindTest(Repository repo) {
*/
private void testBindError() {
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ conn.prepareUpdate(QueryLanguage.SPARQL, "insert data { _:blank }")
+ .execute();
- conn.prepareUpdate(QueryLanguage.SPARQL, "insert data { _:blank }").execute();
+ String qb = "SELECT * \n" + "WHERE { \n" + " VALUES (?NAValue) { () } \n "
+ + " BIND(IF(?NAValue != , ?NAValue, ?notBoundVar) as ?ValidNAValue) \n "
+ + " { ?disjClass (owl:disjointWith|^owl:disjointWith)? ?disjClass2 . }\n" + "}\n";
- String qb = "SELECT * \n" + "WHERE { \n" + " VALUES (?NAValue) { () } \n "
- + " BIND(IF(?NAValue != , ?NAValue, ?notBoundVar) as ?ValidNAValue) \n "
- + " { ?disjClass (owl:disjointWith|^owl:disjointWith)? ?disjClass2 . }\n" + "}\n";
+ List<BindingSet> result = QueryResults.asList(conn.prepareTupleQuery(qb).evaluate());
- List<BindingSet> result = QueryResults.asList(conn.prepareTupleQuery(qb).evaluate());
-
- assertEquals(2, result.size(), "query should return 2 solutions");
+ assertEquals(2, result.size(), "query should return 2 solutions");
+ } finally {
+ closeRepository(repo);
+ }
}
/**
@@ -70,15 +77,20 @@ private void testBindError() {
*/
private void testBindScope() {
- String query = "SELECT * {\n" + " { BIND (\"a\" AS ?a) }\n" + " { BIND (?a AS ?b) } \n" + "}";
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String query = "SELECT * {\n" + " { BIND (\"a\" AS ?a) }\n" + " { BIND (?a AS ?b) } \n" + "}";
- TupleQuery q = conn.prepareTupleQuery(query);
- List<BindingSet> result = QueryResults.asList(q.evaluate());
+ TupleQuery q = conn.prepareTupleQuery(query);
+ List<BindingSet> result = QueryResults.asList(q.evaluate());
- assertEquals(1, result.size());
+ assertEquals(1, result.size());
- assertEquals(conn.getValueFactory().createLiteral("a"), result.get(0).getValue("a"));
- assertNull(result.get(0).getValue("b"));
+ assertEquals(conn.getValueFactory().createLiteral("a"), result.get(0).getValue("a"));
+ assertNull(result.get(0).getValue("b"));
+ } finally {
+ closeRepository(repo);
+ }
}
/**
@@ -86,145 +98,179 @@ private void testBindScope() {
*/
private void testBindScopeUnion() {
-
- ValueFactory f = conn.getValueFactory();
- String query = "prefix ex: \n" + "select * {\n" + " bind(ex:v1 as ?v)\n"
- + " bind(strafter(str(?v),str(ex:)) as ?b)\n" + " {\n" + " bind(?b as ?b1)\n" + " } union {\n"
- + " bind(?b as ?b2)\n" + " }\n" + "}";
-
- TupleQuery q = conn.prepareTupleQuery(query);
- List<BindingSet> result = QueryResults.asList(q.evaluate());
-
- assertEquals(2, result.size());
-
- IRI v1 = f.createIRI("http://example.org/v1");
- Literal b = f.createLiteral("v1");
- for (BindingSet bs : result) {
- assertThat(bs.getValue("v")).isEqualTo(v1);
- assertThat(bs.getValue("b1")).isNull();
- assertThat(bs.getValue("b2")).isNull();
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ ValueFactory f = conn.getValueFactory();
+ String query = "prefix ex: \n" + "select * {\n" + " bind(ex:v1 as ?v)\n"
+ + " bind(strafter(str(?v),str(ex:)) as ?b)\n" + " {\n" + " bind(?b as ?b1)\n" + " } union {\n"
+ + " bind(?b as ?b2)\n" + " }\n" + "}";
+
+ TupleQuery q = conn.prepareTupleQuery(query);
+ List<BindingSet> result = QueryResults.asList(q.evaluate());
+
+ assertEquals(2, result.size());
+
+ IRI v1 = f.createIRI("http://example.org/v1");
+ Literal b = f.createLiteral("v1");
+ for (BindingSet bs : result) {
+ assertThat(bs.getValue("v")).isEqualTo(v1);
+ assertThat(bs.getValue("b1")).isNull();
+ assertThat(bs.getValue("b2")).isNull();
+ }
+ } finally {
+ closeRepository(repo);
}
}
private void testSES2250BindErrors() {
-
- conn.prepareUpdate(QueryLanguage.SPARQL, "insert data { _:blank }").execute();
-
- String qb = "SELECT * {\n" + " ?s1 ?p1 ?blank . " + " FILTER(isBlank(?blank))"
- + " BIND (iri(?blank) as ?biri)" + " ?biri ?p2 ?o2 ." + "}";
-
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, qb);
- try (TupleQueryResult evaluate = tq.evaluate()) {
- assertFalse(evaluate.hasNext(), "The query should not return a result");
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ conn.prepareUpdate(QueryLanguage.SPARQL, "insert data { _:blank }")
+ .execute();
+
+ String qb = "SELECT * {\n" + " ?s1 ?p1 ?blank . " + " FILTER(isBlank(?blank))"
+ + " BIND (iri(?blank) as ?biri)" + " ?biri ?p2 ?o2 ." + "}";
+
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, qb);
+ try (TupleQueryResult evaluate = tq.evaluate()) {
+ assertFalse(evaluate.hasNext(), "The query should not return a result");
+ }
+ } finally {
+ closeRepository(repo);
}
}
private void testSES2250BindErrorsInPath() {
-
- conn.prepareUpdate(QueryLanguage.SPARQL, "insert data { _:blank }").execute();
-
- String qb = "SELECT * {\n" + " ?s1 ?p1 ?blank . " + " FILTER(isBlank(?blank))"
- + " BIND (iri(?blank) as ?biri)" + " ?biri * ?o2 ." + "}";
-
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, qb);
- try (TupleQueryResult evaluate = tq.evaluate()) {
- assertFalse(evaluate.hasNext(), "The query should not return a result");
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ conn.prepareUpdate(QueryLanguage.SPARQL, "insert data { _:blank }")
+ .execute();
+
+ String qb = "SELECT * {\n" + " ?s1 ?p1 ?blank . " + " FILTER(isBlank(?blank))"
+ + " BIND (iri(?blank) as ?biri)" + " ?biri * ?o2 ." + "}";
+
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, qb);
+ try (TupleQueryResult evaluate = tq.evaluate()) {
+ assertFalse(evaluate.hasNext(), "The query should not return a result");
+ }
+ } finally {
+ closeRepository(repo);
}
}
private void testSelectBindOnly() {
- String query = "select ?b1 ?b2 ?b3\n" + "where {\n" + " bind(1 as ?b1)\n" + "}";
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String query = "select ?b1 ?b2 ?b3\n" + "where {\n" + " bind(1 as ?b1)\n" + "}";
- List<BindingSet> result = QueryResults.asList(conn.prepareTupleQuery(query).evaluate());
+ List<BindingSet> result = QueryResults.asList(conn.prepareTupleQuery(query).evaluate());
- assertThat(result.size()).isEqualTo(1);
- BindingSet solution = result.get(0);
+ assertThat(result.size()).isEqualTo(1);
+ BindingSet solution = result.get(0);
- assertThat(solution.getValue("b1")).isEqualTo(literal("1", CoreDatatype.XSD.INTEGER));
- assertThat(solution.getValue("b2")).isNull();
- assertThat(solution.getValue("b3")).isNull();
+ assertThat(solution.getValue("b1")).isEqualTo(literal("1", CoreDatatype.XSD.INTEGER));
+ assertThat(solution.getValue("b2")).isNull();
+ assertThat(solution.getValue("b3")).isNull();
+ } finally {
+ closeRepository(repo);
+ }
}
private void testGH3696Bind() {
- Model testData = new ModelBuilder().setNamespace("ex", "http://example.org/")
- .subject("ex:unit1")
- .add(RDF.TYPE, "ex:Unit")
- .add(RDFS.LABEL, "Unit1")
- .add("ex:has", "Unit1")
- .subject("ex:unit2")
- .add(RDF.TYPE, "ex:Unit")
- .add(RDFS.LABEL, "Unit2")
- .build();
- conn.add(testData);
-
- String query = "PREFIX ex: \n" + "SELECT * {\n" + " ?bind rdfs:label ?b1 ;\n"
- + " a ex:Unit .\n" + " FILTER (?b1 = 'Unit2') .\n" + " BIND(?bind AS ?n0)\n"
- + " ?n0 ex:has ?n1 \n" + " }";
-
- List<BindingSet> result = QueryResults.asList(conn.prepareTupleQuery(query).evaluate());
-
- assertThat(result).isEmpty();
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ Model testData = new ModelBuilder().setNamespace("ex", "http://example.org/")
+ .subject("ex:unit1")
+ .add(RDF.TYPE, "ex:Unit")
+ .add(RDFS.LABEL, "Unit1")
+ .add("ex:has", "Unit1")
+ .subject("ex:unit2")
+ .add(RDF.TYPE, "ex:Unit")
+ .add(RDFS.LABEL, "Unit2")
+ .build();
+ conn.add(testData);
+
+ String query = "PREFIX ex: \n" + "SELECT * {\n" + " ?bind rdfs:label ?b1 ;\n"
+ + " a ex:Unit .\n" + " FILTER (?b1 = 'Unit2') .\n" + " BIND(?bind AS ?n0)\n"
+ + " ?n0 ex:has ?n1 \n" + " }";
+
+ List<BindingSet> result = QueryResults.asList(conn.prepareTupleQuery(query).evaluate());
+
+ assertThat(result).isEmpty();
+ } finally {
+ closeRepository(repo);
+ }
}
private void testGH4499BindFilterNotExist1() {
- Model testData = new ModelBuilder().setNamespace("ex", "http://example.org/")
- .subject("ex:a")
- .add("ex:p", "ex:c1")
- .add("ex:p", "ex:c2")
- .add("ex:p", "ex:c3")
- .subject("ex:c1")
- .add(RDF.TYPE, "ex:T")
- .add("ex:q", "something")
- .subject("ex:c2")
- .add(RDF.TYPE, "ex:T")
- .build();
- conn.add(testData);
-
- String query = "PREFIX ex: \n" + "SELECT *\n" + " WHERE {\n"
- + " BIND ( ex:a AS ?a )\n" + " BIND ( ex:b AS ?b )\n"
- + " ?a ex:p* ?c .\n" + " FILTER EXISTS { ?c rdf:type ex:T }\n"
- + " FILTER NOT EXISTS { ?c ex:q ?d}\n" + "}";
-
- List<BindingSet> result = QueryResults.asList(conn.prepareTupleQuery(query).evaluate());
-
- assertThat(result).hasSize(1);
-
- var bs = result.get(0);
-
- assertThat(bs.getValue("a").stringValue()).isEqualTo("http://example.org/a");
- assertThat(bs.getValue("c").stringValue()).isEqualTo("http://example.org/c2");
- assertThat(bs.getValue("d")).isNull();
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ Model testData = new ModelBuilder().setNamespace("ex", "http://example.org/")
+ .subject("ex:a")
+ .add("ex:p", "ex:c1")
+ .add("ex:p", "ex:c2")
+ .add("ex:p", "ex:c3")
+ .subject("ex:c1")
+ .add(RDF.TYPE, "ex:T")
+ .add("ex:q", "something")
+ .subject("ex:c2")
+ .add(RDF.TYPE, "ex:T")
+ .build();
+ conn.add(testData);
+
+ String query = "PREFIX ex: \n" + "SELECT *\n" + " WHERE {\n"
+ + " BIND ( ex:a AS ?a )\n" + " BIND ( ex:b AS ?b )\n"
+ + " ?a ex:p* ?c .\n" + " FILTER EXISTS { ?c rdf:type ex:T }\n"
+ + " FILTER NOT EXISTS { ?c ex:q ?d}\n" + "}";
+
+ List<BindingSet> result = QueryResults.asList(conn.prepareTupleQuery(query).evaluate());
+
+ assertThat(result).hasSize(1);
+
+ var bs = result.get(0);
+
+ assertThat(bs.getValue("a").stringValue()).isEqualTo("http://example.org/a");
+ assertThat(bs.getValue("c").stringValue()).isEqualTo("http://example.org/c2");
+ assertThat(bs.getValue("d")).isNull();
+ } finally {
+ closeRepository(repo);
+ }
}
private void testGH4499BindFilterNotExist2() {
- Model testData = new ModelBuilder().setNamespace("ex", "http://example.org/")
- .subject("ex:a")
- .add("ex:p", "ex:c1")
- .add("ex:p", "ex:c2")
- .add("ex:p", "ex:c3")
- .subject("ex:c1")
- .add(RDF.TYPE, "ex:T")
- .add("ex:q", "something")
- .subject("ex:c2")
- .add(RDF.TYPE, "ex:T")
- .build();
- conn.add(testData);
-
- String query = "PREFIX ex: \n" + "SELECT *\n" + " WHERE {\n"
- + " FILTER EXISTS { ?c rdf:type ex:T }\n" + " FILTER NOT EXISTS { ?c ex:q ?d }\n"
- + " BIND ( ex:a AS ?a )\n" + " BIND ( ex:b AS ?b )\n"
- + " ?a ex:p* ?c .\n" + "}";
-
- List<BindingSet> result = QueryResults.asList(conn.prepareTupleQuery(query).evaluate());
-
- assertThat(result).hasSize(1);
-
- var bs = result.get(0);
-
- assertThat(bs.getValue("a").stringValue()).isEqualTo("http://example.org/a");
- assertThat(bs.getValue("c").stringValue()).isEqualTo("http://example.org/c2");
- assertThat(bs.getValue("d")).isNull();
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ Model testData = new ModelBuilder().setNamespace("ex", "http://example.org/")
+ .subject("ex:a")
+ .add("ex:p", "ex:c1")
+ .add("ex:p", "ex:c2")
+ .add("ex:p", "ex:c3")
+ .subject("ex:c1")
+ .add(RDF.TYPE, "ex:T")
+ .add("ex:q", "something")
+ .subject("ex:c2")
+ .add(RDF.TYPE, "ex:T")
+ .build();
+ conn.add(testData);
+
+ String query = "PREFIX ex: \n" + "SELECT *\n" + " WHERE {\n"
+ + " FILTER EXISTS { ?c rdf:type ex:T }\n"
+ + " FILTER NOT EXISTS { ?c ex:q ?d }\n" + " BIND ( ex:a AS ?a )\n"
+ + " BIND ( ex:b AS ?b )\n" + " ?a ex:p* ?c .\n" + "}";
+
+ List<BindingSet> result = QueryResults.asList(conn.prepareTupleQuery(query).evaluate());
+
+ assertThat(result).hasSize(1);
+
+ var bs = result.get(0);
+
+ assertThat(bs.getValue("a").stringValue()).isEqualTo("http://example.org/a");
+ assertThat(bs.getValue("c").stringValue()).isEqualTo("http://example.org/c2");
+ assertThat(bs.getValue("d")).isNull();
+ } finally {
+ closeRepository(repo);
+ }
}
diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BuiltinFunctionTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BuiltinFunctionTest.java
index 6b71c64aaa7..01dfe7b0831 100644
--- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BuiltinFunctionTest.java
+++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/BuiltinFunctionTest.java
@@ -19,6 +19,7 @@
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
+import java.util.function.Supplier;
import java.util.stream.Stream;
import org.eclipse.rdf4j.model.IRI;
@@ -31,6 +32,7 @@
import org.eclipse.rdf4j.query.TupleQuery;
import org.eclipse.rdf4j.query.TupleQueryResult;
import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest;
import org.junit.jupiter.api.DynamicTest;
@@ -42,7 +44,7 @@
*/
public class BuiltinFunctionTest extends AbstractComplianceTest {
- public BuiltinFunctionTest(Repository repo) {
+ public BuiltinFunctionTest(Supplier<Repository> repo) {
super(repo);
}
@@ -53,13 +55,16 @@ public BuiltinFunctionTest(Repository repo) {
private void testSeconds() {
String qry = "PREFIX xsd: "
+ "SELECT (SECONDS(\"2011-01-10T14:45:13\"^^xsd:dateTime) AS ?sec) { }";
-
- try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) {
- assertNotNull(result);
- assertTrue(result.hasNext());
- assertEquals("13", result.next().getValue("sec").stringValue());
- assertFalse(result.hasNext());
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) {
+ assertNotNull(result);
+ assertTrue(result.hasNext());
+ assertEquals("13", result.next().getValue("sec").stringValue());
+ assertFalse(result.hasNext());
+ }
}
+ closeRepository(repo);
}
/**
@@ -70,278 +75,341 @@ private void testSecondsMilliseconds() {
String qry = "PREFIX xsd: "
+ "SELECT (SECONDS(\"2011-01-10T14:45:13.815-05:00\"^^xsd:dateTime) AS ?sec) { }";
- try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) {
- assertNotNull(result);
- assertTrue(result.hasNext());
- assertEquals("13.815", result.next().getValue("sec").stringValue());
- assertFalse(result.hasNext());
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) {
+ assertNotNull(result);
+ assertTrue(result.hasNext());
+ assertEquals("13.815", result.next().getValue("sec").stringValue());
+ assertFalse(result.hasNext());
+ }
}
+ closeRepository(repo);
}
private void testSES1991NOWEvaluation() throws Exception {
- loadTestData("/testdata-query/defaultgraph.ttl");
- String query = "SELECT ?d WHERE {?s ?p ?o . BIND(NOW() as ?d) } LIMIT 2";
-
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-
- try (TupleQueryResult result = tq.evaluate()) {
- assertNotNull(result);
- assertTrue(result.hasNext());
- Literal d1 = (Literal) result.next().getValue("d");
- assertTrue(result.hasNext());
- Literal d2 = (Literal) result.next().getValue("d");
- assertFalse(result.hasNext());
- assertNotNull(d1);
- assertEquals(d1, d2);
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/defaultgraph.ttl", conn);
+ String query = "SELECT ?d WHERE {?s ?p ?o . BIND(NOW() as ?d) } LIMIT 2";
+
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertNotNull(result);
+ assertTrue(result.hasNext());
+ Literal d1 = (Literal) result.next().getValue("d");
+ assertTrue(result.hasNext());
+ Literal d2 = (Literal) result.next().getValue("d");
+ assertFalse(result.hasNext());
+ assertNotNull(d1);
+ assertEquals(d1, d2);
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
+ }
}
+ closeRepository(repo);
}
private void testSES869ValueOfNow() {
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL,
- "SELECT ?p ( NOW() as ?n ) { BIND (NOW() as ?p ) }");
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL,
+ "SELECT ?p ( NOW() as ?n ) { BIND (NOW() as ?p ) }");
- try (TupleQueryResult result = tq.evaluate()) {
- assertNotNull(result);
- assertTrue(result.hasNext());
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertNotNull(result);
+ assertTrue(result.hasNext());
- BindingSet bs = result.next();
- Value p = bs.getValue("p");
- Value n = bs.getValue("n");
+ BindingSet bs = result.next();
+ Value p = bs.getValue("p");
+ Value n = bs.getValue("n");
- assertNotNull(p);
- assertNotNull(n);
- assertEquals(p, n);
- assertTrue(p == n);
+ assertNotNull(p);
+ assertNotNull(n);
+ assertEquals(p, n);
+ assertTrue(p == n);
+ }
}
+ closeRepository(repo);
}
private void testSES1991UUIDEvaluation() throws Exception {
- loadTestData("/testdata-query/defaultgraph.ttl");
- String query = "SELECT ?uid WHERE {?s ?p ?o . BIND(UUID() as ?uid) } LIMIT 2";
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/defaultgraph.ttl", conn);
+ String query = "SELECT ?uid WHERE {?s ?p ?o . BIND(UUID() as ?uid) } LIMIT 2";
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- try (TupleQueryResult result = tq.evaluate()) {
- assertNotNull(result);
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertNotNull(result);
- IRI uuid1 = (IRI) result.next().getValue("uid");
- IRI uuid2 = (IRI) result.next().getValue("uid");
+ IRI uuid1 = (IRI) result.next().getValue("uid");
+ IRI uuid2 = (IRI) result.next().getValue("uid");
- assertNotNull(uuid1);
- assertNotNull(uuid2);
- assertNotEquals(uuid1, uuid2);
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ assertNotNull(uuid1);
+ assertNotNull(uuid2);
+ assertNotEquals(uuid1, uuid2);
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
+ }
}
+ closeRepository(repo);
}
private void testSES1991STRUUIDEvaluation() throws Exception {
- loadTestData("/testdata-query/defaultgraph.ttl");
- String query = "SELECT ?uid WHERE {?s ?p ?o . BIND(STRUUID() as ?uid) } LIMIT 2";
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/defaultgraph.ttl", conn);
+ String query = "SELECT ?uid WHERE {?s ?p ?o . BIND(STRUUID() as ?uid) } LIMIT 2";
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- try (TupleQueryResult result = tq.evaluate()) {
- assertNotNull(result);
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertNotNull(result);
- Literal uid1 = (Literal) result.next().getValue("uid");
- Literal uid2 = (Literal) result.next().getValue("uid");
+ Literal uid1 = (Literal) result.next().getValue("uid");
+ Literal uid2 = (Literal) result.next().getValue("uid");
- assertNotNull(uid1);
- assertNotEquals(uid1, uid2);
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ assertNotNull(uid1);
+ assertNotEquals(uid1, uid2);
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
+ }
}
+ closeRepository(repo);
}
private void testSES1991RANDEvaluation() throws Exception {
- loadTestData("/testdata-query/defaultgraph.ttl");
- String query = "SELECT ?r WHERE {?s ?p ?o . BIND(RAND() as ?r) } LIMIT 3";
-
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-
- try (TupleQueryResult result = tq.evaluate()) {
- assertNotNull(result);
-
- Literal r1 = (Literal) result.next().getValue("r");
- Literal r2 = (Literal) result.next().getValue("r");
- Literal r3 = (Literal) result.next().getValue("r");
-
- assertNotNull(r1);
-
- // there is a small chance that two successive calls to the random
- // number generator will generate the exact same value, so we check
- // for
- // three successive calls (still theoretically possible to be
- // identical, but phenomenally unlikely).
- assertFalse(r1.equals(r2) && r1.equals(r3));
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/defaultgraph.ttl", conn);
+ String query = "SELECT ?r WHERE {?s ?p ?o . BIND(RAND() as ?r) } LIMIT 3";
+
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertNotNull(result);
+
+ Literal r1 = (Literal) result.next().getValue("r");
+ Literal r2 = (Literal) result.next().getValue("r");
+ Literal r3 = (Literal) result.next().getValue("r");
+
+ assertNotNull(r1);
+
+ // there is a small chance that two successive calls to the random
+ // number generator will generate the exact same value, so we check
+ // for
+ // three successive calls (still theoretically possible to be
+ // identical, but phenomenally unlikely).
+ assertFalse(r1.equals(r2) && r1.equals(r3));
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
+ }
}
+ closeRepository(repo);
}
private void testSES2121URIFunction() {
- String query = "SELECT (URI(\"foo bar\") as ?uri) WHERE {}";
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- try (TupleQueryResult result = tq.evaluate()) {
- assertNotNull(result);
- assertTrue(result.hasNext());
- BindingSet bs = result.next();
- IRI uri = (IRI) bs.getValue("uri");
- assertNull(uri, "uri result for invalid URI should be unbound");
- }
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String query = "SELECT (URI(\"foo bar\") as ?uri) WHERE {}";
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertNotNull(result);
+ assertTrue(result.hasNext());
+ BindingSet bs = result.next();
+ IRI uri = (IRI) bs.getValue("uri");
+ assertNull(uri, "uri result for invalid URI should be unbound");
+ }
- query = "BASE SELECT (URI(\"foo bar\") as ?uri) WHERE {}";
- tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- try (TupleQueryResult result = tq.evaluate()) {
- assertNotNull(result);
- assertTrue(result.hasNext());
- BindingSet bs = result.next();
- IRI uri = (IRI) bs.getValue("uri");
- assertNotNull(uri, "uri result for valid URI reference should be bound");
+ query = "BASE SELECT (URI(\"foo bar\") as ?uri) WHERE {}";
+ tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertNotNull(result);
+ assertTrue(result.hasNext());
+ BindingSet bs = result.next();
+ IRI uri = (IRI) bs.getValue("uri");
+ assertNotNull(uri, "uri result for valid URI reference should be bound");
+ }
}
+ closeRepository(repo);
}
private void test27NormalizeIRIFunction() {
- String query = "SELECT (IRI(\"../bar\") as ?Iri) WHERE {}";
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query, "http://example.com/foo/");
- try (TupleQueryResult result = tq.evaluate()) {
- assertNotNull(result);
- assertTrue(result.hasNext());
- BindingSet bs = result.next();
- IRI actual = (IRI) bs.getValue("Iri");
- IRI expected = iri("http://example.com/bar");
- assertEquals(expected, actual, "IRI result for relative IRI should be normalized");
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String query = "SELECT (IRI(\"../bar\") as ?Iri) WHERE {}";
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query, "http://example.com/foo/");
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertNotNull(result);
+ assertTrue(result.hasNext());
+ BindingSet bs = result.next();
+ IRI actual = (IRI) bs.getValue("Iri");
+ IRI expected = iri("http://example.com/bar");
+ assertEquals(expected, actual, "IRI result for relative IRI should be normalized");
+ }
}
+ closeRepository(repo);
}
private void testSES2052If1() throws Exception {
- loadTestData("/testdata-query/dataset-query.trig");
- String query = "SELECT ?p \n" + "WHERE { \n" + " ?s ?p ?o . \n"
- + " FILTER(IF(BOUND(?p), ?p = , false)) \n"
- + "}";
-
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- try (TupleQueryResult result = tq.evaluate()) {
- assertNotNull(result);
- while (result.hasNext()) {
- BindingSet bs = result.next();
-
- IRI p = (IRI) bs.getValue("p");
- assertNotNull(p);
- assertEquals(RDF.TYPE, p);
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/dataset-query.trig", conn);
+ String query = "SELECT ?p \n" + "WHERE { \n" + " ?s ?p ?o . \n"
+ + " FILTER(IF(BOUND(?p), ?p = , false)) \n"
+ + "}";
+
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertNotNull(result);
+ while (result.hasNext()) {
+ BindingSet bs = result.next();
+
+ IRI p = (IRI) bs.getValue("p");
+ assertNotNull(p);
+ assertEquals(RDF.TYPE, p);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ fail(e.getMessage());
}
- } catch (Exception e) {
- e.printStackTrace();
- fail(e.getMessage());
}
+ closeRepository(repo);
}
private void testSES2052If2() throws Exception {
- loadTestData("/testdata-query/dataset-query.trig");
- String query = "SELECT ?p \n" + "WHERE { \n" + " ?s ?p ?o . \n"
- + " FILTER(IF(!BOUND(?p), false , ?p = )) \n"
- + "}";
-
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- try (TupleQueryResult result = tq.evaluate()) {
- assertNotNull(result);
- while (result.hasNext()) {
- BindingSet bs = result.next();
-
- IRI p = (IRI) bs.getValue("p");
- assertNotNull(p);
- assertEquals(RDF.TYPE, p);
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/dataset-query.trig", conn);
+ String query = "SELECT ?p \n" + "WHERE { \n" + " ?s ?p ?o . \n"
+ + " FILTER(IF(!BOUND(?p), false , ?p = )) \n"
+ + "}";
+
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertNotNull(result);
+ while (result.hasNext()) {
+ BindingSet bs = result.next();
+
+ IRI p = (IRI) bs.getValue("p");
+ assertNotNull(p);
+ assertEquals(RDF.TYPE, p);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ fail(e.getMessage());
}
- } catch (Exception e) {
- e.printStackTrace();
- fail(e.getMessage());
}
-
+ closeRepository(repo);
}
private void testRegexCaseNonAscii() {
- String query = "ask {filter (regex(\"Валовой\", \"валовой\", \"i\")) }";
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String query = "ask {filter (regex(\"Валовой\", \"валовой\", \"i\")) }";
- assertTrue(conn.prepareBooleanQuery(query).evaluate(), "case-insensitive match on Cyrillic should succeed");
+ assertTrue(conn.prepareBooleanQuery(query).evaluate(), "case-insensitive match on Cyrillic should succeed");
- query = "ask {filter (regex(\"Валовой\", \"валовой\")) }";
+ query = "ask {filter (regex(\"Валовой\", \"валовой\")) }";
- assertFalse(conn.prepareBooleanQuery(query).evaluate(), "case-sensitive match on Cyrillic should fail");
+ assertFalse(conn.prepareBooleanQuery(query).evaluate(), "case-sensitive match on Cyrillic should fail");
+ }
+ closeRepository(repo);
}
private void testFilterRegexBoolean() throws Exception {
- loadTestData("/testdata-query/dataset-query.trig");
-
- // test case for issue SES-1050
- String query = getNamespaceDeclarations() + " SELECT *" + " WHERE { " + " ?x foaf:name ?name ; "
- + " foaf:mbox ?mbox . " + " FILTER(EXISTS { "
- + " FILTER(REGEX(?name, \"Bo\") && REGEX(?mbox, \"bob\")) " +
- // query.append(" FILTER(REGEX(?mbox, \"bob\")) ");
- " } )" + " } ";
-
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-
- try (Stream<BindingSet> result = tq.evaluate().stream()) {
- long count = result.count();
- assertEquals(1, count);
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/dataset-query.trig", conn);
+
+ // test case for issue SES-1050
+ String query = getNamespaceDeclarations() + " SELECT *" + " WHERE { " + " ?x foaf:name ?name ; "
+ + " foaf:mbox ?mbox . " + " FILTER(EXISTS { "
+ + " FILTER(REGEX(?name, \"Bo\") && REGEX(?mbox, \"bob\")) " +
+ // query.append(" FILTER(REGEX(?mbox, \"bob\")) ");
+ " } )" + " } ";
+
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+
+ try (Stream<BindingSet> result = tq.evaluate().stream()) {
+ long count = result.count();
+ assertEquals(1, count);
+ }
}
+ closeRepository(repo);
}
private void testDateCastFunction_date() {
- String qry = "PREFIX xsd: "
- + "SELECT (xsd:date(\"2022-09-09\") AS ?date) { }";
-
- try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) {
- assertNotNull(result);
- assertTrue(result.hasNext());
- assertEquals("2022-09-09", result.next().getValue("date").stringValue());
- assertFalse(result.hasNext());
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String qry = "PREFIX xsd: "
+ + "SELECT (xsd:date(\"2022-09-09\") AS ?date) { }";
+
+ try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) {
+ assertNotNull(result);
+ assertTrue(result.hasNext());
+ assertEquals("2022-09-09", result.next().getValue("date").stringValue());
+ assertFalse(result.hasNext());
+ }
}
+ closeRepository(repo);
}
private void testDateCastFunction_date_withTimeZone_utc() {
- String qry = "PREFIX xsd: "
- + "SELECT (xsd:date(\"2022-09-09Z\") AS ?date) { }";
-
- try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) {
- assertNotNull(result);
- assertTrue(result.hasNext());
- assertEquals("2022-09-09Z", result.next().getValue("date").stringValue());
- assertFalse(result.hasNext());
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String qry = "PREFIX xsd: "
+ + "SELECT (xsd:date(\"2022-09-09Z\") AS ?date) { }";
+
+ try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) {
+ assertNotNull(result);
+ assertTrue(result.hasNext());
+ assertEquals("2022-09-09Z", result.next().getValue("date").stringValue());
+ assertFalse(result.hasNext());
+ }
}
+ closeRepository(repo);
}
private void testDateCastFunction_dateTime_withTimeZone_offset() {
- String qry = "PREFIX xsd: "
- + "SELECT (xsd:date(\"2022-09-09T14:45:13+03:00\") AS ?date) { }";
-
- try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) {
- assertNotNull(result);
- assertTrue(result.hasNext());
- assertEquals("2022-09-09+03:00", result.next().getValue("date").stringValue());
- assertFalse(result.hasNext());
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String qry = "PREFIX xsd: "
+ + "SELECT (xsd:date(\"2022-09-09T14:45:13+03:00\") AS ?date) { }";
+
+ try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) {
+ assertNotNull(result);
+ assertTrue(result.hasNext());
+ assertEquals("2022-09-09+03:00", result.next().getValue("date").stringValue());
+ assertFalse(result.hasNext());
+ }
}
+ closeRepository(repo);
}
private void testDateCastFunction_invalidInput() {
- String qry = "PREFIX xsd: "
- + "SELECT (xsd:date(\"2022-09-xx\") AS ?date) { }";
-
- try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) {
- assertNotNull(result);
- assertTrue(result.hasNext());
- assertFalse(result.next().hasBinding("date"),
- "There should be no binding because the cast should have failed.");
- assertFalse(result.hasNext());
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String qry = "PREFIX xsd: "
+ + "SELECT (xsd:date(\"2022-09-xx\") AS ?date) { }";
+
+ try (TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, qry).evaluate()) {
+ assertNotNull(result);
+ assertTrue(result.hasNext());
+ assertFalse(result.next().hasBinding("date"),
+ "There should be no binding because the cast should have failed.");
+ assertFalse(result.hasNext());
+ }
}
+ closeRepository(repo);
}
public Stream<DynamicTest> tests() {
diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ConstructTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ConstructTest.java
index 20b1282e6de..d7476c0e8ef 100644
--- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ConstructTest.java
+++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ConstructTest.java
@@ -20,6 +20,7 @@
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.StringReader;
+import java.util.function.Supplier;
import java.util.stream.Stream;
import org.eclipse.rdf4j.model.IRI;
@@ -32,6 +33,7 @@
import org.eclipse.rdf4j.query.QueryLanguage;
import org.eclipse.rdf4j.query.QueryResults;
import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.rio.RDFFormat;
import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest;
import org.junit.jupiter.api.DynamicTest;
@@ -44,38 +46,42 @@
*/
public class ConstructTest extends AbstractComplianceTest {
- public ConstructTest(Repository repo) {
+ public ConstructTest(Supplier<Repository> repo) {
super(repo);
}
private void testConstructModifiers() throws Exception {
- loadTestData("/testdata-query/dataset-construct-modifiers.ttl");
- String qry = "PREFIX foaf: \n" + "PREFIX site: \n"
- + "CONSTRUCT { \n" + " ?iri foaf:name ?name . \n" + " ?iri foaf:nick ?nick . \n" + "} \n"
- + "WHERE { \n" + " ?iri foaf:name ?name ; \n" + " site:hits ?hits ; \n" + " foaf:nick ?nick . \n"
- + "} \n" + "ORDER BY desc(?hits) \n" + "LIMIT 3";
- Statement[] correctResult = {
- statement(iri("urn:1"), iri("http://xmlns.com/foaf/0.1/name"), literal("Alice"), null),
- statement(iri("urn:1"), iri("http://xmlns.com/foaf/0.1/nick"), literal("Al"), null),
-
- statement(iri("urn:3"), iri("http://xmlns.com/foaf/0.1/name"), literal("Eve"), null),
- statement(iri("urn:3"), iri("http://xmlns.com/foaf/0.1/nick"), literal("Ev"), null),
-
- statement(iri("urn:2"), iri("http://xmlns.com/foaf/0.1/name"), literal("Bob"), null),
- statement(iri("urn:2"), iri("http://xmlns.com/foaf/0.1/nick"), literal("Bo"), null), };
- GraphQuery gq = conn.prepareGraphQuery(qry);
- try (GraphQueryResult result = gq.evaluate()) {
- assertNotNull(result);
- assertTrue(result.hasNext());
- int resultNo = 0;
- while (result.hasNext()) {
- Statement st = result.next();
- assertThat(resultNo).isLessThan(correctResult.length);
- assertEquals(correctResult[resultNo], st);
- resultNo++;
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/dataset-construct-modifiers.ttl", conn);
+ String qry = "PREFIX foaf: \n" + "PREFIX site: \n"
+ + "CONSTRUCT { \n" + " ?iri foaf:name ?name . \n" + " ?iri foaf:nick ?nick . \n" + "} \n"
+ + "WHERE { \n" + " ?iri foaf:name ?name ; \n" + " site:hits ?hits ; \n"
+ + " foaf:nick ?nick . \n" + "} \n" + "ORDER BY desc(?hits) \n" + "LIMIT 3";
+ Statement[] correctResult = {
+ statement(iri("urn:1"), iri("http://xmlns.com/foaf/0.1/name"), literal("Alice"), null),
+ statement(iri("urn:1"), iri("http://xmlns.com/foaf/0.1/nick"), literal("Al"), null),
+
+ statement(iri("urn:3"), iri("http://xmlns.com/foaf/0.1/name"), literal("Eve"), null),
+ statement(iri("urn:3"), iri("http://xmlns.com/foaf/0.1/nick"), literal("Ev"), null),
+
+ statement(iri("urn:2"), iri("http://xmlns.com/foaf/0.1/name"), literal("Bob"), null),
+ statement(iri("urn:2"), iri("http://xmlns.com/foaf/0.1/nick"), literal("Bo"), null), };
+ GraphQuery gq = conn.prepareGraphQuery(qry);
+ try (GraphQueryResult result = gq.evaluate()) {
+ assertNotNull(result);
+ assertTrue(result.hasNext());
+ int resultNo = 0;
+ while (result.hasNext()) {
+ Statement st = result.next();
+ assertThat(resultNo).isLessThan(correctResult.length);
+ assertEquals(correctResult[resultNo], st);
+ resultNo++;
+ }
+ assertEquals(correctResult.length, resultNo);
}
- assertEquals(correctResult.length, resultNo);
}
+ closeRepository(repo);
}
/**
@@ -83,34 +89,43 @@ private void testConstructModifiers() throws Exception {
*/
private void testConstruct_CyclicPathWithJoin() {
- IRI test = iri("urn:test"), a = iri("urn:a"), b = iri("urn:b"), c = iri("urn:c");
- conn.add(test, RDF.TYPE, DCAT.CATALOG);
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ IRI test = iri("urn:test"), a = iri("urn:a"), b = iri("urn:b"), c = iri("urn:c");
+ conn.add(test, RDF.TYPE, DCAT.CATALOG);
- String query = "PREFIX dcat: \n" + "\n" + "CONSTRUCT {\n" + " ?x .\n"
- + " ?x ?x .\n" + "}\n" + "WHERE {\n" + " ?x a dcat:Catalog .\n" + "}";
+ String query = "PREFIX dcat: \n" + "\n" + "CONSTRUCT {\n"
+ + " ?x .\n" + " ?x ?x .\n" + "}\n" + "WHERE {\n" + " ?x a dcat:Catalog .\n"
+ + "}";
- Model result = QueryResults.asModel(conn.prepareGraphQuery(query).evaluate());
+ Model result = QueryResults.asModel(conn.prepareGraphQuery(query).evaluate());
- assertThat(result.contains(a, b, test)).isTrue();
- assertThat(result.contains(test, c, test)).isTrue();
+ assertThat(result.contains(a, b, test)).isTrue();
+ assertThat(result.contains(test, c, test)).isTrue();
+ }
+ closeRepository(repo);
}
private void testSES2104ConstructBGPSameURI() throws Exception {
- final String queryStr = "PREFIX : <urn:> CONSTRUCT {:x :p :x } WHERE {} ";
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ final String queryStr = "PREFIX : <urn:> CONSTRUCT {:x :p :x } WHERE {} ";
- conn.add(new StringReader("@prefix : <urn:> . :a :p :b . "), "", RDFFormat.TURTLE);
+ conn.add(new StringReader("@prefix : <urn:> . :a :p :b . "), "", RDFFormat.TURTLE);
- final IRI x = conn.getValueFactory().createIRI("urn:x");
- final IRI p = conn.getValueFactory().createIRI("urn:p");
+ final IRI x = conn.getValueFactory().createIRI("urn:x");
+ final IRI p = conn.getValueFactory().createIRI("urn:p");
- GraphQuery query = conn.prepareGraphQuery(QueryLanguage.SPARQL, queryStr);
- try (GraphQueryResult evaluate = query.evaluate()) {
- Model result = QueryResults.asModel(evaluate);
+ GraphQuery query = conn.prepareGraphQuery(QueryLanguage.SPARQL, queryStr);
+ try (GraphQueryResult evaluate = query.evaluate()) {
+ Model result = QueryResults.asModel(evaluate);
- assertNotNull(result);
- assertFalse(result.isEmpty());
- assertTrue(result.contains(x, p, x));
+ assertNotNull(result);
+ assertFalse(result.isEmpty());
+ assertTrue(result.contains(x, p, x));
+ }
}
+ closeRepository(repo);
}
public Stream<DynamicTest> tests() {
diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/DefaultGraphTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/DefaultGraphTest.java
index ce746c64f1c..20e86934b89 100644
--- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/DefaultGraphTest.java
+++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/DefaultGraphTest.java
@@ -16,6 +16,7 @@
import static org.junit.jupiter.api.Assertions.fail;
import java.util.List;
+import java.util.function.Supplier;
import java.util.stream.Stream;
import org.eclipse.rdf4j.model.Resource;
@@ -26,10 +27,10 @@
import org.eclipse.rdf4j.query.TupleQuery;
import org.eclipse.rdf4j.query.TupleQueryResult;
import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest;
import org.eclipse.rdf4j.testsuite.sparql.vocabulary.EX;
import org.junit.jupiter.api.DynamicTest;
-import org.junit.jupiter.api.Test;
/**
* Tests on handling default graph identification (DEFAULT keyword, rf4j:nil).
@@ -39,89 +40,104 @@
*/
public class DefaultGraphTest extends AbstractComplianceTest {
- public DefaultGraphTest(Repository repo) {
+ public DefaultGraphTest(Supplier<Repository> repo) {
super(repo);
}
private void testNullContext1() throws Exception {
- loadTestData("/testdata-query/dataset-query.trig");
- String query = " SELECT * " + " FROM DEFAULT " + " WHERE { ?s ?p ?o } ";
-
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-
- try (TupleQueryResult result = tq.evaluate()) {
- assertNotNull(result);
-
- while (result.hasNext()) {
- BindingSet bs = result.next();
- assertNotNull(bs);
-
- Resource s = (Resource) bs.getValue("s");
-
- assertNotNull(s);
- assertNotEquals(EX.BOB, s); // should not be present in default
- // graph
- assertNotEquals(EX.ALICE, s); // should not be present in
- // default
- // graph
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/dataset-query.trig", conn);
+ String query = " SELECT * " + " FROM DEFAULT " + " WHERE { ?s ?p ?o } ";
+
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertNotNull(result);
+
+ while (result.hasNext()) {
+ BindingSet bs = result.next();
+ assertNotNull(bs);
+
+ Resource s = (Resource) bs.getValue("s");
+
+ assertNotNull(s);
+ assertNotEquals(EX.BOB, s); // should not be present in default
+ // graph
+ assertNotEquals(EX.ALICE, s); // should not be present in
+ // default
+ // graph
+ }
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
}
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
}
+ closeRepository(repo);
}
private void testNullContext2() throws Exception {
- loadTestData("/testdata-query/dataset-query.trig");
- String query = " SELECT * " + " FROM rdf4j:nil " + " WHERE { ?s ?p ?o } ";
-
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-
- try (TupleQueryResult result = tq.evaluate()) {
- assertNotNull(result);
-
- while (result.hasNext()) {
- BindingSet bs = result.next();
- assertNotNull(bs);
-
- Resource s = (Resource) bs.getValue("s");
-
- assertNotNull(s);
- assertNotEquals(EX.BOB, s); // should not be present in default
- // graph
- assertNotEquals(EX.ALICE, s); // should not be present in
- // default
- // graph
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/dataset-query.trig", conn);
+ String query = " SELECT * " + " FROM rdf4j:nil " + " WHERE { ?s ?p ?o } ";
+
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertNotNull(result);
+
+ while (result.hasNext()) {
+ BindingSet bs = result.next();
+ assertNotNull(bs);
+
+ Resource s = (Resource) bs.getValue("s");
+
+ assertNotNull(s);
+ assertNotEquals(EX.BOB, s); // should not be present in default
+ // graph
+ assertNotEquals(EX.ALICE, s); // should not be present in
+ // default
+ // graph
+ }
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
}
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ } finally {
+ closeRepository(repo);
}
}
private void testSesameNilAsGraph() throws Exception {
- loadTestData("/testdata-query/dataset-query.trig");
- String query = " SELECT * " + " WHERE { GRAPH rdf4j:nil { ?s ?p ?o } } ";
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/dataset-query.trig", conn);
+ String query = " SELECT * " + " WHERE { GRAPH rdf4j:nil { ?s ?p ?o } } ";
// query.append(" WHERE { ?s ?p ?o } ");
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- try {
- List<BindingSet> result = QueryResults.asList(tq.evaluate());
+ try {
+ List<BindingSet> result = QueryResults.asList(tq.evaluate());
- // nil graph should not be empty
- assertThat(result.size()).isGreaterThan(1);
+ // nil graph should not be empty
+ assertThat(result.size()).isGreaterThan(1);
- for (BindingSet bs : result) {
- Resource s = (Resource) bs.getValue("s");
+ for (BindingSet bs : result) {
+ Resource s = (Resource) bs.getValue("s");
- assertNotNull(s);
- assertThat(s).withFailMessage("%s should not be present in nil graph", EX.BOB).isNotEqualTo(EX.BOB);
- assertThat(s).withFailMessage("%s should not be present in nil graph", EX.ALICE).isNotEqualTo(EX.ALICE);
+ assertNotNull(s);
+ assertThat(s).withFailMessage("%s should not be present in nil graph", EX.BOB).isNotEqualTo(EX.BOB);
+ assertThat(s).withFailMessage("%s should not be present in nil graph", EX.ALICE)
+ .isNotEqualTo(EX.ALICE);
+ }
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ fail(e.getMessage());
}
- } catch (QueryEvaluationException e) {
- e.printStackTrace();
- fail(e.getMessage());
+ } finally {
+ closeRepository(repo);
}
}
diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/DescribeTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/DescribeTest.java
index 5cee875421b..562ef051e19 100644
--- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/DescribeTest.java
+++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/DescribeTest.java
@@ -14,6 +14,7 @@
import static org.assertj.core.api.Assertions.assertThat;
import java.util.Set;
+import java.util.function.Supplier;
import java.util.stream.Stream;
import org.eclipse.rdf4j.model.BNode;
@@ -28,9 +29,9 @@
import org.eclipse.rdf4j.query.QueryLanguage;
import org.eclipse.rdf4j.query.QueryResults;
import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest;
import org.junit.jupiter.api.DynamicTest;
-import org.junit.jupiter.api.Test;
/**
* Tests on SPARQL DESCRIBE queries
@@ -39,264 +40,304 @@
*/
public class DescribeTest extends AbstractComplianceTest {
- public DescribeTest(Repository repo) {
+ public DescribeTest(Supplier<Repository> repo) {
super(repo);
}
private void testDescribeA() throws Exception {
- loadTestData("/testdata-query/dataset-describe.trig");
- String query = getNamespaceDeclarations() + "DESCRIBE ex:a";
-
- GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query);
-
- ValueFactory f = conn.getValueFactory();
- IRI a = f.createIRI("http://example.org/a");
- IRI p = f.createIRI("http://example.org/p");
- try (GraphQueryResult evaluate = gq.evaluate()) {
- Model result = QueryResults.asModel(evaluate);
- Set<Value> objects = result.filter(a, p, null).objects();
- assertThat(objects).isNotNull();
- for (Value object : objects) {
- if (object instanceof BNode) {
- assertThat(result.contains((Resource) object, null, null)).isTrue();
- assertThat(result.filter((Resource) object, null, null)).hasSize(2);
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/dataset-describe.trig", conn);
+ String query = getNamespaceDeclarations() + "DESCRIBE ex:a";
+
+ GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query);
+
+ ValueFactory f = conn.getValueFactory();
+ IRI a = f.createIRI("http://example.org/a");
+ IRI p = f.createIRI("http://example.org/p");
+ try (GraphQueryResult evaluate = gq.evaluate()) {
+ Model result = QueryResults.asModel(evaluate);
+ Set<Value> objects = result.filter(a, p, null).objects();
+ assertThat(objects).isNotNull();
+ for (Value object : objects) {
+ if (object instanceof BNode) {
+ assertThat(result.contains((Resource) object, null, null)).isTrue();
+ assertThat(result.filter((Resource) object, null, null)).hasSize(2);
+ }
}
}
}
+ closeRepository(repo);
}
private void testDescribeAWhere() throws Exception {
- loadTestData("/testdata-query/dataset-describe.trig");
- String query = getNamespaceDeclarations() + "DESCRIBE ?x WHERE {?x rdfs:label \"a\". } ";
-
- GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query);
-
- ValueFactory f = conn.getValueFactory();
- IRI a = f.createIRI("http://example.org/a");
- IRI p = f.createIRI("http://example.org/p");
- try (GraphQueryResult evaluate = gq.evaluate()) {
- Model result = QueryResults.asModel(evaluate);
- Set<Value> objects = result.filter(a, p, null).objects();
- assertThat(objects).isNotNull();
- for (Value object : objects) {
- if (object instanceof BNode) {
- assertThat(result.contains((Resource) object, null, null)).isTrue();
- assertThat(result.filter((Resource) object, null, null)).hasSize(2);
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/dataset-describe.trig", conn);
+ String query = getNamespaceDeclarations() + "DESCRIBE ?x WHERE {?x rdfs:label \"a\". } ";
+
+ GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query);
+
+ ValueFactory f = conn.getValueFactory();
+ IRI a = f.createIRI("http://example.org/a");
+ IRI p = f.createIRI("http://example.org/p");
+ try (GraphQueryResult evaluate = gq.evaluate()) {
+ Model result = QueryResults.asModel(evaluate);
+ Set<Value> objects = result.filter(a, p, null).objects();
+ assertThat(objects).isNotNull();
+ for (Value object : objects) {
+ if (object instanceof BNode) {
+ assertThat(result.contains((Resource) object, null, null)).isTrue();
+ assertThat(result.filter((Resource) object, null, null)).hasSize(2);
+ }
}
}
}
+ closeRepository(repo);
}
private void testDescribeWhere() throws Exception {
- loadTestData("/testdata-query/dataset-describe.trig");
- String query = getNamespaceDeclarations() + "DESCRIBE ?x WHERE {?x rdfs:label ?y . } ";
-
- GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query);
-
- ValueFactory vf = conn.getValueFactory();
- IRI a = vf.createIRI("http://example.org/a");
- IRI b = vf.createIRI("http://example.org/b");
- IRI c = vf.createIRI("http://example.org/c");
- IRI e = vf.createIRI("http://example.org/e");
- IRI f = vf.createIRI("http://example.org/f");
- IRI p = vf.createIRI("http://example.org/p");
-
- try (GraphQueryResult evaluate = gq.evaluate()) {
- Model result = QueryResults.asModel(evaluate);
- assertThat(result.contains(a, p, null)).isTrue();
- assertThat(result.contains(b, RDFS.LABEL, null)).isTrue();
- assertThat(result.contains(c, RDFS.LABEL, null)).isTrue();
- assertThat(result.contains(null, p, b)).isTrue();
- assertThat(result.contains(e, RDFS.LABEL, null)).isTrue();
- assertThat(result.contains(null, p, e)).isTrue();
- assertThat(result.contains(f, null, null)).isFalse();
- Set<Value> objects = result.filter(a, p, null).objects();
- assertThat(objects).isNotNull();
- for (Value object : objects) {
- if (object instanceof BNode) {
- assertThat(result.contains((Resource) object, null, null)).isTrue();
- assertThat(result.filter((Resource) object, null, null)).hasSize(2);
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/dataset-describe.trig", conn);
+ String query = getNamespaceDeclarations() + "DESCRIBE ?x WHERE {?x rdfs:label ?y . } ";
+
+ GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query);
+
+ ValueFactory vf = conn.getValueFactory();
+ IRI a = vf.createIRI("http://example.org/a");
+ IRI b = vf.createIRI("http://example.org/b");
+ IRI c = vf.createIRI("http://example.org/c");
+ IRI e = vf.createIRI("http://example.org/e");
+ IRI f = vf.createIRI("http://example.org/f");
+ IRI p = vf.createIRI("http://example.org/p");
+
+ try (GraphQueryResult evaluate = gq.evaluate()) {
+ Model result = QueryResults.asModel(evaluate);
+ assertThat(result.contains(a, p, null)).isTrue();
+ assertThat(result.contains(b, RDFS.LABEL, null)).isTrue();
+ assertThat(result.contains(c, RDFS.LABEL, null)).isTrue();
+ assertThat(result.contains(null, p, b)).isTrue();
+ assertThat(result.contains(e, RDFS.LABEL, null)).isTrue();
+ assertThat(result.contains(null, p, e)).isTrue();
+ assertThat(result.contains(f, null, null)).isFalse();
+ Set<Value> objects = result.filter(a, p, null).objects();
+ assertThat(objects).isNotNull();
+ for (Value object : objects) {
+ if (object instanceof BNode) {
+ assertThat(result.contains((Resource) object, null, null)).isTrue();
+ assertThat(result.filter((Resource) object, null, null)).hasSize(2);
+ }
}
}
}
+ closeRepository(repo);
}
private void testDescribeB() throws Exception {
- loadTestData("/testdata-query/dataset-describe.trig");
- String query = getNamespaceDeclarations() + "DESCRIBE ex:b";
-
- GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query);
-
- ValueFactory f = conn.getValueFactory();
- IRI b = f.createIRI("http://example.org/b");
- IRI p = f.createIRI("http://example.org/p");
- try (GraphQueryResult evaluate = gq.evaluate()) {
- Model result = QueryResults.asModel(evaluate);
- Set<Value> subjects = result.filter(null, p, b).subjects();
- assertThat(subjects).isNotNull();
- for (Value subject : subjects) {
- if (subject instanceof BNode) {
- assertThat(result.contains(null, null, subject)).isTrue();
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/dataset-describe.trig", conn);
+ String query = getNamespaceDeclarations() + "DESCRIBE ex:b";
+
+ GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query);
+
+ ValueFactory f = conn.getValueFactory();
+ IRI b = f.createIRI("http://example.org/b");
+ IRI p = f.createIRI("http://example.org/p");
+ try (GraphQueryResult evaluate = gq.evaluate()) {
+ Model result = QueryResults.asModel(evaluate);
+ Set<Value> subjects = result.filter(null, p, b).subjects();
+ assertThat(subjects).isNotNull();
+ for (Value subject : subjects) {
+ if (subject instanceof BNode) {
+ assertThat(result.contains(null, null, subject)).isTrue();
+ }
}
}
}
+ closeRepository(repo);
}
private void testDescribeD() throws Exception {
- loadTestData("/testdata-query/dataset-describe.trig");
- String query = getNamespaceDeclarations() + "DESCRIBE ex:d";
-
- GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query);
-
- ValueFactory f = conn.getValueFactory();
- IRI d = f.createIRI("http://example.org/d");
- IRI p = f.createIRI("http://example.org/p");
- IRI e = f.createIRI("http://example.org/e");
- try (GraphQueryResult evaluate = gq.evaluate()) {
- Model result = QueryResults.asModel(evaluate);
-
- assertThat(result.contains(null, p, e)).isTrue();
- assertThat(result.contains(e, null, null)).isFalse();
-
- Set<Value> objects = result.filter(d, p, null).objects();
- assertThat(objects).isNotNull();
- for (Value object : objects) {
- if (object instanceof BNode) {
- Set<Value> childObjects = result.filter((BNode) object, null, null).objects();
- assertThat(childObjects).isNotEmpty();
- for (Value childObject : childObjects) {
- if (childObject instanceof BNode) {
- assertThat(result.contains((BNode) childObject, null, null)).isTrue();
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/dataset-describe.trig", conn);
+ String query = getNamespaceDeclarations() + "DESCRIBE ex:d";
+
+ GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query);
+
+ ValueFactory f = conn.getValueFactory();
+ IRI d = f.createIRI("http://example.org/d");
+ IRI p = f.createIRI("http://example.org/p");
+ IRI e = f.createIRI("http://example.org/e");
+ try (GraphQueryResult evaluate = gq.evaluate()) {
+ Model result = QueryResults.asModel(evaluate);
+
+ assertThat(result.contains(null, p, e)).isTrue();
+ assertThat(result.contains(e, null, null)).isFalse();
+
+ Set<Value> objects = result.filter(d, p, null).objects();
+ assertThat(objects).isNotNull();
+ for (Value object : objects) {
+ if (object instanceof BNode) {
+ Set<Value> childObjects = result.filter((BNode) object, null, null).objects();
+ assertThat(childObjects).isNotEmpty();
+ for (Value childObject : childObjects) {
+ if (childObject instanceof BNode) {
+ assertThat(result.contains((BNode) childObject, null, null)).isTrue();
+ }
}
}
}
}
}
+ closeRepository(repo);
}
private void testDescribeF() throws Exception {
- loadTestData("/testdata-query/dataset-describe.trig");
- String query = getNamespaceDeclarations() + "DESCRIBE ex:f";
-
- GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query);
-
- ValueFactory vf = conn.getValueFactory();
- IRI f = vf.createIRI("http://example.org/f");
- IRI p = vf.createIRI("http://example.org/p");
- try (GraphQueryResult evaluate = gq.evaluate()) {
- Model result = QueryResults.asModel(evaluate);
-
- assertThat(result).isNotNull().hasSize(4);
-
- Set<Value> objects = result.filter(f, p, null).objects();
- for (Value object : objects) {
- if (object instanceof BNode) {
- Set<Value> childObjects = result.filter((BNode) object, null, null).objects();
- assertThat(childObjects).isNotEmpty();
- for (Value childObject : childObjects) {
- if (childObject instanceof BNode) {
- assertThat(result.contains((BNode) childObject, null, null)).isTrue();
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/dataset-describe.trig", conn);
+ String query = getNamespaceDeclarations() + "DESCRIBE ex:f";
+
+ GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query);
+
+ ValueFactory vf = conn.getValueFactory();
+ IRI f = vf.createIRI("http://example.org/f");
+ IRI p = vf.createIRI("http://example.org/p");
+ try (GraphQueryResult evaluate = gq.evaluate()) {
+ Model result = QueryResults.asModel(evaluate);
+
+ assertThat(result).isNotNull().hasSize(4);
+
+ Set<Value> objects = result.filter(f, p, null).objects();
+ for (Value object : objects) {
+ if (object instanceof BNode) {
+ Set<Value> childObjects = result.filter((BNode) object, null, null).objects();
+ assertThat(childObjects).isNotEmpty();
+ for (Value childObject : childObjects) {
+ if (childObject instanceof BNode) {
+ assertThat(result.contains((BNode) childObject, null, null)).isTrue();
+ }
}
}
}
}
}
+ closeRepository(repo);
}
private void testDescribeMultipleA() {
- String update = "insert data { . [] . . } ";
- conn.prepareUpdate(QueryLanguage.SPARQL, update).execute();
-
- String query = getNamespaceDeclarations() + "DESCRIBE ";
-
- GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query);
-
- ValueFactory vf = conn.getValueFactory();
- IRI urn1 = vf.createIRI("urn:1");
- IRI p1 = vf.createIRI("urn:p1");
- IRI p2 = vf.createIRI("urn:p2");
- IRI urn2 = vf.createIRI("urn:2");
- IRI blank = vf.createIRI("urn:blank");
-
- try (GraphQueryResult evaluate = gq.evaluate()) {
- Model result = QueryResults.asModel(evaluate);
- assertThat(result.contains(urn1, p1, null)).isTrue();
- assertThat(result.contains(null, blank, urn1)).isTrue();
- assertThat(result.contains(urn2, p2, null)).isTrue();
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String update = "insert data { . [] . . } ";
+ conn.prepareUpdate(QueryLanguage.SPARQL, update).execute();
+
+ String query = getNamespaceDeclarations() + "DESCRIBE ";
+
+ GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query);
+
+ ValueFactory vf = conn.getValueFactory();
+ IRI urn1 = vf.createIRI("urn:1");
+ IRI p1 = vf.createIRI("urn:p1");
+ IRI p2 = vf.createIRI("urn:p2");
+ IRI urn2 = vf.createIRI("urn:2");
+ IRI blank = vf.createIRI("urn:blank");
+
+ try (GraphQueryResult evaluate = gq.evaluate()) {
+ Model result = QueryResults.asModel(evaluate);
+ assertThat(result.contains(urn1, p1, null)).isTrue();
+ assertThat(result.contains(null, blank, urn1)).isTrue();
+ assertThat(result.contains(urn2, p2, null)).isTrue();
+ }
}
+ closeRepository(repo);
}
private void testDescribeMultipleB() {
- String update = "insert data { . [] . . } ";
- conn.prepareUpdate(QueryLanguage.SPARQL, update).execute();
-
- String query = getNamespaceDeclarations() + "DESCRIBE ";
-
- GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query);
-
- ValueFactory vf = conn.getValueFactory();
- IRI urn1 = vf.createIRI("urn:1");
- IRI p1 = vf.createIRI("urn:p1");
- IRI p2 = vf.createIRI("urn:p2");
- IRI urn2 = vf.createIRI("urn:2");
- IRI blank = vf.createIRI("urn:blank");
- try (GraphQueryResult evaluate = gq.evaluate()) {
- Model result = QueryResults.asModel(evaluate);
-
- assertThat(result.contains(urn1, p1, null)).isTrue();
- assertThat(result.contains(urn1, blank, null)).isTrue();
- assertThat(result.contains(urn2, p2, null)).isTrue();
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String update = "insert data { . [] . . } ";
+ conn.prepareUpdate(QueryLanguage.SPARQL, update).execute();
+
+ String query = getNamespaceDeclarations() + "DESCRIBE ";
+
+ GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query);
+
+ ValueFactory vf = conn.getValueFactory();
+ IRI urn1 = vf.createIRI("urn:1");
+ IRI p1 = vf.createIRI("urn:p1");
+ IRI p2 = vf.createIRI("urn:p2");
+ IRI urn2 = vf.createIRI("urn:2");
+ IRI blank = vf.createIRI("urn:blank");
+ try (GraphQueryResult evaluate = gq.evaluate()) {
+ Model result = QueryResults.asModel(evaluate);
+
+ assertThat(result.contains(urn1, p1, null)).isTrue();
+ assertThat(result.contains(urn1, blank, null)).isTrue();
+ assertThat(result.contains(urn2, p2, null)).isTrue();
+ }
}
+ closeRepository(repo);
}
private void testDescribeMultipleC() {
- String update = "insert data { . [] . [] . . } ";
- conn.prepareUpdate(QueryLanguage.SPARQL, update).execute();
-
- String query = getNamespaceDeclarations() + "DESCRIBE ";
-
- GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query);
-
- ValueFactory vf = conn.getValueFactory();
- IRI urn1 = vf.createIRI("urn:1");
- IRI p1 = vf.createIRI("urn:p1");
- IRI p2 = vf.createIRI("urn:p2");
- IRI urn2 = vf.createIRI("urn:2");
- IRI blank = vf.createIRI("urn:blank");
- try (GraphQueryResult evaluate = gq.evaluate()) {
- Model result = QueryResults.asModel(evaluate);
-
- assertThat(result.contains(urn1, p1, null)).isTrue();
- assertThat(result.contains(urn1, blank, null)).isTrue();
- assertThat(result.contains(null, blank, urn1)).isTrue();
- assertThat(result.contains(urn2, p2, null)).isTrue();
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String update = "insert data { . [] . [] . . } ";
+ conn.prepareUpdate(QueryLanguage.SPARQL, update).execute();
+
+ String query = getNamespaceDeclarations() + "DESCRIBE ";
+
+ GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query);
+
+ ValueFactory vf = conn.getValueFactory();
+ IRI urn1 = vf.createIRI("urn:1");
+ IRI p1 = vf.createIRI("urn:p1");
+ IRI p2 = vf.createIRI("urn:p2");
+ IRI urn2 = vf.createIRI("urn:2");
+ IRI blank = vf.createIRI("urn:blank");
+ try (GraphQueryResult evaluate = gq.evaluate()) {
+ Model result = QueryResults.asModel(evaluate);
+
+ assertThat(result.contains(urn1, p1, null)).isTrue();
+ assertThat(result.contains(urn1, blank, null)).isTrue();
+ assertThat(result.contains(null, blank, urn1)).isTrue();
+ assertThat(result.contains(urn2, p2, null)).isTrue();
+ }
}
+ closeRepository(repo);
}
private void testDescribeMultipleD() {
- String update = "insert data { . [] . . [] . . [] .} ";
- conn.prepareUpdate(QueryLanguage.SPARQL, update).execute();
-
- String query = getNamespaceDeclarations() + "DESCRIBE ";
-
- GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query);
-
- ValueFactory vf = conn.getValueFactory();
- IRI urn1 = vf.createIRI("urn:1");
- IRI p1 = vf.createIRI("urn:p1");
- IRI p2 = vf.createIRI("urn:p2");
- IRI urn2 = vf.createIRI("urn:2");
- IRI urn4 = vf.createIRI("urn:4");
- IRI blank = vf.createIRI("urn:blank");
- try (GraphQueryResult evaluate = gq.evaluate()) {
- Model result = QueryResults.asModel(evaluate);
-
- assertThat(result.contains(urn1, p1, null)).isTrue();
- assertThat(result.contains(null, blank, urn1)).isTrue();
- assertThat(result.contains(urn2, p2, null)).isTrue();
- assertThat(result.contains(urn4, p2, null)).isTrue();
- assertThat(result.contains(urn4, blank, null)).isTrue();
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String update = "insert data { . [] . . [] . . [] .} ";
+ conn.prepareUpdate(QueryLanguage.SPARQL, update).execute();
+
+ String query = getNamespaceDeclarations() + "DESCRIBE ";
+
+ GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query);
+
+ ValueFactory vf = conn.getValueFactory();
+ IRI urn1 = vf.createIRI("urn:1");
+ IRI p1 = vf.createIRI("urn:p1");
+ IRI p2 = vf.createIRI("urn:p2");
+ IRI urn2 = vf.createIRI("urn:2");
+ IRI urn4 = vf.createIRI("urn:4");
+ IRI blank = vf.createIRI("urn:blank");
+ try (GraphQueryResult evaluate = gq.evaluate()) {
+ Model result = QueryResults.asModel(evaluate);
+
+ assertThat(result.contains(urn1, p1, null)).isTrue();
+ assertThat(result.contains(null, blank, urn1)).isTrue();
+ assertThat(result.contains(urn2, p2, null)).isTrue();
+ assertThat(result.contains(urn4, p2, null)).isTrue();
+ assertThat(result.contains(urn4, blank, null)).isTrue();
+ }
}
+ closeRepository(repo);
}
public Stream<DynamicTest> tests() {
diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ExistsTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ExistsTest.java
index 8cc086b2cc9..dc0808f7b06 100644
--- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ExistsTest.java
+++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/ExistsTest.java
@@ -13,6 +13,7 @@
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.List;
+import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@@ -21,9 +22,9 @@
import org.eclipse.rdf4j.query.BindingSet;
import org.eclipse.rdf4j.query.TupleQuery;
import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest;
import org.junit.jupiter.api.DynamicTest;
-import org.junit.jupiter.api.Test;
/**
* Test for queries using EXISTS
@@ -32,37 +33,40 @@
*/
public class ExistsTest extends AbstractComplianceTest {
- public ExistsTest(Repository repo) {
+ public ExistsTest(Supplier<Repository> repo) {
super(repo);
}
private void testFilterNotExistsBindingToCurrentSolutionMapping() {
-
- String ex = "http://example/";
- IRI a1 = Values.iri(ex, "a1");
- IRI a2 = Values.iri(ex, "a2");
-
- IRI both = Values.iri(ex, "both");
-
- IRI predicate1 = Values.iri(ex, "predicate1");
- IRI predicate2 = Values.iri(ex, "predicate2");
-
- conn.add(a1, predicate1, both);
- conn.add(a1, predicate2, both);
-
- conn.add(a2, predicate1, both);
- conn.add(a2, predicate2, Values.bnode());
-
- TupleQuery tupleQuery = conn.prepareTupleQuery("PREFIX : \n" + "SELECT * WHERE {\n"
- + " ?a :predicate1 ?p1\n" + " FILTER NOT EXISTS {\n" + " ?a :predicate2 ?p2 .\n"
- + " FILTER(?p2 = ?p1)\n" + " }\n" + "}\n");
-
- try (Stream stream = tupleQuery.evaluate().stream()) {
- List collect = stream.collect(Collectors.toList());
- assertEquals(1, collect.size());
- assertEquals(a2, collect.get(0).getValue("a"));
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String ex = "http://example/";
+ IRI a1 = Values.iri(ex, "a1");
+ IRI a2 = Values.iri(ex, "a2");
+
+ IRI both = Values.iri(ex, "both");
+
+ IRI predicate1 = Values.iri(ex, "predicate1");
+ IRI predicate2 = Values.iri(ex, "predicate2");
+
+ conn.add(a1, predicate1, both);
+ conn.add(a1, predicate2, both);
+
+ conn.add(a2, predicate1, both);
+ conn.add(a2, predicate2, Values.bnode());
+
+ TupleQuery tupleQuery = conn.prepareTupleQuery("PREFIX : \n" + "SELECT * WHERE {\n"
+ + " ?a :predicate1 ?p1\n" + " FILTER NOT EXISTS {\n" + " ?a :predicate2 ?p2 .\n"
+ + " FILTER(?p2 = ?p1)\n" + " }\n" + "}\n");
+
+ try (Stream stream = tupleQuery.evaluate().stream()) {
+ List collect = stream.collect(Collectors.toList());
+ assertEquals(1, collect.size());
+ assertEquals(a2, collect.get(0).getValue("a"));
+ }
+ } finally {
+ closeRepository(repo);
}
-
}
public Stream<DynamicTest> tests() {
diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/GroupByTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/GroupByTest.java
index ab60e476490..c5913728458 100644
--- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/GroupByTest.java
+++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/GroupByTest.java
@@ -13,14 +13,15 @@
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
+import java.util.function.Supplier;
import java.util.stream.Stream;
import org.eclipse.rdf4j.query.TupleQuery;
import org.eclipse.rdf4j.query.TupleQueryResult;
import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest;
import org.junit.jupiter.api.DynamicTest;
-import org.junit.jupiter.api.Test;
/**
* Tests on SPARQL GROUP BY
@@ -29,18 +30,23 @@
*/
public class GroupByTest extends AbstractComplianceTest {
- public GroupByTest(Repository repo) {
+ public GroupByTest(Supplier<Repository> repo) {
super(repo);
}
private void testGroupByEmpty() {
- // see issue https://github.com/eclipse/rdf4j/issues/573
- String query = "select ?x where {?x ?p ?o} group by ?x";
-
- TupleQuery tq = conn.prepareTupleQuery(query);
- try (TupleQueryResult result = tq.evaluate()) {
- assertNotNull(result);
- assertFalse(result.hasNext());
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ // see issue https://github.com/eclipse/rdf4j/issues/573
+ String query = "select ?x where {?x ?p ?o} group by ?x";
+
+ TupleQuery tq = conn.prepareTupleQuery(query);
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertNotNull(result);
+ assertFalse(result.hasNext());
+ }
+ } finally {
+ closeRepository(repo);
}
}
diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/InTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/InTest.java
index fc4f14f282a..e139d5f869f 100644
--- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/InTest.java
+++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/InTest.java
@@ -16,6 +16,7 @@
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
+import java.util.function.Supplier;
import java.util.stream.Stream;
import org.eclipse.rdf4j.model.Literal;
@@ -27,9 +28,9 @@
import org.eclipse.rdf4j.query.TupleQuery;
import org.eclipse.rdf4j.query.TupleQueryResult;
import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest;
import org.junit.jupiter.api.DynamicTest;
-import org.junit.jupiter.api.Test;
/**
* Tests on the IN operator.
@@ -39,58 +40,73 @@
*/
public class InTest extends AbstractComplianceTest {
- public InTest(Repository repo) {
+ public InTest(Supplier<Repository> repo) {
super(repo);
}
private void testInComparison1() throws Exception {
- loadTestData("/testdata-query/dataset-ses1913.trig");
- String query = " PREFIX : \n"
- + " SELECT ?y WHERE { :a :p ?y. FILTER(?y in (:c, :d, 1/0 , 1)) } ";
-
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-
- try (TupleQueryResult result = tq.evaluate()) {
- assertNotNull(result);
- assertTrue(result.hasNext());
-
- BindingSet bs = result.next();
- Value y = bs.getValue("y");
- assertNotNull(y);
- assertTrue(y instanceof Literal);
- assertEquals(literal("1", CoreDatatype.XSD.INTEGER), y);
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/dataset-ses1913.trig", conn);
+ String query = " PREFIX : \n"
+ + " SELECT ?y WHERE { :a :p ?y. FILTER(?y in (:c, :d, 1/0 , 1)) } ";
+
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertNotNull(result);
+ assertTrue(result.hasNext());
+
+ BindingSet bs = result.next();
+ Value y = bs.getValue("y");
+ assertNotNull(y);
+ assertTrue(y instanceof Literal);
+ assertEquals(literal("1", CoreDatatype.XSD.INTEGER), y);
+ }
+ } finally {
+ closeRepository(repo);
}
}
private void testInComparison2() throws Exception {
- loadTestData("/testdata-query/dataset-ses1913.trig");
- String query = " PREFIX : \n"
- + " SELECT ?y WHERE { :a :p ?y. FILTER(?y in (:c, :d, 1/0)) } ";
-
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-
- try (TupleQueryResult result = tq.evaluate()) {
- assertNotNull(result);
- assertFalse(result.hasNext());
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/dataset-ses1913.trig", conn);
+ String query = " PREFIX : \n"
+ + " SELECT ?y WHERE { :a :p ?y. FILTER(?y in (:c, :d, 1/0)) } ";
+
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertNotNull(result);
+ assertFalse(result.hasNext());
+ }
+ } finally {
+ closeRepository(repo);
}
}
private void testInComparison3() throws Exception {
- loadTestData("/testdata-query/dataset-ses1913.trig");
- String query = " PREFIX : \n"
- + " SELECT ?y WHERE { :a :p ?y. FILTER(?y in (:c, :d, 1, 1/0)) } ";
-
- TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-
- try (TupleQueryResult result = tq.evaluate()) {
- assertNotNull(result);
- assertTrue(result.hasNext());
-
- BindingSet bs = result.next();
- Value y = bs.getValue("y");
- assertNotNull(y);
- assertTrue(y instanceof Literal);
- assertEquals(literal("1", XSD.INTEGER), y);
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ loadTestData("/testdata-query/dataset-ses1913.trig", conn);
+ String query = " PREFIX : \n"
+ + " SELECT ?y WHERE { :a :p ?y. FILTER(?y in (:c, :d, 1, 1/0)) } ";
+
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+
+ try (TupleQueryResult result = tq.evaluate()) {
+ assertNotNull(result);
+ assertTrue(result.hasNext());
+
+ BindingSet bs = result.next();
+ Value y = bs.getValue("y");
+ assertNotNull(y);
+ assertTrue(y instanceof Literal);
+ assertEquals(literal("1", XSD.INTEGER), y);
+ }
+ } finally {
+ closeRepository(repo);
}
}
diff --git a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/MinusTest.java b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/MinusTest.java
index 0a6226fd39a..58c8ebf1b25 100644
--- a/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/MinusTest.java
+++ b/testsuites/sparql/src/main/java/org/eclipse/rdf4j/testsuite/sparql/tests/MinusTest.java
@@ -13,6 +13,7 @@
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.List;
+import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@@ -22,6 +23,7 @@
import org.eclipse.rdf4j.query.BindingSet;
import org.eclipse.rdf4j.query.TupleQuery;
import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.testsuite.sparql.AbstractComplianceTest;
import org.junit.jupiter.api.DynamicTest;
@@ -32,41 +34,44 @@
*/
public class MinusTest extends AbstractComplianceTest {
- public MinusTest(Repository repo) {
+ public MinusTest(Supplier<Repository> repo) {
super(repo);
}
private void testScopingOfFilterInMinus() {
+ Repository repo = openRepository();
+ try (RepositoryConnection conn = repo.getConnection()) {
+ String ex = "http://example/";
+ IRI a1 = Values.iri(ex, "a1");
+ IRI a2 = Values.iri(ex, "a2");
- String ex = "http://example/";
- IRI a1 = Values.iri(ex, "a1");
- IRI a2 = Values.iri(ex, "a2");
+ IRI both = Values.iri(ex, "both");
- IRI both = Values.iri(ex, "both");
+ IRI predicate1 = Values.iri(ex, "predicate1");
+ IRI predicate2 = Values.iri(ex, "predicate2");
- IRI predicate1 = Values.iri(ex, "predicate1");
- IRI predicate2 = Values.iri(ex, "predicate2");
+ conn.add(a1, predicate1, both);
+ conn.add(a1, predicate2, both);
- conn.add(a1, predicate1, both);
- conn.add(a1, predicate2, both);
+ conn.add(a2, predicate1, both);
+ conn.add(a2, predicate2, Values.bnode());
- conn.add(a2, predicate1, both);
- conn.add(a2, predicate2, Values.bnode());
+ TupleQuery tupleQuery = conn.prepareTupleQuery(
+ "PREFIX : \n" + "SELECT * WHERE {\n" + " ?a :predicate1 ?p1\n" + " MINUS {\n"
+ + " ?a :predicate2 ?p2 .\n" + " FILTER(?p2 = ?p1)\n" + " }\n" + "} ORDER BY ?a\n");
- TupleQuery tupleQuery = conn.prepareTupleQuery(
- "PREFIX : \n" + "SELECT * WHERE {\n" + " ?a :predicate1 ?p1\n" + " MINUS {\n"
- + " ?a :predicate2 ?p2 .\n" + " FILTER(?p2 = ?p1)\n" + " }\n" + "} ORDER BY ?a\n");
+ try (Stream stream = tupleQuery.evaluate().stream()) {
+ List