diff --git a/jena-arq/src/main/java/org/apache/jena/http/AsyncHttpRDF.java b/jena-arq/src/main/java/org/apache/jena/http/AsyncHttpRDF.java index 55b6b3e88d2..d1240607e33 100644 --- a/jena-arq/src/main/java/org/apache/jena/http/AsyncHttpRDF.java +++ b/jena-arq/src/main/java/org/apache/jena/http/AsyncHttpRDF.java @@ -206,6 +206,7 @@ public static T getOrElseThrow(CompletableFuture cf, HttpRequest httpRequ } } + /** * MUST consume or close the input stream * @see HttpLib#finish(HttpResponse) diff --git a/jena-arq/src/main/java/org/apache/jena/http/sys/ExecHTTPBuilder.java b/jena-arq/src/main/java/org/apache/jena/http/sys/ExecHTTPBuilder.java index c62c468534f..cea0a6e2778 100644 --- a/jena-arq/src/main/java/org/apache/jena/http/sys/ExecHTTPBuilder.java +++ b/jena-arq/src/main/java/org/apache/jena/http/sys/ExecHTTPBuilder.java @@ -29,6 +29,7 @@ import org.apache.jena.riot.web.HttpNames; import org.apache.jena.sparql.core.Var; import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.engine.dispatch.SparqlDispatcherRegistry; import org.apache.jena.sparql.exec.http.Params; import org.apache.jena.sparql.exec.http.QuerySendMode; import org.apache.jena.sparql.syntax.syntaxtransform.QueryTransformOps; @@ -46,7 +47,7 @@ public abstract class ExecHTTPBuilder { protected String serviceURL = null; private Query query = null; protected String queryString = null; - protected boolean parseCheck = true; + protected Optional parseCheck = Optional.empty(); private HttpClient httpClient = null; protected Map httpHeaders = new HashMap<>(); protected Params params = Params.create(); @@ -84,10 +85,14 @@ public Y endpoint(String serviceURL) { /** Whether to parse query strings passed to {@link #query(String)}. */ public Y parseCheck(boolean parseCheck) { - this.parseCheck = parseCheck; + this.parseCheck = Optional.of(parseCheck); return thisBuilder(); } + protected boolean effectiveParseCheck() { + return SparqlDispatcherRegistry.effectiveParseCheck(parseCheck, contextAcc); + } + /** Set the query - this also sets the query string to agree with the query argument. */ public Y query(Query query) { Objects.requireNonNull(query); @@ -102,14 +107,14 @@ public Y query(Query query) { */ public Y query(String queryStr) { Objects.requireNonNull(queryStr); - Query query = parseCheck ? QueryFactory.create(queryStr) : null; + Query query = effectiveParseCheck() ? QueryFactory.create(queryStr) : null; setQuery(query, queryStr); return thisBuilder(); } public Y query(String queryStr, Syntax syntax) { Objects.requireNonNull(queryStr); - Query query = QueryFactory.create(queryStr, syntax); + Query query = effectiveParseCheck() ? 
QueryFactory.create(queryStr, syntax) : null; setQuery(query, queryStr); return thisBuilder(); } diff --git a/jena-arq/src/main/java/org/apache/jena/http/sys/ExecUpdateHTTPBuilder.java b/jena-arq/src/main/java/org/apache/jena/http/sys/ExecUpdateHTTPBuilder.java index 22e551c4cee..66a31257a11 100644 --- a/jena-arq/src/main/java/org/apache/jena/http/sys/ExecUpdateHTTPBuilder.java +++ b/jena-arq/src/main/java/org/apache/jena/http/sys/ExecUpdateHTTPBuilder.java @@ -21,13 +21,13 @@ import java.net.http.HttpClient; import java.util.*; import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; import org.apache.jena.graph.Node; import org.apache.jena.http.HttpEnv; import org.apache.jena.query.ARQ; import org.apache.jena.sparql.core.Var; import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.engine.dispatch.SparqlDispatcherRegistry; import org.apache.jena.sparql.exec.http.Params; import org.apache.jena.sparql.exec.http.UpdateSendMode; import org.apache.jena.sparql.syntax.syntaxtransform.UpdateTransformOps; @@ -37,110 +37,14 @@ import org.apache.jena.sys.JenaSystem; import org.apache.jena.update.Update; import org.apache.jena.update.UpdateException; -import org.apache.jena.update.UpdateFactory; import org.apache.jena.update.UpdateRequest; public abstract class ExecUpdateHTTPBuilder { - /** Update element. Either an Update object or a string. */ - private record UpdateElt(Update update, String updateString) { - UpdateElt(Update update) { this(Objects.requireNonNull(update), null); } - UpdateElt(String updateString) { this(null, Objects.requireNonNull(updateString)); } - boolean isParsed() { return update != null; } - - @Override - public String toString() { - return isParsed() - ? new UpdateRequest(update()).toString() // Reuse UpdateRequest's serialization approach - : updateString(); - } - } - - /** Accumulator for update elements. Can build an overall string or UpdateRequest from the elements. */ - private class UpdateEltAcc implements Iterable { - /** Delimiter for joining multiple SPARQL update strings into a single one. - * The delimiter takes into account that the last line of a statement may be a single-line-comment. */ - public static final String DELIMITER = "\n;\n"; - - private List updateOperations = new ArrayList<>(); - private List updateOperationsView = Collections.unmodifiableList(updateOperations); - private boolean isParsed = true; // True iff there are no strings in updateOperations - - public boolean isParsed() { - return isParsed; - } - - public void add(UpdateElt updateElt) { - isParsed = isParsed && updateElt.isParsed(); - updateOperations.add(updateElt); - } - - public void add(Update update) { - add(new UpdateElt(update)); - } - - /** Add a string by parsing it. */ - public void add(String updateRequestString) { - UpdateRequest updateRequest = UpdateFactory.create(updateRequestString); - add(updateRequest); - } - - public void add(UpdateRequest updateRequest) { - updateRequest.getOperations().forEach(this::add); - } - - /** Add a string without parsing it. */ - public void addString(String updateRequestString) { - add(new UpdateElt(updateRequestString)); - } - - /** Attempt to build an UpdateRequest from the state of this accumulator. Attempts to parse any string elements. 
*/ - public UpdateRequest buildUpdateRequest() { - return addToUpdateRequest(new UpdateRequest()); - } - - public UpdateRequest addToUpdateRequest(UpdateRequest updateRequest) { - for (UpdateElt elt : updateOperations) { - if (elt.isParsed()) { - updateRequest.add(elt.update()); - } else { - try { - updateRequest.add(elt.updateString()); - } catch (Exception e) { - // Expose the string that failed to parse - e.addSuppressed(new RuntimeException("Failed to parse: " + elt.updateString())); - throw e; - } - } - } - return updateRequest; - } - - public void clear() { - updateOperations.clear(); - isParsed = true; - } - - public boolean isEmpty() { - return updateOperations.isEmpty(); - } - - @Override - public Iterator iterator() { - return updateOperationsView.iterator(); - } - - public String buildString() { - return updateOperations.stream() - .map(UpdateElt::toString) - .collect(Collectors.joining(DELIMITER)); - } - } - static { JenaSystem.init(); } protected String serviceURL; - protected boolean parseCheck = true; + protected Optional parseCheck = Optional.empty(); private UpdateEltAcc updateEltAcc = new UpdateEltAcc(); protected Params params = Params.create(); @@ -173,7 +77,7 @@ public Y update(UpdateRequest updateRequest) { public Y update(String updateRequestString) { Objects.requireNonNull(updateRequestString); - if (parseCheck) { + if (effectiveParseCheck()) { updateEltAcc.add(updateRequestString); } else { updateEltAcc.addString(updateRequestString); @@ -201,10 +105,14 @@ public Y updateString(String updateString) { } public Y parseCheck(boolean parseCheck) { - this.parseCheck = parseCheck; + this.parseCheck = Optional.of(parseCheck); return thisBuilder(); } + protected boolean effectiveParseCheck() { + return SparqlDispatcherRegistry.effectiveParseCheck(parseCheck, contextAcc); + } + public Y substitution(Binding binding) { binding.forEach(this.substitutionMap::put); return thisBuilder(); diff --git a/jena-arq/src/main/java/org/apache/jena/http/sys/UpdateElt.java b/jena-arq/src/main/java/org/apache/jena/http/sys/UpdateElt.java new file mode 100644 index 00000000000..aff845e5415 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/http/sys/UpdateElt.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.http.sys; + +import java.util.Objects; + +import org.apache.jena.update.Update; +import org.apache.jena.update.UpdateRequest; + +/** Update element. Either an Update object or a string. 
*/ +record UpdateElt(Update update, String updateString) { + UpdateElt(Update update) { this(Objects.requireNonNull(update), null); } + UpdateElt(String updateString) { this(null, Objects.requireNonNull(updateString)); } + boolean isParsed() { return update != null; } + + @Override + public String toString() { + return isParsed() + ? new UpdateRequest(update()).toString() // Reuse UpdateRequest's serialization approach + : updateString(); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/http/sys/UpdateEltAcc.java b/jena-arq/src/main/java/org/apache/jena/http/sys/UpdateEltAcc.java new file mode 100644 index 00000000000..b65554fd0f5 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/http/sys/UpdateEltAcc.java @@ -0,0 +1,110 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.http.sys; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.stream.Collectors; + +import org.apache.jena.update.Update; +import org.apache.jena.update.UpdateFactory; +import org.apache.jena.update.UpdateRequest; + +/** Accumulator for update elements. Can build an overall string or UpdateRequest from the elements. */ +public class UpdateEltAcc implements Iterable { + /** Delimiter for joining multiple SPARQL update strings into a single one. + * The delimiter takes into account that the last line of a statement may be a single-line-comment. */ + public static final String DELIMITER = "\n;\n"; + + private List updateOperations = new ArrayList<>(); + private List updateOperationsView = Collections.unmodifiableList(updateOperations); + private boolean isParsed = true; // True iff there are no strings in updateOperations + + public boolean isParsed() { + return isParsed; + } + + public void add(UpdateElt updateElt) { + isParsed = isParsed && updateElt.isParsed(); + updateOperations.add(updateElt); + } + + public void add(Update update) { + add(new UpdateElt(update)); + } + + /** Add a string by parsing it. */ + public void add(String updateRequestString) { + UpdateRequest updateRequest = UpdateFactory.create(updateRequestString); + add(updateRequest); + } + + public void add(UpdateRequest updateRequest) { + updateRequest.getOperations().forEach(this::add); + } + + /** Add a string without parsing it. */ + public void addString(String updateRequestString) { + add(new UpdateElt(updateRequestString)); + } + + /** Attempt to build an UpdateRequest from the state of this accumulator. Attempts to parse any string elements. 
*/ + public UpdateRequest buildUpdateRequest() { + return addToUpdateRequest(new UpdateRequest()); + } + + public UpdateRequest addToUpdateRequest(UpdateRequest updateRequest) { + for (UpdateElt elt : updateOperations) { + if (elt.isParsed()) { + updateRequest.add(elt.update()); + } else { + try { + updateRequest.add(elt.updateString()); + } catch (Exception e) { + // Expose the string that failed to parse + e.addSuppressed(new RuntimeException("Failed to parse: " + elt.updateString())); + throw e; + } + } + } + return updateRequest; + } + + public void clear() { + updateOperations.clear(); + isParsed = true; + } + + public boolean isEmpty() { + return updateOperations.isEmpty(); + } + + @Override + public Iterator iterator() { + return updateOperationsView.iterator(); + } + + public String buildString() { + return updateOperations.stream() + .map(UpdateElt::toString) + .collect(Collectors.joining(DELIMITER)); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/riot/system/StreamRDFOps.java b/jena-arq/src/main/java/org/apache/jena/riot/system/StreamRDFOps.java index dae935e5816..8483ab18997 100644 --- a/jena-arq/src/main/java/org/apache/jena/riot/system/StreamRDFOps.java +++ b/jena-arq/src/main/java/org/apache/jena/riot/system/StreamRDFOps.java @@ -104,6 +104,11 @@ public static void sendGraphToStream(Graph graph, StreamRDF stream, String baseU stream.base(baseURI); if ( prefixMap != null ) sendPrefixesToStream(prefixMap, stream) ; + sendGraphTriplesToStream(graph, stream); + } + + /** Send only the triples of graph to a StreamRDF */ + public static void sendGraphTriplesToStream(Graph graph, StreamRDF stream) { ExtendedIterator iter = graph.find(null, null, null) ; try { StreamRDFOps.sendTriplesToStream(iter, stream) ; diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/ARQConstants.java b/jena-arq/src/main/java/org/apache/jena/sparql/ARQConstants.java index 8c19d110b04..1d3fb188539 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/ARQConstants.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/ARQConstants.java @@ -317,6 +317,12 @@ public class ARQConstants public static final Symbol registryExtensions = SystemARQ.allocSymbol("registryExtensions") ; - public static void init() {} + public static final Symbol registrySparqlDispatchers = + SystemARQ.allocSymbol("registrySparqlDispatchers") ; + + /** Symbol for disabling parse checks of queries and updates when executing them against a dataset */ + public static final Symbol parseCheck = + SystemARQ.allocSymbol("parseCheck") ; + public static void init() {} } diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/engine/Timeouts.java b/jena-arq/src/main/java/org/apache/jena/sparql/engine/Timeouts.java index 5eb30c690ba..e91e63293ee 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/engine/Timeouts.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/engine/Timeouts.java @@ -233,13 +233,23 @@ public static String toString(Timeout timeout) { return result; } - // Set times from context if not set directly. e..g Context provides default values. - // Contrast with SPARQLQueryProcessor where the context is limiting values of the protocol parameter. + /** + * Update unset values in the builder with values from the context. + * + * Set times from context if not set directly, i.e. context provides default values. + * Contrast with SPARQLQueryProcessor where the context is limiting values of the protocol parameter. 
+ */ public static void applyDefaultQueryTimeoutFromContext(TimeoutBuilderImpl builder, Context cxt) { Timeout queryTimeout = extractQueryTimeout(cxt); applyDefaultTimeout(builder, queryTimeout); } + /** Update unset values in the builder with values from the context. */ + public static void applyDefaultUpdateTimeoutFromContext(TimeoutBuilderImpl builder, Context cxt) { + Timeout queryTimeout = extractUpdateTimeout(cxt); + applyDefaultTimeout(builder, queryTimeout); + } + /** Returns milliseconds if the given time unit is null. */ private static TimeUnit nullToMillis(TimeUnit unit) { return unit != null ? unit : TimeUnit.MILLISECONDS; diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/ChainingQueryDispatcher.java b/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/ChainingQueryDispatcher.java new file mode 100644 index 00000000000..0ae6f3ee0d3 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/ChainingQueryDispatcher.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.engine.dispatch; + +import org.apache.jena.query.Query; +import org.apache.jena.query.Syntax; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.exec.QueryExec; +import org.apache.jena.sparql.util.Context; + +/** + * A query dispatcher is responsible for taking a query and routing it to the + * appropriate component or system for preparing the execution against a dataset. + * The result is a {@linkplain QueryExec} instance. + * Queries may be passed to dispatchers in syntactic or string representation. + * + * Query dispatchers form a chain, and a ChainingQueryDispatcher acts as a link in such a chain. + * A ChainingQueryDispatcher instance can choose to process a query by itself or to delegate processing to the + * remainder of the chain. + * + * @see SparqlDispatcherRegistry + */ +public interface ChainingQueryDispatcher { + QueryExec create(Query query, DatasetGraph dsg, Binding initialBinding, Context context, QueryDispatcher chain); + QueryExec create(String queryString, Syntax syntax, DatasetGraph dsg, Binding initialBinding, Context context, QueryDispatcher chain); +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/ChainingUpdateDispatcher.java b/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/ChainingUpdateDispatcher.java new file mode 100644 index 00000000000..a1cbf2c5495 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/ChainingUpdateDispatcher.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.engine.dispatch; + +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.exec.UpdateExec; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.update.UpdateRequest; + +/** + * An update dispatcher is responsible for taking an update request and routing it to the + * appropriate component or system for preparing the execution against a dataset. + * The result is a {@linkplain UpdateExec} instance. + * Update requests may be passed to dispatchers in syntactic or string representation. + * + * Update dispatchers form a chain, and a ChainingUpdateDispatcher acts as a link in such a chain. + * A ChainingUpdateDispatcher instance can choose to process an update request by itself or to delegate processing to the + * remainder of the chain. + * + * @see SparqlDispatcherRegistry + */ +public interface ChainingUpdateDispatcher { + UpdateExec create(String updateRequestString, DatasetGraph dsg, Binding initialBinding, Context context, UpdateDispatcher chain); + UpdateExec create(UpdateRequest updateRequest, DatasetGraph dsg, Binding initialBinding, Context context, UpdateDispatcher chain); +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/DatasetGraphOverSparql.java b/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/DatasetGraphOverSparql.java new file mode 100644 index 00000000000..63fd3448dcb --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/DatasetGraphOverSparql.java @@ -0,0 +1,527 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.jena.sparql.engine.dispatch; + +import static org.apache.jena.query.ReadWrite.WRITE; + +import java.util.Iterator; +import java.util.List; +import java.util.Objects; +import java.util.function.Function; + +import org.apache.jena.atlas.iterator.Iter; +import org.apache.jena.atlas.iterator.IteratorCloseable; +import org.apache.jena.graph.Graph; +import org.apache.jena.graph.Node; +import org.apache.jena.graph.Triple; +import org.apache.jena.query.Query; +import org.apache.jena.query.QueryFactory; +import org.apache.jena.query.ReadWrite; +import org.apache.jena.query.TxnType; +import org.apache.jena.riot.system.PrefixMap; +import org.apache.jena.riot.system.PrefixMapFactory; +import org.apache.jena.riot.system.Prefixes; +import org.apache.jena.riot.system.StreamRDF; +import org.apache.jena.sparql.JenaTransactionException; +import org.apache.jena.sparql.core.BasicPattern; +import org.apache.jena.sparql.core.DatasetGraphBase; +import org.apache.jena.sparql.core.GraphView; +import org.apache.jena.sparql.core.Quad; +import org.apache.jena.sparql.core.Substitute; +import org.apache.jena.sparql.core.Transactional; +import org.apache.jena.sparql.core.TransactionalNull; +import org.apache.jena.sparql.core.Var; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.engine.binding.BindingFactory; +import org.apache.jena.sparql.exec.QueryExec; +import org.apache.jena.sparql.exec.UpdateExec; +import org.apache.jena.sparql.expr.Expr; +import org.apache.jena.sparql.expr.aggregate.AggCount; +import org.apache.jena.sparql.modify.request.QuadAcc; +import org.apache.jena.sparql.modify.request.QuadDataAcc; +import org.apache.jena.sparql.modify.request.Target; +import org.apache.jena.sparql.modify.request.UpdateClear; +import org.apache.jena.sparql.modify.request.UpdateDataDelete; +import org.apache.jena.sparql.modify.request.UpdateDataInsert; +import org.apache.jena.sparql.modify.request.UpdateDeleteWhere; +import org.apache.jena.sparql.modify.request.UpdateDrop; +import org.apache.jena.sparql.syntax.Element; +import org.apache.jena.sparql.syntax.ElementNamedGraph; +import org.apache.jena.sparql.syntax.ElementTriplesBlock; +import org.apache.jena.sparql.syntax.ElementUnion; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.update.Update; +import org.apache.jena.update.UpdateRequest; + +/** + * This class provides a base implementation of the Jena DatasetGraph interface + * to a remote SPARQL endpoint. Efficiency not guaranteed. + * + * Any returned iterators must be closed to free the resources. + * This base class does not support transactions. + * + * All inserts are passed on as SPARQL update requests. + * Blank nodes should be avoided because they are likely to become renamed across separate requests. + * + * Invocation of deleteAny() across the default graph and all named graphs fires two requests. + * All other methods fire a single request. + */ +public abstract class DatasetGraphOverSparql + extends DatasetGraphBase +{ + private PrefixMap prefixes = PrefixMapFactory.create(); + private Transactional transactional = TransactionalNull.create(); + + public DatasetGraphOverSparql() { + super(); + initContext(); + } + + protected PrefixMap getPrefixes() { + return prefixes; + } + + protected Transactional getTransactional() { + return transactional; + } + + protected void initContext() { + Context cxt = getContext(); + // Use the context to advertise that SPARQL statements should not be parsed. 
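+        // Components that honour this flag (see SparqlDispatcherRegistry.effectiveParseCheck)
+        // can then pass query and update strings through without an extra client-side parse.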
+ SparqlDispatcherRegistry.setParseCheck(cxt, false); + } + + protected abstract QueryExec query(Query query); + protected abstract UpdateExec update(UpdateRequest UpdateRequest); + + protected void execUpdate(Update update) { + execUpdate(new UpdateRequest(update)); + } + + protected void execUpdate(UpdateRequest updateRequest) { + UpdateExec uExec = update(updateRequest); + uExec.execute(); + } + + /** + * This method must return a StreamRDF instance that handles bulk inserts of RDF tuples (triples or quads). + * The default implementation flushes every 1000 tuples. + * Alternative implementations could e.g. flush by the string length of the update request. + */ + protected StreamRDF newUpdateSink() { + StreamRDF sink = new StreamRDFToUpdateRequest(this::execUpdate, Prefixes.adapt(getPrefixes()), 1000); + return sink; + } + + @Override + public Iterator listGraphNodes() { + QueryExec qExec = query(graphsQuery); + return Iter.onClose( + Iter.map(qExec.select(), b -> b.get(vg)), + qExec::close); + } + + @Override + public Iterator find(Node g, Node s, Node p, Node o) { + Iterator result; + if (g == null || Node.ANY.equals(g)) { + result = findTriplesOrQuads(this::query, s, p, o); + } else if (Quad.isDefaultGraph(g)) { + Iterator base = findTriples(this::query, s, p, o); + result = Iter.map(base, t -> Quad.create(Quad.defaultGraphIRI, t)); + } else { + result = findQuads(this::query, g, s, p, o); + } + return result; + } + + @Override + public Iterator findNG(Node g, Node s, Node p, Node o) { + Iterator result = findQuads(this::query, g, s, p, o); + return result; + } + + @Override + public Graph getDefaultGraph() { + DatasetGraphOverSparql self = this; + return new GraphView(this, Quad.defaultGraphNodeGenerated) { + @Override + protected int graphBaseSize() { + long size = sizeLong(); + return (size < Integer.MAX_VALUE) ? (int)size : Integer.MAX_VALUE; + } + + @Override + public long sizeLong() { + long result = fetchLong(self::query, defaultGraphSizeQuery, vc); + return result; + } + }; + } + + @Override + public Graph getGraph(Node graphNode) { + DatasetGraphOverSparql self = this; + return new GraphView(this, graphNode) { + @Override + protected int graphBaseSize() { + long size = sizeLong(); + return (size < Integer.MAX_VALUE) ? 
(int)size : Integer.MAX_VALUE; + } + + @Override + public long sizeLong() { + Query q = createQueryNamedGraphSize(graphNode, vc); + long result = fetchLong(self::query, q, vc); + return result; + } + }; + } + + @Override + public void addGraph(Node graphName, Graph graph) { + StreamRDF sink = newUpdateSink(); + try { + sink.start(); + StreamRDFToUpdateRequest.sendGraphTriplesToStream(graph, graphName, sink); + } finally { + sink.finish(); + } + } + + @Override + public void removeGraph(Node graphName) { + Objects.requireNonNull(graphName); + delete(graphName, Node.ANY, Node.ANY, Node.ANY); + // UpdateRequest ur = new UpdateRequest(new UpdateDrop(graphName, true)); + // execUpdate(ur); + } + + @Override + public void add(Quad quad) { + Quad q = harmonizeTripleInQuad(quad); + if (!q.isConcrete()) { + throw new IllegalArgumentException("Concrete quad expected."); + } + Update update = new UpdateDataInsert(new QuadDataAcc(List.of(q))); + execUpdate(new UpdateRequest(update)); + } + + @Override + public void delete(Quad quad) { + Quad q = harmonizeTripleInQuad(quad); + if (!q.isConcrete()) { + throw new IllegalArgumentException("Concrete quad expected."); + } + Update update = new UpdateDataDelete(new QuadDataAcc(List.of(q))); + execUpdate(update); + } + + @Override + public void deleteAny(Node g, Node s, Node p, Node o) { + boolean allowDrop = true; + UpdateRequest updateRequest; + if (allowDrop && isWildcard(s) && isWildcard(p) && isWildcard(o)) { + updateRequest = new UpdateRequest(buildDeleteByGraph(g)); + } else { + updateRequest = buildDeleteByPattern(g, s, p, o); + } + execUpdate(updateRequest); + } + + @Override + public long size() { + long result = fetchLong(this::query, graphsCountQuery, vc); + return result; + } + + @Override + public boolean supportsTransactions() { + return false; + } + + @Override + public boolean supportsTransactionAbort() { + return false; + } + + @Override + public void abort() { + getTransactional().abort(); + } + + @Override + public void begin(ReadWrite readWrite) { + getTransactional().begin(readWrite); + } + + @Override + public void commit() { + getTransactional().commit(); + } + + @Override + public void end() { + // Note: AbstractTestRDFConnection.transaction_bad_01() expects + // a JenaTransactionException to be thrown if the + // conditions of the if-statement below are satisfied. + try { + if (isInTransaction()) { + if (transactionMode().equals(WRITE)) { + String msg = "end() called for WRITE transaction without commit or abort having been called. 
This causes a forced abort."; + throw new JenaTransactionException(msg); + } + } + } finally { + getTransactional().end(); + } + } + + @Override + public boolean isInTransaction() { + return getTransactional().isInTransaction(); + } + + @Override + public void begin(TxnType type) { + getTransactional().begin(type); + } + + @Override + public boolean promote(Promote mode) { + return getTransactional().promote(mode); + } + + @Override + public ReadWrite transactionMode() { + return getTransactional().transactionMode(); + } + + @Override + public TxnType transactionType() { + return getTransactional().transactionType(); + } + + @Override + public PrefixMap prefixes() { + return prefixes; + } + + // ----- SPARQL Statement Generation ----- + + private static final Var vg = Var.alloc("g"); + private static final Var vs = Var.alloc("s"); + private static final Var vp = Var.alloc("p"); + private static final Var vo = Var.alloc("o"); + private static final Query graphsQuery = QueryFactory.create("SELECT ?g { GRAPH ?g { } }"); + + private static final Var vc = Var.alloc("c"); + private static final Query graphsCountQuery = QueryFactory.create("SELECT (COUNT(*) AS ?c) { GRAPH ?g { } }"); + + private static final Query defaultGraphSizeQuery = QueryFactory.create("SELECT (COUNT(*) AS ?c) { ?s ?p ?o }"); + + private static IteratorCloseable findTriples(Function executor, Node s, Node p, Node o) { + Triple triple = matchTriple(Triple.create(s, p, o)); + Query query = createQueryTriple(triple); + QueryExec qExec = executor.apply(query); + return Iter.onClose( + Iter.map(qExec.select(), b -> Substitute.substitute(triple, b)), + qExec::close); + } + + private static IteratorCloseable findQuads(Function executor, Node g, Node s, Node p, Node o) { + Quad quad = matchQuad(g, s, p, o); + Query query = createQueryQuad(quad); + QueryExec qExec = executor.apply(query); + return Iter.onClose( + Iter.map(qExec.select(), b -> Substitute.substitute(quad, b)), + qExec::close); + } + + private static IteratorCloseable findTriplesOrQuads(Function executor, Node s, Node p, Node o) { + Quad quad = matchQuad(vg, s, p, o); + Query query = createQueryTriplesAndQuads(s, p, o); + QueryExec qExec = executor.apply(query); + return Iter.onClose( + Iter.map(qExec.select(), b -> { + if (!b.contains(vg)) { + // Unbound graph variable -> default graph. + b = BindingFactory.binding(b, vg, Quad.defaultGraphIRI); + } + return Substitute.substitute(quad, b); + }), + qExec::close); + } + + private static long fetchLong(Function executor, Query query, Var numberVar) { + long result; + try (QueryExec qExec = executor.apply(query)) { + Binding b = qExec.select().next(); + Number number = (Number)b.get(numberVar).getLiteralValue(); + result = number.longValue(); + } + return result; + } + + private static Node matchNode(Node n, Node d) { + return n == null || n.equals(Node.ANY) ? d : n; + } + + private static Triple matchTriple(Triple triple) { + return matchTriple(triple.getSubject(), triple.getPredicate(), triple.getObject()); + } + + private static Triple matchTriple(Node s, Node p, Node o) { + return Triple.create(matchNode(s, vs), matchNode(p, vp), matchNode(o, vo)); + } + + private static Quad harmonizeTripleInQuad(Quad quad) { + Quad result = quad.isTriple() ? new Quad(Quad.defaultGraphIRI, quad.asTriple()) : quad; + return result; + } + + private static Quad matchQuad(Node g, Node s, Node p, Node o) { + return Quad.create(matchNode(g, vg), matchNode(s, vs), matchNode(p, vp), matchNode(o, vo)); + } + + /** + * Generates the query: + *
+     * SELECT * {
+     *     { ?s ?p ?o }
+     *   UNION
+     *     { GRAPH ?g { ?s ?p ?o } }
+     * }
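+     * Bindings from the first branch leave ?g unbound; callers treat an unbound ?g as the default graph.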
+     * 
+ */ + private static Query createQueryTriplesAndQuads(Node s, Node p, Node o) { + BasicPattern bgpTriples = new BasicPattern(); + bgpTriples.add(matchTriple(s, p, o)); + + Quad quad = matchQuad(vg, s, p, o); + BasicPattern bgpQuads = new BasicPattern(); + bgpQuads.add(quad.asTriple()); + + ElementUnion union = new ElementUnion(); + union.addElement(new ElementTriplesBlock(bgpTriples)); + union.addElement(new ElementNamedGraph(vg, new ElementTriplesBlock(bgpQuads))); + + Query query = new Query(); + query.setQuerySelectType(); + query.setQueryResultStar(true); + query.setQueryPattern(union); + return query; + } + + /** + * Generates the query: + *
+     * SELECT * { GRAPH ?g { ?s ?p ?o } }
+     * 
+ */ + private static Query createQueryQuad(Quad quad) { + BasicPattern bgp = new BasicPattern(); + bgp.add(quad.asTriple()); + Element element = new ElementTriplesBlock(bgp); + element = new ElementNamedGraph(quad.getGraph(), element); + + Query query = new Query(); + query.setQuerySelectType(); + query.setQueryResultStar(true); + query.setQueryPattern(element); + return query; + } + + /** + * Generates the query: + *
+     * SELECT * { ?s ?p ?o }
+     * 
+ */ + private static Query createQueryTriple(Triple m) { + BasicPattern bgp = new BasicPattern(); + bgp.add(m); + Element element = new ElementTriplesBlock(bgp); + + Query query = new Query(); + query.setQuerySelectType(); + query.setQueryResultStar(true); + query.setQueryPattern(element); + return query; + } + + /** + * Generates the query: + *
+     * SELECT (COUNT(*) AS ?c) { GRAPH <g> { ?s ?p ?o } }
+     * 
+ */ + private static Query createQueryNamedGraphSize(Node graphName, Var outputVar) { + BasicPattern bgp = new BasicPattern(); + bgp.add(Triple.create(vs, vp, vo)); + Element element = new ElementNamedGraph(graphName, new ElementTriplesBlock(bgp)); + + Query query = new Query(); + query.setQuerySelectType(); + query.setQueryPattern(element); + Expr exprAgg = query.allocAggregate(new AggCount()); + query.getProject().add(outputVar, exprAgg); + return query; + } + + private static Update buildDelete(Node g, Node s, Node p, Node o) { + Quad quad = matchQuad(g, s, p, o); + Update update = quad.isConcrete() + ? new UpdateDataDelete(new QuadDataAcc(List.of(quad))) + : new UpdateDeleteWhere(new QuadAcc(List.of(quad))); + return update; + } + + private static UpdateRequest buildDeleteByPattern(Node g, Node s, Node p, Node o) { + UpdateRequest updateRequest = new UpdateRequest(); + if (isWildcard(g)) { + updateRequest.add(buildDelete(Quad.defaultGraphIRI, s, p, o)); + updateRequest.add(buildDelete(g, s, p, o)); + } else { + updateRequest.add(buildDelete(g, s, p, o)); + } + return updateRequest; + } + + private static Update buildDeleteByGraph(Node g) { + Target target = chooseTarget(g); + boolean silent = true; + boolean useDrop = true; + Update update = useDrop + ? new UpdateDrop(target, silent) + : new UpdateClear(target, silent); + return update; + } + + private static Target chooseTarget(Node g) { + Target target = Quad.isDefaultGraph(g) + ? Target.DEFAULT + : Quad.isUnionGraph(g) + ? Target.NAMED + : (g == null || Node.ANY.equals(g)) + ? Target.ALL + : Target.create(g); + return target; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/QueryDispatcher.java b/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/QueryDispatcher.java new file mode 100644 index 00000000000..8d28c41e259 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/QueryDispatcher.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.jena.sparql.engine.dispatch; + +import org.apache.jena.query.Query; +import org.apache.jena.query.Syntax; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.exec.QueryExec; +import org.apache.jena.sparql.util.Context; + +public interface QueryDispatcher { + QueryExec create(String queryString, Syntax syntax, DatasetGraph dsg, Binding initialBinding, Context context); + QueryExec create(Query query, DatasetGraph dsg, Binding initialBinding, Context context); +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/QueryDispatcherOverRegistry.java b/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/QueryDispatcherOverRegistry.java new file mode 100644 index 00000000000..a8d10f9c532 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/QueryDispatcherOverRegistry.java @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.engine.dispatch; + +import java.util.List; + +import org.apache.jena.query.Query; +import org.apache.jena.query.QueryException; +import org.apache.jena.query.Syntax; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.exec.QueryExec; +import org.apache.jena.sparql.util.Context; + +/** Abstraction of a registry's single chain as a service executor */ +public class QueryDispatcherOverRegistry + implements QueryDispatcher +{ + protected SparqlDispatcherRegistry registry; + + /** Position in the chain */ + protected int pos; + + public QueryDispatcherOverRegistry(SparqlDispatcherRegistry registry) { + this(registry, 0); + } + + public QueryDispatcherOverRegistry(SparqlDispatcherRegistry registry, int pos) { + super(); + this.registry = registry; + this.pos = pos; + } + + protected ChainingQueryDispatcher getDispatcher() { + List queryDispatchers = registry.getQueryDispatchers(); + int n = queryDispatchers.size(); + if (pos >= n) { + throw new QueryException("No more elements in query dispatcher chain (pos=" + pos + ", chain size=" + n + ")"); + } + ChainingQueryDispatcher dispatcher = queryDispatchers.get(pos); + return dispatcher; + } + + @Override + public QueryExec create(Query query, DatasetGraph dsg, Binding initialBinding, Context context) { + ChainingQueryDispatcher dispatcher = getDispatcher(); + QueryDispatcher next = new QueryDispatcherOverRegistry(registry, pos + 1); + QueryExec result = dispatcher.create(query, dsg, initialBinding, context, next); + return result; + } + + @Override + public QueryExec create(String queryString, Syntax syntax, DatasetGraph dsg, Binding initialBinding, Context context) { + 
ChainingQueryDispatcher dispatcher = getDispatcher(); + QueryDispatcher next = new QueryDispatcherOverRegistry(registry, pos + 1); + QueryExec result = dispatcher.create(queryString, syntax, dsg, initialBinding, context, next); + return result; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/SparqlDispatcherRegistry.java b/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/SparqlDispatcherRegistry.java new file mode 100644 index 00000000000..57300557053 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/SparqlDispatcherRegistry.java @@ -0,0 +1,218 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.engine.dispatch; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Optional; + +import org.apache.jena.query.Query; +import org.apache.jena.query.Syntax; +import org.apache.jena.sparql.ARQConstants; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.exec.ChainingQueryDispatcherMain; +import org.apache.jena.sparql.exec.ChainingUpdateDispatcherMain; +import org.apache.jena.sparql.exec.QueryExec; +import org.apache.jena.sparql.exec.UpdateExec; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.sparql.util.ContextAccumulator; +import org.apache.jena.update.UpdateRequest; + +/** + * The SparqlDispatcherRegistry provides a plugin system for + * how to execute SPARQL statements against DatasetGraphs. + */ +public class SparqlDispatcherRegistry +{ + List queryDispatchers = Collections.synchronizedList(new ArrayList<>()); + List updateDispatchers = Collections.synchronizedList(new ArrayList<>()); + + // Singleton + private static SparqlDispatcherRegistry registry; + static { init(); } + + static public SparqlDispatcherRegistry get() + { + return registry; + } + + public List getQueryDispatchers() { + return queryDispatchers; + } + + public List getUpdateDispatchers() { + return updateDispatchers; + } + + /** If there is a registry in the context then return it otherwise yield the global instance */ + static public SparqlDispatcherRegistry chooseRegistry(Context context) + { + SparqlDispatcherRegistry result = get(context); + if (result == null) { + result = get(); + } + return result; + } + + /** Get the query engine registry from the context or null if there is none. + * Returns null if the context is null. */ + static public SparqlDispatcherRegistry get(Context context) + { + SparqlDispatcherRegistry result = context == null + ? 
null + : context.get(ARQConstants.registrySparqlDispatchers); + return result; + } + + static public void set(Context context, SparqlDispatcherRegistry registry) + { + context.set(ARQConstants.registrySparqlDispatchers, registry); + } + + public SparqlDispatcherRegistry copy() { + SparqlDispatcherRegistry result = new SparqlDispatcherRegistry(); + result.queryDispatchers.addAll(queryDispatchers); + result.updateDispatchers.addAll(updateDispatchers); + return result; + } + + /** Create a copy of the registry from the context or return a new instance */ + public static SparqlDispatcherRegistry copyFrom(Context context) { + SparqlDispatcherRegistry tmp = get(context); + SparqlDispatcherRegistry result = tmp != null + ? tmp.copy() + : new SparqlDispatcherRegistry(); + + return result; + } + + public SparqlDispatcherRegistry() { } + + private static void init() + { + registry = new SparqlDispatcherRegistry(); + + registry.add(new ChainingQueryDispatcherMain()); + registry.add(new ChainingUpdateDispatcherMain()); + } + + // ----- Query ----- + + /** Add a query dispatcher to the default registry */ + public static void addDispatcher(ChainingQueryDispatcher f) { get().add(f); } + + /** Add a query dispatcher */ + public void add(ChainingQueryDispatcher f) + { + // Add to low end so that newer factories are tried first + queryDispatchers.add(0, f); + } + + /** Remove a query dispatcher */ + public static void removeDispatcher(ChainingQueryDispatcher f) { get().remove(f); } + + /** Remove a query dispatcher */ + public void remove(ChainingQueryDispatcher f) { queryDispatchers.remove(f); } + + /** Check whether a query dispatcher is already registered in the default registry */ + public static boolean containsFactory(ChainingQueryDispatcher f) { return get().contains(f); } + + /** Check whether a query dispatcher is already registered */ + public boolean contains(ChainingQueryDispatcher f) { return queryDispatchers.contains(f); } + + public static QueryExec exec(Query query, DatasetGraph dsg, Binding initialBinding, Context context) { + SparqlDispatcherRegistry registry = chooseRegistry(context); + QueryDispatcher queryDispatcher = new QueryDispatcherOverRegistry(registry); + QueryExec qExec = queryDispatcher.create(query, dsg, initialBinding, context); + return qExec; + } + + public static QueryExec exec(String queryString, Syntax syntax, DatasetGraph dsg, Binding initialBinding, Context context) { + SparqlDispatcherRegistry registry = chooseRegistry(context); + QueryDispatcher queryDispatcher = new QueryDispatcherOverRegistry(registry); + QueryExec qExec = queryDispatcher.create(queryString, syntax, dsg, initialBinding, context); + return qExec; + } + + // ----- Update ----- + + /** Add an update dispatcher to the default registry */ + public static void addDispatcher(ChainingUpdateDispatcher f) { get().add(f); } + + /** Add an update dispatcher */ + public void add(ChainingUpdateDispatcher f) + { + // Add to low end so that newer factories are tried first + updateDispatchers.add(0, f); + } + + /** Remove an update dispatcher */ + public static void removeDispatcher(ChainingUpdateDispatcher f) { get().remove(f); } + + /** Remove an update dispatcher */ + public void remove(ChainingUpdateDispatcher f) { updateDispatchers.remove(f); } + + /** Check whether an update dispatcher is already registered in the default registry */ + public static boolean containsDispatcher(ChainingUpdateDispatcher f) { return get().contains(f); } + + /** Check whether an update dispatcher is already registered */ + public 
boolean contains(ChainingUpdateDispatcher f) { return updateDispatchers.contains(f); } + + public static UpdateExec exec(UpdateRequest updateRequest, DatasetGraph dsg, Binding initialBinding, Context context) { + SparqlDispatcherRegistry registry = chooseRegistry(context); + UpdateDispatcher updateDispatcher = new UpdateDispatcherOverRegistry(registry); + UpdateExec uExec = updateDispatcher.create(updateRequest, dsg, initialBinding, context); + return uExec; + } + + public static UpdateExec exec(String updateRequestString, DatasetGraph dsg, Binding initialBinding, Context context) { + SparqlDispatcherRegistry registry = chooseRegistry(context); + UpdateDispatcher updateDispatcher = new UpdateDispatcherOverRegistry(registry); + UpdateExec uExec = updateDispatcher.create(updateRequestString, dsg, initialBinding, context); + return uExec; + } + + // ----- Parse Check ----- + + public static void setParseCheck(Context cxt, Boolean value) { + cxt.set(ARQConstants.parseCheck, value); + } + + public static Optional getParseCheck(DatasetGraph dsg) { + return Optional.ofNullable(dsg).map(DatasetGraph::getContext).flatMap(SparqlDispatcherRegistry::getParseCheck); + } + + public static Optional getParseCheck(Context cxt) { + return Optional.ofNullable(cxt).map(c -> c.get(ARQConstants.parseCheck)); + } + + public static Optional getParseCheck(ContextAccumulator cxtAcc) { + return Optional.ofNullable(cxtAcc).map(ca -> ca.get(ARQConstants.parseCheck)); + } + + public static boolean effectiveParseCheck(Optional parseCheck, Context cxt) { + return parseCheck.orElseGet(() -> getParseCheck(cxt).orElse(true)); + } + + public static boolean effectiveParseCheck(Optional parseCheck, ContextAccumulator cxtAcc) { + return parseCheck.orElseGet(() -> getParseCheck(cxtAcc).orElse(true)); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/StreamRDFToUpdateRequest.java b/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/StreamRDFToUpdateRequest.java new file mode 100644 index 00000000000..901a74a37f7 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/StreamRDFToUpdateRequest.java @@ -0,0 +1,161 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.jena.sparql.engine.dispatch;
+
+import java.util.Objects;
+import java.util.function.Consumer;
+
+import org.apache.jena.graph.Graph;
+import org.apache.jena.graph.Node;
+import org.apache.jena.graph.Triple;
+import org.apache.jena.riot.system.StreamRDF;
+import org.apache.jena.riot.system.StreamRDFOps;
+import org.apache.jena.riot.system.StreamRDFWrapper;
+import org.apache.jena.shared.PrefixMapping;
+import org.apache.jena.sparql.core.Quad;
+import org.apache.jena.sparql.modify.request.QuadDataAcc;
+import org.apache.jena.sparql.modify.request.UpdateDataInsert;
+import org.apache.jena.update.UpdateRequest;
+
+/**
+ * {@link StreamRDF} that buffers incoming triples and quads and emits them as
+ * SPARQL INSERT DATA update requests to a consumer.
+ */
+/* package */ class StreamRDFToUpdateRequest implements StreamRDF {
+    public static final int DFT_BUFFER_SIZE = 1000;
+
+    private Consumer<UpdateRequest> sink;
+    private int bufferSize;
+    private PrefixMapping prefixes;
+    private QuadDataAcc quadAcc = new QuadDataAcc();
+
+    /**
+     * Constructs the StreamRDFToUpdateRequest using the default {@value #DFT_BUFFER_SIZE} quad buffer size.
+     *
+     * @param sink the consumer that receives the generated update requests.
+     */
+    public StreamRDFToUpdateRequest(Consumer<UpdateRequest> sink) {
+        this(sink, null);
+    }
+
+    public StreamRDFToUpdateRequest(Consumer<UpdateRequest> sink, PrefixMapping prefixes) {
+        this(sink, prefixes, DFT_BUFFER_SIZE);
+    }
+
+    public StreamRDFToUpdateRequest(Consumer<UpdateRequest> sink, PrefixMapping prefixes, int bufferSize) {
+        super();
+        if (bufferSize < 1) {
+            throw new IllegalArgumentException("Buffer size must be at least 1");
+        }
+
+        this.sink = Objects.requireNonNull(sink);
+        this.prefixes = prefixes;
+        this.bufferSize = bufferSize;
+    }
+
+    /**
+     * Flush the buffer if it has reached the configured size.
+     */
+    private void isBufferFull() {
+        if ( quadAcc.getQuads().size() >= bufferSize ) {
+            flush();
+        }
+    }
+
+    /**
+     * Flushes the buffered quads to the sink as a single update request.
+     */
+    private void flush() {
+        if (!quadAcc.getQuads().isEmpty()) {
+            UpdateRequest updateRequest = new UpdateRequest(new UpdateDataInsert(quadAcc));
+            if (prefixes != null) {
+                updateRequest.setPrefixMapping(prefixes);
+            }
+            try {
+                sink.accept(updateRequest);
+            } finally {
+                quadAcc.close();
+            }
+            quadAcc = new QuadDataAcc();
+        }
+    }
+
+    @Override
+    public void start() {
+        // does nothing.
+    }
+
+    @Override
+    public void triple(Triple triple) {
+        quadAcc.addTriple(triple);
+        isBufferFull();
+    }
+
+    @Override
+    public void quad(Quad quad) {
+        quadAcc.addQuad(quad);
+        isBufferFull();
+    }
+
+    @Override
+    public void base(String base) {
+        // do nothing
+    }
+
+    @Override
+    public void version(String version) {}
+
+    @Override
+    public void prefix(String prefix, String iri) {
+        if (prefixes != null) {
+            prefixes.setNsPrefix(prefix, iri);
+        }
+    }
+
+    @Override
+    public void finish() {
+        flush();
+        quadAcc.close();
+    }
+
+    // ----- Utils; move to StreamRDFOps? -----
+
+    static class StreamRDFTriplesToQuads
+        extends StreamRDFWrapper {
+
+        protected final Node graphName;
+
+        public StreamRDFTriplesToQuads(StreamRDF other, Node graphName) {
+            super(other);
+            this.graphName = Objects.requireNonNull(graphName);
+        }
+
+        @Override
+        public void triple(Triple triple) {
+            Quad quad = Quad.create(graphName, triple);
+            get().quad(quad);
+        }
+    }
+
+    /** Send triples of the source graph as quads in the given target graph to the sink. */
+    static void sendGraphTriplesToStream(Graph sourceGraph, Node targetGraphName, StreamRDF sink) {
+        boolean isSinkDefaultGraph = targetGraphName == null || Quad.isDefaultGraph(targetGraphName);
+        StreamRDF effectiveSink = isSinkDefaultGraph ?
sink : new StreamRDFTriplesToQuads(sink, targetGraphName); + StreamRDFOps.sendGraphTriplesToStream(sourceGraph, effectiveSink); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/UpdateDispatcher.java b/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/UpdateDispatcher.java new file mode 100644 index 00000000000..f1a45c2d216 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/UpdateDispatcher.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.engine.dispatch; + +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.exec.UpdateExec; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.update.UpdateRequest; + +public interface UpdateDispatcher { + UpdateExec create(String updateRequestString, DatasetGraph dsg, Binding initialBinding, Context context); + UpdateExec create(UpdateRequest updateRequest, DatasetGraph dsg, Binding initialBinding, Context context); +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/UpdateDispatcherOverRegistry.java b/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/UpdateDispatcherOverRegistry.java new file mode 100644 index 00000000000..978c55fb0cc --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/engine/dispatch/UpdateDispatcherOverRegistry.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.jena.sparql.engine.dispatch; + +import java.util.List; + +import org.apache.jena.query.QueryException; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.exec.UpdateExec; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.update.UpdateRequest; + +/** Abstraction of a registry's single chain as a service executor */ +public class UpdateDispatcherOverRegistry + implements UpdateDispatcher +{ + protected SparqlDispatcherRegistry registry; + + /** Position in the chain */ + protected int pos; + + public UpdateDispatcherOverRegistry(SparqlDispatcherRegistry registry) { + this(registry, 0); + } + + public UpdateDispatcherOverRegistry(SparqlDispatcherRegistry registry, int pos) { + super(); + this.registry = registry; + this.pos = pos; + } + + protected ChainingUpdateDispatcher getDispatcher() { + List updateDispatchers = registry.getUpdateDispatchers(); + int n = updateDispatchers.size(); + if (pos >= n) { + throw new QueryException("No more elements in query dispatcher chain (pos=" + pos + ", chain size=" + n + ")"); + } + ChainingUpdateDispatcher dispatcher = updateDispatchers.get(pos); + return dispatcher; + } + + @Override + public UpdateExec create(UpdateRequest updateRequest, DatasetGraph dsg, Binding initialBinding, Context context) { + ChainingUpdateDispatcher dispatcher = getDispatcher(); + UpdateDispatcher next = new UpdateDispatcherOverRegistry(registry, pos + 1); + UpdateExec result = dispatcher.create(updateRequest, dsg, initialBinding, context, next); + return result; + } + + @Override + public UpdateExec create(String queryString, DatasetGraph dsg, Binding initialBinding, Context context) { + ChainingUpdateDispatcher dispatcher = getDispatcher(); + UpdateDispatcher next = new UpdateDispatcherOverRegistry(registry, pos + 1); + UpdateExec result = dispatcher.create(queryString, dsg, initialBinding, context, next); + return result; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/engine/http/QueryExceptionHTTP.java b/jena-arq/src/main/java/org/apache/jena/sparql/engine/http/QueryExceptionHTTP.java index 4afb1e8d672..d1491cbd3fa 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/engine/http/QueryExceptionHTTP.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/engine/http/QueryExceptionHTTP.java @@ -18,6 +18,8 @@ package org.apache.jena.sparql.engine.http; +import java.util.Optional; + import org.apache.jena.atlas.web.HttpException; import org.apache.jena.query.QueryException; import org.apache.jena.web.HttpSC; @@ -44,12 +46,13 @@ public static QueryExceptionHTTP rewrap(HttpException httpEx) { // Therefore we need to wrap appropriately int responseCode = httpEx.getStatusCode(); if (responseCode != -1) { - // Was an actual HTTP error - String responseLine = httpEx.getStatusLine() != null ? httpEx.getStatusLine() : HttpSC.getMessage(responseCode); + // Was an actual HTTP error. Enrich the message with the first line of the HTTP response. + String x = (httpEx.getStatusLine() != null) ? httpEx.getStatusLine() : HttpSC.getMessage(responseCode); + String responseLine = x + extractResponseLine(": ", httpEx.getResponse()); return new QueryExceptionHTTP(responseCode, responseLine, httpEx); } else if (httpEx.getMessage() != null) { // Some non-HTTP error with a valid message e.g. 
Socket Communications failed, IO error - return new QueryExceptionHTTP(responseCode, "Unexpected error making the query: " + httpEx.getMessage(), httpEx); + return new QueryExceptionHTTP(responseCode, "Unexpected error making the query: " + httpEx.getMessage() + ". Cause: " + httpEx.getCause(), httpEx); } else if (httpEx.getCause() != null) { // Some other error with a cause e.g. Socket Communications failed, IO error return new QueryExceptionHTTP(responseCode, "Unexpected error making the query, see cause for further details", httpEx); @@ -59,6 +62,16 @@ public static QueryExceptionHTTP rewrap(HttpException httpEx) { } } + /** Extract the first line of the given string. If that line is non-empty then prepend the given prefix. */ + private static String extractResponseLine(String prefixIfPresent, String str) { + String s = str == null ? "" : str; + String lines[] = s.split("\\r?\\n"); + String firstLine = lines[0].trim(); + + String result = firstLine.isEmpty() ? "" : prefixIfPresent + firstLine; + return result; + } + /** * Constructor for QueryExceptionHTTP. * @param responseCode diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/ChainingQueryDispatcherMain.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/ChainingQueryDispatcherMain.java new file mode 100644 index 00000000000..ffc747ade18 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/ChainingQueryDispatcherMain.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
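A sketch of how UpdateDispatcherOverRegistry is intended to be driven. The registry, request, dsg and context variables are placeholders; how a SparqlDispatcherRegistry instance is obtained is not shown in this hunk.

    // Start the walk at position 0 of the registry's update-dispatcher chain.
    UpdateDispatcher root = new UpdateDispatcherOverRegistry(registry);
    UpdateExec exec = root.create(request, dsg, null, context);
    // The dispatcher at position 0 receives next = new UpdateDispatcherOverRegistry(registry, 1),
    // so each call to next.create(...) moves one position further along the chain;
    // running off the end raises the exception thrown by getDispatcher().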
+ */ + +package org.apache.jena.sparql.exec; + +import org.apache.jena.atlas.logging.Log; +import org.apache.jena.query.Query; +import org.apache.jena.query.QueryFactory; +import org.apache.jena.query.Syntax; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.engine.QueryEngineFactory; +import org.apache.jena.sparql.engine.QueryEngineRegistry; +import org.apache.jena.sparql.engine.Timeouts; +import org.apache.jena.sparql.engine.Timeouts.Timeout; +import org.apache.jena.sparql.engine.Timeouts.TimeoutBuilderImpl; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.engine.dispatch.ChainingQueryDispatcher; +import org.apache.jena.sparql.engine.dispatch.QueryDispatcher; +import org.apache.jena.sparql.util.Context; + +public class ChainingQueryDispatcherMain + implements ChainingQueryDispatcher +{ + @Override + public QueryExec create(String queryString, Syntax syntax, DatasetGraph dsg, Binding initialBinding, Context context, QueryDispatcher chain) { + Query query = QueryFactory.create(queryString, syntax); + return create(query, dsg, initialBinding, context, chain); + } + + @Override + public QueryExec create(Query queryActual, DatasetGraph dataset, Binding initialBinding, Context cxt, QueryDispatcher chain) { + QueryEngineFactory qeFactory = QueryEngineRegistry.findFactory(queryActual, dataset, cxt); + if ( qeFactory == null ) { + Log.warn(QueryExecDatasetBuilder.class, "Failed to find a QueryEngineFactory"); + return null; + } + + TimeoutBuilderImpl timeoutBuilder = new TimeoutBuilderImpl(); + Timeouts.applyDefaultQueryTimeoutFromContext(timeoutBuilder, cxt); + Timeout timeout = timeoutBuilder.build(); + + String queryStringActual = null; + QueryExec qExec = new QueryExecDataset(queryActual, queryStringActual, dataset, cxt, qeFactory, + timeout, initialBinding); + return qExec; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/ChainingUpdateDispatcherMain.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/ChainingUpdateDispatcherMain.java new file mode 100644 index 00000000000..38e219c23ac --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/ChainingUpdateDispatcherMain.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
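ChainingQueryDispatcherMain above is the terminal chain element: it resolves a QueryEngineFactory itself and never calls its chain argument. A non-terminal element typically does its work and then delegates, roughly as in this sketch (the class name is illustrative, and registering it with SparqlDispatcherRegistry is outside this hunk).

    public class InterceptingQueryDispatcher implements ChainingQueryDispatcher {
        @Override
        public QueryExec create(Query query, DatasetGraph dsg, Binding initialBinding,
                                Context context, QueryDispatcher chain) {
            // Inspect or rewrite the query here, then hand it on to the rest of the chain.
            return chain.create(query, dsg, initialBinding, context);
        }

        @Override
        public QueryExec create(String queryString, Syntax syntax, DatasetGraph dsg,
                                Binding initialBinding, Context context, QueryDispatcher chain) {
            // Unparsed form: the string is passed through untouched.
            return chain.create(queryString, syntax, dsg, initialBinding, context);
        }
    }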
+ */ + +package org.apache.jena.sparql.exec; + +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.engine.Timeouts; +import org.apache.jena.sparql.engine.Timeouts.Timeout; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.engine.dispatch.ChainingUpdateDispatcher; +import org.apache.jena.sparql.engine.dispatch.UpdateDispatcher; +import org.apache.jena.sparql.modify.UpdateEngineFactory; +import org.apache.jena.sparql.modify.UpdateEngineRegistry; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.update.UpdateException; +import org.apache.jena.update.UpdateFactory; +import org.apache.jena.update.UpdateRequest; + +public class ChainingUpdateDispatcherMain + implements ChainingUpdateDispatcher +{ + @Override + public UpdateExec create(String updateRequestString, DatasetGraph dsg, Binding initialBinding, Context context, UpdateDispatcher chain) { + UpdateRequest updateRequest = UpdateFactory.create(updateRequestString); + return create(updateRequest, dsg, initialBinding, context, chain); + } + + @Override + public UpdateExec create(UpdateRequest updateRequest, DatasetGraph dataset, Binding initialBinding, Context cxt, UpdateDispatcher chain) { + UpdateRequest actualUpdate = updateRequest; + + UpdateEngineFactory f = UpdateEngineRegistry.get().find(dataset, cxt); + if ( f == null ) + throw new UpdateException("Failed to find an UpdateEngine"); + + Timeout timeout = Timeouts.extractUpdateTimeout(cxt); + UpdateExec uExec = new UpdateExecDataset(actualUpdate, dataset, initialBinding, cxt, f, timeout); + return uExec; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecBuilderWrapper.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecBuilderWrapper.java new file mode 100644 index 00000000000..657d0669ceb --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecBuilderWrapper.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec; + +import java.util.concurrent.TimeUnit; + +import org.apache.jena.graph.Node; +import org.apache.jena.query.ARQ; +import org.apache.jena.query.Query; +import org.apache.jena.query.Syntax; +import org.apache.jena.sparql.core.Var; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.sparql.util.Symbol; + +public class QueryExecBuilderWrapper + extends QueryExecModWrapper + implements QueryExecBuilder +{ + public QueryExecBuilderWrapper(T delegate) { + super(delegate); + } + + @Override + public X query(Query query) { + getDelegate().query(query); + return self(); + } + + /** Set the query. 
*/ + @Override + public X query(String queryString) { + getDelegate().query(queryString); + return self(); + } + + @Override + public QueryExecBuilder parseCheck(boolean parseCheck) { + getDelegate().parseCheck(parseCheck); + return self(); + } + + /** Set the query. */ + @Override + public X query(String queryString, Syntax syntax) { + getDelegate().query(queryString, syntax); + return self(); + } + + /** Set a context entry. */ + @Override + public X set(Symbol symbol, Object value) { + getDelegate().set(symbol, value); + return self(); + } + + /** Set a context entry. */ + @Override + public X set(Symbol symbol, boolean value) { + getDelegate().set(symbol, value); + return self(); + } + + /** + * Set the context. If not set, publics to the system context + * ({@link ARQ#getContext}). + */ + @Override + public X context(Context context) { + getDelegate().context(context); + return self(); + } + + /** Provide a set of (Var, Node) for substitution in the query when QueryExec is built. */ + @Override + public X substitution(Binding binding) { + getDelegate().substitution(binding); + return self(); + } + + /** Provide a (Var, Node) for substitution in the query when QueryExec is built. */ + @Override + public X substitution(Var var, Node value) { + getDelegate().substitution(var, value); + return self(); + } + + /** Provide a (var name, Node) for substitution in the query when QueryExec is built. */ + @Override + public X substitution(String var, Node value) { + getDelegate().substitution(var, value); + return self(); + } + + /** Set the overall query execution timeout. */ + @Override + public X timeout(long value, TimeUnit timeUnit) { + getDelegate().timeout(value, timeUnit); + return self(); + } + + /** + * Build the {@link QueryExec}. Further changes to he builder do not affect this + * {@link QueryExec}. 
+ */ + @Override + public QueryExec build() { + return getDelegate().build(); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDataset.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDataset.java index 32fdd214076..ac1dd13ec02 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDataset.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDataset.java @@ -18,10 +18,15 @@ package org.apache.jena.sparql.exec; -import java.util.*; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.NoSuchElementException; +import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; + import org.apache.jena.atlas.iterator.Iter; import org.apache.jena.atlas.json.JsonArray; import org.apache.jena.atlas.json.JsonObject; @@ -48,10 +53,10 @@ import org.apache.jena.sparql.engine.Plan; import org.apache.jena.sparql.engine.QueryEngineFactory; import org.apache.jena.sparql.engine.QueryIterator; +import org.apache.jena.sparql.engine.Timeouts.Timeout; import org.apache.jena.sparql.engine.binding.Binding; import org.apache.jena.sparql.engine.binding.BindingFactory; import org.apache.jena.sparql.engine.iterator.QueryIteratorWrapper; -import org.apache.jena.sparql.engine.Timeouts.Timeout; import org.apache.jena.sparql.graph.GraphOps; import org.apache.jena.sparql.modify.TemplateLib; import org.apache.jena.sparql.syntax.ElementGroup; diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDatasetBuilder.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDatasetBuilder.java index 4c3d84b6591..568f60b57b7 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDatasetBuilder.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDatasetBuilder.java @@ -20,23 +20,25 @@ import java.util.HashMap; import java.util.Map; -import java.util.Objects; +import java.util.Optional; import java.util.concurrent.TimeUnit; -import org.apache.jena.atlas.logging.Log; import org.apache.jena.graph.Graph; import org.apache.jena.graph.Node; -import org.apache.jena.query.*; +import org.apache.jena.query.ARQ; +import org.apache.jena.query.DatasetFactory; +import org.apache.jena.query.Query; +import org.apache.jena.query.QueryFactory; +import org.apache.jena.query.Syntax; import org.apache.jena.sparql.ARQConstants; import org.apache.jena.sparql.core.DatasetGraph; import org.apache.jena.sparql.core.DatasetGraphFactory; import org.apache.jena.sparql.core.Var; -import org.apache.jena.sparql.engine.QueryEngineFactory; -import org.apache.jena.sparql.engine.QueryEngineRegistry; -import org.apache.jena.sparql.engine.binding.Binding; import org.apache.jena.sparql.engine.Timeouts; import org.apache.jena.sparql.engine.Timeouts.Timeout; import org.apache.jena.sparql.engine.Timeouts.TimeoutBuilderImpl; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.engine.dispatch.SparqlDispatcherRegistry; import org.apache.jena.sparql.syntax.syntaxtransform.QueryTransformOps; import org.apache.jena.sparql.util.Context; import org.apache.jena.sparql.util.ContextAccumulator; @@ -56,11 +58,11 @@ public static QueryExecDatasetBuilder create() { return builder; } - private static final long UNSET = -1; - private DatasetGraph dataset = null; private Query query = null; private String queryString = null; + private Syntax syntax = null; + private 
Optional parseCheck = Optional.empty(); private ContextAccumulator contextAcc = ContextAccumulator.newBuilder(()->ARQ.getContext(), ()->Context.fromDataset(dataset)); @@ -80,6 +82,8 @@ private QueryExecDatasetBuilder() { } @Override public QueryExecDatasetBuilder query(Query query) { this.query = query; + this.queryString = null; + this.syntax = null; return this; } @@ -89,19 +93,24 @@ public QueryExecDatasetBuilder query(String queryString) { return this; } - /** The parse-check flag has no effect for query execs over datasets. */ @Override - public QueryExecDatasetBuilder parseCheck(boolean parseCheck) { + public QueryExecDatasetBuilder query(String queryString, Syntax syntax) { + this.query = effectiveParseCheck() ? QueryFactory.create(queryString, syntax) : null; + this.queryString = queryString; + this.syntax = syntax; return this; } @Override - public QueryExecDatasetBuilder query(String queryString, Syntax syntax) { - this.queryString = queryString; - this.query = QueryFactory.create(queryString, syntax); + public QueryExecDatasetBuilder parseCheck(boolean parseCheck) { + this.parseCheck = Optional.of(parseCheck); return this; } + protected boolean effectiveParseCheck() { + return SparqlDispatcherRegistry.effectiveParseCheck(parseCheck, contextAcc); + } + public QueryExecDatasetBuilder dataset(DatasetGraph dsg) { this.dataset = dsg; return this; @@ -187,38 +196,50 @@ public QueryExecDatasetBuilder overallTimeout(long timeout, TimeUnit timeUnit) { @Override public QueryExec build() { - Objects.requireNonNull(query, "No query for QueryExec"); + if (query == null && queryString == null) { + throw new NullPointerException("No query for QueryExec"); + } + // Queries can have FROM/FROM NAMED or VALUES to get data. - //Objects.requireNonNull(dataset, "No dataset for QueryExec"); - query.ensureResultVars(); - Context cxt = getContext(); + // Objects.requireNonNull(dataset, "No dataset for QueryExec"); - QueryEngineFactory qeFactory = QueryEngineRegistry.findFactory(query, dataset, cxt); - if ( qeFactory == null ) { - Log.warn(QueryExecDatasetBuilder.class, "Failed to find a QueryEngineFactory"); - return null; + if (query != null) { + query.ensureResultVars(); } + Context cxt = getContext(); + // Initial bindings / parameterized query Query queryActual = query; String queryStringActual = queryString; + // Place the effective timeout into the context + Timeouts.applyDefaultQueryTimeoutFromContext(timeoutBuilder, cxt); + Timeout timeout = timeoutBuilder.build(); + Timeouts.setQueryTimeout(cxt, timeout); + if ( substitutionMap != null && ! substitutionMap.isEmpty() ) { - queryActual = QueryTransformOps.replaceVars(query, substitutionMap); + if (queryActual == null) { + queryActual = QueryFactory.create(queryString, syntax); + } + queryActual = QueryTransformOps.replaceVars(queryActual, substitutionMap); queryStringActual = null; } - Timeouts.applyDefaultQueryTimeoutFromContext(this.timeoutBuilder, cxt); - if ( dataset != null ) cxt.set(ARQConstants.sysCurrentDataset, DatasetFactory.wrap(dataset)); if ( queryActual != null ) cxt.set(ARQConstants.sysCurrentQuery, queryActual); - Timeout timeout = timeoutBuilder.build(); + QueryExec qExec; + if (queryActual != null) { + // Pass on query object. + qExec = SparqlDispatcherRegistry.exec(queryActual, dataset, initialBinding, cxt); + } else { + // Pass on query string. 
+ qExec = SparqlDispatcherRegistry.exec(queryStringActual, syntax, dataset, initialBinding, cxt); + } - QueryExec qExec = new QueryExecDataset(queryActual, queryStringActual, dataset, cxt, qeFactory, - timeout, initialBinding); return qExec; } } diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecModWrapper.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecModWrapper.java new file mode 100644 index 00000000000..acfc2a127c7 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecModWrapper.java @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec; + +import java.util.concurrent.TimeUnit; + +import org.apache.jena.sparql.util.Context; + +public class QueryExecModWrapper + implements QueryExecMod +{ + protected T delegate; + + public QueryExecModWrapper(T delegate) { + super(); + this.delegate = delegate; + } + + protected T getDelegate() { + return delegate; + } + + @SuppressWarnings("unchecked") + public X self() { + return (X)this; + } + + @Override + public X timeout(long timeout) { + getDelegate().timeout(timeout, TimeUnit.MILLISECONDS); + return self(); + } + + @Override + public X timeout(long timeout, TimeUnit timeoutUnits) { + getDelegate().timeout(timeout, TimeUnit.MILLISECONDS); + return self(); + } + + @Override + public X initialTimeout(long timeout, TimeUnit timeUnit) { + getDelegate().initialTimeout(timeout, timeUnit); + return self(); + } + + @Override + public X overallTimeout(long timeout, TimeUnit timeUnit) { + getDelegate().overallTimeout(timeout, timeUnit); + return self(); + } + + @Override + public Context getContext() { + return getDelegate().getContext(); + } + + @Override + public QueryExec build() { + return getDelegate().build(); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecWrapper.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecWrapper.java new file mode 100644 index 00000000000..8a10ffd4dda --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecWrapper.java @@ -0,0 +1,151 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec; + +import java.util.Iterator; +import java.util.function.Supplier; + +import org.apache.jena.atlas.json.JsonArray; +import org.apache.jena.atlas.json.JsonObject; +import org.apache.jena.graph.Graph; +import org.apache.jena.graph.Triple; +import org.apache.jena.query.Query; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.core.Quad; +import org.apache.jena.sparql.util.Context; + +public abstract class QueryExecWrapper + implements QueryExec +{ + private QueryExec delegate; + + public QueryExecWrapper(QueryExec delegate) { + super(); + this.delegate = delegate; + } + + protected QueryExec getDelegate() { + return delegate; + } + + @Override + public Context getContext() { + return getDelegate().getContext(); + } + + @Override + public Query getQuery() { + return getDelegate().getQuery(); + } + + @Override + public String getQueryString() { + return getDelegate().getQueryString(); + } + + @Override + public void close() { + getDelegate().close(); + } + + @Override + public boolean isClosed() { + return getDelegate().isClosed(); + } + + @Override + public void abort() { + getDelegate().abort(); + } + + @Override + public RowSet select() { + return exec(() -> getDelegate().select()); + } + + @Override + public Graph construct() { + return exec(() -> getDelegate().construct()); + } + + @Override + public Graph construct(Graph graph) { + return exec(() -> getDelegate().construct(graph)); + } + + @Override + public Graph describe() { + return exec(() -> getDelegate().describe()); + } + + @Override + public Graph describe(Graph graph) { + return exec(() -> getDelegate().describe(graph)); + } + + @Override + public boolean ask() { + return exec(() -> getDelegate().ask()); + } + + @Override + public Iterator constructTriples() { + return exec(() -> getDelegate().constructTriples()); + } + + @Override + public Iterator describeTriples() { + return exec(() -> getDelegate().describeTriples()); + } + + @Override + public Iterator constructQuads() { + return exec(() -> getDelegate().constructQuads()); + } + + @Override + public DatasetGraph constructDataset() { + return exec(() -> getDelegate().constructDataset()); + } + + @Override + public DatasetGraph constructDataset(DatasetGraph dataset) { + return exec(() -> getDelegate().constructDataset(dataset)); + } + + @Override + public JsonArray execJson() { + return exec(() -> getDelegate().execJson()); + } + + @Override + public Iterator execJsonItems() { + return exec(() -> getDelegate().execJsonItems()); + } + + @Override + public DatasetGraph getDataset() { + return getDelegate().getDataset(); + } + + protected T exec(Supplier supplier) { + T result = supplier.get(); + return result; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExec.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExec.java index 1ef542bce01..4dac951a4f9 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExec.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExec.java @@ -48,8 +48,4 @@ public static UpdateExecBuilder 
service(String serviceURL) { public static UpdateExecDatasetBuilder newBuilder() { return UpdateExecDatasetBuilder.create(); } - - /** Execute */ - @Override - public void execute(); } diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecAdapter.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecAdapter.java index b97c2af0420..45968427517 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecAdapter.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecAdapter.java @@ -20,6 +20,7 @@ import org.apache.jena.sparql.util.Context; import org.apache.jena.update.UpdateExecution; +import org.apache.jena.update.UpdateRequest; public class UpdateExecAdapter implements UpdateExec { @@ -38,6 +39,16 @@ protected UpdateExecAdapter(UpdateExecution updateProc) { this.updateProc = updateProc; } + @Override + public UpdateRequest getUpdateRequest() { + return updateProc.getUpdateRequest(); + } + + @Override + public String getUpdateRequestString() { + return updateProc.getUpdateRequestString(); + } + @Override public void execute() { updateProc.execute(); } diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecBuilderWrapper.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecBuilderWrapper.java new file mode 100644 index 00000000000..a1fa27a4ec0 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecBuilderWrapper.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
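QueryExecWrapper above channels every result-producing call through the protected exec(Supplier) hook, so a subclass can decorate all execution paths by overriding that single method. A sketch follows; the class name and the timing concern are illustrative, and note it times the call that produces the result object, not the consumption of a streaming RowSet.

    public class TimingQueryExec extends QueryExecWrapper {
        public TimingQueryExec(QueryExec delegate) {
            super(delegate);
        }

        @Override
        protected <T> T exec(Supplier<T> supplier) {
            long start = System.nanoTime();
            try {
                return supplier.get();
            } finally {
                long millis = (System.nanoTime() - start) / 1_000_000;
                // Replace with proper logging as appropriate.
                System.err.println("QueryExec call took " + millis + " ms");
            }
        }
    }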
+ */ + +package org.apache.jena.sparql.exec; + +import java.util.concurrent.TimeUnit; + +import org.apache.jena.graph.Node; +import org.apache.jena.sparql.core.Var; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.sparql.util.Symbol; +import org.apache.jena.update.Update; +import org.apache.jena.update.UpdateRequest; + +public class UpdateExecBuilderWrapper + implements UpdateExecBuilder +{ + private T delegate; + + public UpdateExecBuilderWrapper(T delegate) { + super(); + this.delegate = delegate; + } + + protected T getDelegate() { + return delegate; + } + + @SuppressWarnings("unchecked") + protected X self() { + return (X)this; + } + + @Override + public X update(UpdateRequest request) { + getDelegate().update(request); + return self(); + } + + @Override + public X update(Update update) { + getDelegate().update(update); + return self(); + } + + @Override + public X update(String updateString) { + getDelegate().update(updateString); + return self(); + } + + @Override + public X parseCheck(boolean parseCheck) { + getDelegate().parseCheck(parseCheck); + return self(); + } + + @Override + public X set(Symbol symbol, Object value) { + getDelegate().set(symbol, value); + return self(); + } + + @Override + public X set(Symbol symbol, boolean value) { + getDelegate().set(symbol, value); + return self(); + } + + @Override + public X context(Context context) { + getDelegate().context(context); + return self(); + } + + @Override + public X substitution(Binding binding) { + getDelegate().substitution(binding); + return self(); + } + + @Override + public X substitution(Var var, Node value) { + getDelegate().substitution(var, value); + return self(); + } + + @Override + public X substitution(String var, Node value) { + getDelegate().substitution(var, value); + return self(); + } + + @Override + public X timeout(long value, TimeUnit timeUnit) { + getDelegate().timeout(value, timeUnit); + return self(); + } + + @Override + public UpdateExec build() { + UpdateExec result = getDelegate().build(); + return result; + } + + @Override + public void execute() { + getDelegate().execute(); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecDatasetBuilder.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecDatasetBuilder.java index e1601177833..4f796fc004e 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecDatasetBuilder.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecDatasetBuilder.java @@ -21,23 +21,24 @@ import java.util.HashMap; import java.util.Map; import java.util.Objects; +import java.util.Optional; import java.util.concurrent.TimeUnit; import org.apache.jena.graph.Node; +import org.apache.jena.http.sys.UpdateEltAcc; import org.apache.jena.query.ARQ; import org.apache.jena.sparql.core.DatasetGraph; import org.apache.jena.sparql.core.Var; -import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.engine.Timeouts; import org.apache.jena.sparql.engine.Timeouts.Timeout; import org.apache.jena.sparql.engine.Timeouts.TimeoutBuilderImpl; -import org.apache.jena.sparql.modify.UpdateEngineFactory; -import org.apache.jena.sparql.modify.UpdateEngineRegistry; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.engine.dispatch.SparqlDispatcherRegistry; import org.apache.jena.sparql.syntax.syntaxtransform.UpdateTransformOps; import org.apache.jena.sparql.util.Context; import 
org.apache.jena.sparql.util.ContextAccumulator; import org.apache.jena.sparql.util.Symbol; import org.apache.jena.update.Update; -import org.apache.jena.update.UpdateException; import org.apache.jena.update.UpdateFactory; import org.apache.jena.update.UpdateRequest; @@ -55,8 +56,8 @@ public class UpdateExecDatasetBuilder implements UpdateExecBuilder { private TimeoutBuilderImpl timeoutBuilder = new TimeoutBuilderImpl(); - private UpdateRequest update = null; - private UpdateRequest updateRequest = new UpdateRequest(); + private Optional parseCheck = Optional.empty(); + private UpdateEltAcc updateEltAcc = new UpdateEltAcc(); private UpdateExecDatasetBuilder() {} @@ -79,17 +80,25 @@ public UpdateExecDatasetBuilder update(Update update) { /** Parse and update operations to the {@link UpdateRequest} being built. */ @Override public UpdateExecDatasetBuilder update(String updateRequestString) { - UpdateRequest more = UpdateFactory.create(updateRequestString); - add(more); + if (effectiveParseCheck()) { + UpdateRequest more = UpdateFactory.create(updateRequestString); + add(more); + } else { + updateEltAcc.add(updateRequestString); + } return this; } - /** Hint has no effect on update execs over datasets. */ @Override public UpdateExecBuilder parseCheck(boolean parseCheck) { + this.parseCheck = Optional.of(parseCheck); return this; } + protected boolean effectiveParseCheck() { + return SparqlDispatcherRegistry.effectiveParseCheck(parseCheck, contextAcc); + } + public UpdateExecDatasetBuilder dataset(DatasetGraph dsg) { this.dataset = dsg; return this; @@ -157,22 +166,29 @@ public UpdateExecDatasetBuilder initialBinding(Binding initialBinding) { @Override public UpdateExec build() { - Objects.requireNonNull(dataset, "No dataset for update"); - Objects.requireNonNull(updateRequest, "No update request"); - - UpdateRequest actualUpdate = updateRequest; + UpdateRequest actualUpdate = null; - if ( substitutionMap != null && ! substitutionMap.isEmpty() ) + if ( substitutionMap != null && ! 
substitutionMap.isEmpty() ) { + actualUpdate = updateEltAcc.buildUpdateRequest(); actualUpdate = UpdateTransformOps.transform(actualUpdate, substitutionMap); + } Context cxt = getContext(); - UpdateEngineFactory f = UpdateEngineRegistry.get().find(dataset, cxt); - if ( f == null ) - throw new UpdateException("Failed to find an UpdateEngine"); + Timeouts.applyDefaultUpdateTimeoutFromContext(timeoutBuilder, cxt); Timeout timeout = timeoutBuilder.build(); - - UpdateExec uExec = new UpdateExecDataset(actualUpdate, dataset, initialBinding, cxt, f, timeout); + Timeouts.setUpdateTimeout(cxt, timeout); + + UpdateExec uExec; + if (updateEltAcc.isParsed()) { + if (actualUpdate == null) { + actualUpdate = updateEltAcc.buildUpdateRequest(); + } + uExec = SparqlDispatcherRegistry.exec(actualUpdate, dataset, initialBinding, cxt); + } else { + String actualString = updateEltAcc.buildString(); + uExec = SparqlDispatcherRegistry.exec(actualString, dataset, initialBinding, cxt); + } return uExec; } @@ -189,10 +205,10 @@ public void execute(DatasetGraph dsg) { } private void add(UpdateRequest request) { - request.getOperations().forEach(this::add); + updateEltAcc.add(request); } private void add(Update update) { - this.updateRequest.add(update); + updateEltAcc.add(update); } } diff --git a/jena-geosparql/src/main/java/org/apache/jena/geosparql/spatial/task/TaskState.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecWrapper.java similarity index 62% rename from jena-geosparql/src/main/java/org/apache/jena/geosparql/spatial/task/TaskState.java rename to jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecWrapper.java index add2ab9e086..274447df082 100644 --- a/jena-geosparql/src/main/java/org/apache/jena/geosparql/spatial/task/TaskState.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecWrapper.java @@ -16,13 +16,15 @@ * limitations under the License. */ -package org.apache.jena.geosparql.spatial.task; +package org.apache.jena.sparql.exec; -public enum TaskState { - CREATED, // Task object created. - STARTING, // Task execution requested, but core task.run() method not yet invoked. - RUNNING, // Core task.run() method invoked. - ABORTING, // Abort called while not in TERMINATING / TERMINATED state. - TERMINATING, // Core task.run() method exited. - TERMINATED, // Task cleanup complete. Triggering this event may require a call to close(). 
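The rewritten UpdateExecDatasetBuilder.build() above only materialises an UpdateRequest when every accumulated element is parsed; otherwise the accumulated string form is passed to the dispatcher registry. A sketch of both paths (dataset and update text are illustrative; parseCheck(false) must come before update(String), since parsing happens as elements are added).

    DatasetGraph dsg = DatasetGraphFactory.createTxnMem();

    // Parsed path: the string becomes Update objects as it is added.
    UpdateExec.newBuilder()
            .update("INSERT DATA { <urn:ex:s> <urn:ex:p> <urn:ex:o> }")
            .execute(dsg);

    // Raw path: the string is kept verbatim and handed to the registry by build().
    UpdateExecDatasetBuilder builder = UpdateExec.newBuilder();
    builder.parseCheck(false);   // declared to return the UpdateExecBuilder interface, hence no chaining here
    builder.update("INSERT DATA { <urn:ex:s> <urn:ex:p> <urn:ex:o> }");
    builder.execute(dsg);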
+import org.apache.jena.sparql.exec.tracker.UpdateProcessorWrapper; + +public class UpdateExecWrapper + extends UpdateProcessorWrapper + implements UpdateExec +{ + public UpdateExecWrapper(T delegate) { + super(delegate); + } } diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecHTTP.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecHTTP.java index 9e4582ad466..8152e0c86f8 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecHTTP.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecHTTP.java @@ -18,7 +18,12 @@ package org.apache.jena.sparql.exec.http; -import static org.apache.jena.http.HttpLib.*; +import static org.apache.jena.http.HttpLib.acceptHeader; +import static org.apache.jena.http.HttpLib.contentTypeHeader; +import static org.apache.jena.http.HttpLib.dft; +import static org.apache.jena.http.HttpLib.finishInputStream; +import static org.apache.jena.http.HttpLib.requestURL; +import static org.apache.jena.http.HttpLib.responseHeader; import java.io.ByteArrayInputStream; import java.io.IOException; @@ -52,8 +57,21 @@ import org.apache.jena.http.AsyncHttpRDF; import org.apache.jena.http.HttpEnv; import org.apache.jena.http.HttpLib; -import org.apache.jena.query.*; -import org.apache.jena.riot.*; +import org.apache.jena.query.ARQ; +import org.apache.jena.query.Query; +import org.apache.jena.query.QueryCancelledException; +import org.apache.jena.query.QueryException; +import org.apache.jena.query.QueryExecException; +import org.apache.jena.query.QueryFactory; +import org.apache.jena.query.QueryParseException; +import org.apache.jena.query.QueryType; +import org.apache.jena.query.ResultSet; +import org.apache.jena.query.Syntax; +import org.apache.jena.riot.Lang; +import org.apache.jena.riot.RDFDataMgr; +import org.apache.jena.riot.RDFLanguages; +import org.apache.jena.riot.ResultSetMgr; +import org.apache.jena.riot.WebContent; import org.apache.jena.riot.resultset.ResultSetLang; import org.apache.jena.riot.resultset.ResultSetReaderRegistry; import org.apache.jena.riot.web.HttpNames; diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/BasicTaskExec.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/BasicTaskExec.java new file mode 100644 index 00000000000..c7aecc57db7 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/BasicTaskExec.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec.tracker; + +import org.apache.jena.sparql.engine.iterator.Abortable; + +/** + * Interface for tasks that can execute once - i.e. not periodic ones. + * Combines task information with cancellation. 
+ */ +public interface BasicTaskExec + extends BasicTaskInfo, Abortable // XXX Abortable is in iterator package - not ideal. +{ +} diff --git a/jena-geosparql/src/main/java/org/apache/jena/geosparql/spatial/task/BasicTask.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/BasicTaskInfo.java similarity index 56% rename from jena-geosparql/src/main/java/org/apache/jena/geosparql/spatial/task/BasicTask.java rename to jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/BasicTaskInfo.java index 496f9e153cd..8db4ece3eac 100644 --- a/jena-geosparql/src/main/java/org/apache/jena/geosparql/spatial/task/BasicTask.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/BasicTaskInfo.java @@ -16,39 +16,44 @@ * limitations under the License. */ -package org.apache.jena.geosparql.spatial.task; - -import org.apache.jena.sparql.engine.iterator.Abortable; - -/** An outside view of a running task */ -public interface BasicTask extends Abortable { - - public interface TaskListener { - void onStateChange(T task); - } +package org.apache.jena.sparql.exec.tracker; +public interface BasicTaskInfo { + /** The state of the task. */ TaskState getTaskState(); - /** A label for the task. */ - String getLabel(); - - @Override - void abort(); - + /** Time stamp for when the task object was created. */ long getCreationTime(); + + /** Time stamp for when the task was started. Returns -1 if was not started yet.*/ long getStartTime(); - long getEndTime(); + + /** + * Time stamp for when the task completed. Returns -1 if it has not finished yet. + */ + long getFinishTime(); + + /** Time stamp for when the task was cancelled. Returns -1 if not aborted. */ long getAbortTime(); - /** If non null, the throwable that is the cause for an exceptional termination of the task. */ - Throwable getThrowable(); + /** + * Return a description suitable for presentation to users. + * This might be a less technical description than what is returned by toString(). + */ + String getLabel(); - /** Get the last status message of the task. May be null. */ String getStatusMessage(); + /** + * If this method returns a non-null result then the task is considered to be failing or to have failed. + * A non-null result does not imply that the task has already reached TERMINATED state. + */ + Throwable getThrowable(); + /** Whether abort has been called. */ - // XXX this might be different from whether the task actually transitioned into aborting state. - boolean isAborting(); + default boolean isAborting() { + return getAbortTime() >= 0; + } default boolean isTerminated() { TaskState state = getTaskState(); diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/ChainingQueryDispatcherExecTracker.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/ChainingQueryDispatcherExecTracker.java new file mode 100644 index 00000000000..1cd46f03994 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/ChainingQueryDispatcherExecTracker.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
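Because every timestamp accessor on BasicTaskInfo above uses -1 for "has not happened yet", duration computations need a guard. A small helper sketch (the method name is illustrative):

    /** Wall-clock duration in milliseconds, or -1 while the task has not both started and finished. */
    static long durationMillis(BasicTaskInfo task) {
        long start = task.getStartTime();
        long finish = task.getFinishTime();
        return (start < 0 || finish < 0) ? -1 : finish - start;
    }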
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec.tracker; + +import org.apache.jena.query.Query; +import org.apache.jena.query.Syntax; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.engine.dispatch.ChainingQueryDispatcher; +import org.apache.jena.sparql.engine.dispatch.QueryDispatcher; +import org.apache.jena.sparql.exec.QueryExec; +import org.apache.jena.sparql.util.Context; + +public class ChainingQueryDispatcherExecTracker + implements ChainingQueryDispatcher +{ + @Override + public QueryExec create(Query query, DatasetGraph dsg, Binding initialBinding, Context context, + QueryDispatcher chain) { + QueryExec delegate = chain.create(query, dsg, initialBinding, context); + QueryExec result = TaskEventBroker.track(context, delegate); + + // Remove event broker from dispatcher context as to avoid tracking possible nested executions. + TaskEventBroker.remove(context); + return result; + } + + @Override + public QueryExec create(String queryString, Syntax syntax, DatasetGraph dsg, Binding initialBinding, + Context context, QueryDispatcher chain) { + QueryExec delegate = chain.create(queryString, syntax, dsg, initialBinding, context); + QueryExec result = TaskEventBroker.track(context, delegate); + + // Remove event broker from dispatcher context as to avoid tracking possible nested executions. + TaskEventBroker.remove(context); + return result; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/ChainingUpdateDispatcherExecTracker.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/ChainingUpdateDispatcherExecTracker.java new file mode 100644 index 00000000000..ce3e48b269f --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/ChainingUpdateDispatcherExecTracker.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.jena.sparql.exec.tracker; + +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.engine.dispatch.ChainingUpdateDispatcher; +import org.apache.jena.sparql.engine.dispatch.UpdateDispatcher; +import org.apache.jena.sparql.exec.UpdateExec; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.update.UpdateRequest; + +public class ChainingUpdateDispatcherExecTracker + implements ChainingUpdateDispatcher +{ + @Override + public UpdateExec create(String updateRequestString, DatasetGraph dsg, Binding initialBinding, Context context, + UpdateDispatcher chain) { + UpdateExec delegate = chain.create(updateRequestString, dsg, initialBinding, context); + UpdateExec result = TaskEventBroker.track(context, delegate); + + // Remove event broker from dispatcher context as to avoid tracking possible nested executions. + TaskEventBroker.remove(context); + return result; + } + + @Override + public UpdateExec create(UpdateRequest updateRequest, DatasetGraph dsg, Binding initialBinding, Context context, + UpdateDispatcher chain) { + UpdateExec delegate = chain.create(updateRequest, dsg, initialBinding, context); + UpdateExec result = TaskEventBroker.track(context, delegate); + + // Remove event broker from dispatcher context as to avoid tracking possible nested executions. + TaskEventBroker.remove(context); + return result; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/IteratorTracked.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/IteratorTracked.java new file mode 100644 index 00000000000..6fb9ab6bcf3 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/IteratorTracked.java @@ -0,0 +1,89 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec.tracker; + +import java.util.Iterator; +import java.util.Objects; +import java.util.function.BooleanSupplier; +import java.util.function.Consumer; +import java.util.function.Supplier; + +import org.apache.jena.atlas.iterator.IteratorWrapper; + +/** + * Iterator wrapper that forwards an encountered exception + * to a configured destination. 
+ */ +public class IteratorTracked + extends IteratorWrapper +{ + protected ThrowableTracker tracker; + + public IteratorTracked(Iterator iterator, ThrowableTracker tracker) { + super(iterator); + this.tracker = Objects.requireNonNull(tracker); + } + + @Override + public boolean hasNext() { + return trackBoolean(tracker, get()::hasNext); + } + + @Override + public T next() { + return track(tracker, get()::next); + } + + @Override + public void forEachRemaining(Consumer action) { + trackForEachRemaining(tracker, get(), action); + } + + public static boolean trackBoolean(ThrowableTracker tracker, BooleanSupplier action) { + try { + boolean result = action.getAsBoolean(); + return result; + } catch (Throwable t) { + tracker.report(t); + t.addSuppressed(new RuntimeException("Error during hasNext.")); + throw t; + } + } + + public static T track(ThrowableTracker tracker, Supplier action) { + try { + T result = action.get(); + return result; + } catch (Throwable t) { + tracker.report(t); + t.addSuppressed(new RuntimeException("Error during hasNext.")); + throw t; + } + } + + public static void trackForEachRemaining(ThrowableTracker tracker, Iterator it, Consumer action) { + try { + it.forEachRemaining(action); + } catch (Throwable t) { + tracker.report(t); + t.addSuppressed(new RuntimeException("Error during forEachRemaining.")); + throw t; + } + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/QueryExecTask.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/QueryExecTask.java new file mode 100644 index 00000000000..12c6fd6a123 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/QueryExecTask.java @@ -0,0 +1,204 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec.tracker; + +import java.util.Objects; + +import org.apache.jena.query.Query; +import org.apache.jena.query.QueryType; +import org.apache.jena.sparql.exec.QueryExec; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Wraps a QueryExec and tracks its execution and any error in its machinery. + * This includes obtained RowSets and Iterators. + */ +public class QueryExecTask + extends QueryExecTaskBase // + implements BasicTaskExec +{ + private static final Logger logger = LoggerFactory.getLogger(QueryExecTask.class); + + protected TaskListener listener; + protected long creationTime; + protected long startTime = -1; + protected long abortTime = -1; + protected long finishTime = -1; + protected TaskState currentState = TaskState.CREATED; + + /** + * Note: The constructor does not notify the listener with the creation event. + * This has to be done externally, such as using {@link #create(QueryExec, TaskListener)}. 
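A sketch of how IteratorTracked above pairs with a tracker: the failure still propagates to the caller, but it is also recorded so the owning task can report it later via getThrowable(). The rowSet and process names are placeholders; ThrowableTrackerFirst is the tracker implementation used by QueryExecTask below.

    ThrowableTrackerFirst tracker = new ThrowableTrackerFirst();
    Iterator<Binding> tracked = new IteratorTracked<>(rowSet, tracker);   // rowSet: any Iterator<Binding>
    try {
        tracked.forEachRemaining(binding -> process(binding));
    } catch (RuntimeException e) {
        // The first failure is remembered as well as rethrown.
        Throwable recorded = tracker.getFirstThrowable();   // non-null at this point
    }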
+ */ + protected QueryExecTask(QueryExec delegate, long creationTime, TaskListener listener) { + super(delegate, new ThrowableTrackerFirst()); + this.listener = listener; + this.creationTime = creationTime; + } + + /** Wrap a QueryExec and notify the listener with the creation event using the current time. */ + public static QueryExecTask create(QueryExec delegate, TaskListener listener) { + long creationTime = System.currentTimeMillis(); + return create(delegate, creationTime, listener); + } + + /** Wrap a QueryExec and notify the listener with the creation event using the given time. */ + public static QueryExecTask create(QueryExec delegate, long creationTime, TaskListener listener) { + Objects.requireNonNull(delegate); + Objects.requireNonNull(listener); + QueryExecTask result = new QueryExecTask(delegate, creationTime, listener); + listener.onStateChange(result); + return result; + } + + @Override + public TaskState getTaskState() { + return currentState; + } + + @Override + public long getCreationTime() { + return creationTime; + } + + @Override + public long getAbortTime() { + return abortTime; + } + + @Override + public long getStartTime() { + return startTime; + } + + @Override + public long getFinishTime() { + return finishTime; + } + + @Override + public Throwable getThrowable() { + return getThrowableTracker().getFirstThrowable(); + } + + @Override + public String getLabel() { + Query query = getQuery(); + String result; + if (query == null) { + result = getQueryString(); + if (result == null) { + result = "Unknown query"; + } + } else { + result = query.toString(); + } + return result; + } + + @Override + public void abort() { + if (!isAborting()) { + this.abortTime = System.currentTimeMillis(); + super.abort(); + } + } + + @Override + public void beforeExec(QueryType queryType) { + if (!TaskState.CREATED.equals(currentState)) { + throw new IllegalStateException("Already started."); + } + + startTime = System.currentTimeMillis(); + transition(TaskState.STARTING, () -> {}); + transition(TaskState.RUNNING, () -> super.beforeExec(queryType)); + } + + @Override + public void afterExec() { + try { + transition(TaskState.TERMINATING, () -> {}); + } finally { + try { + super.afterExec(); + } finally { + updateFinishTime(); + advertiseStateChange(TaskState.TERMINATED); + } + } + } + + protected void updateFinishTime() { + if (finishTime < 0) { + finishTime = System.currentTimeMillis(); + } + } + + /** Update state notifies all listeners of the change. */ + protected void advertiseStateChange(TaskState newState) { + Objects.requireNonNull(newState); + if (currentState == null || newState.ordinal() > currentState.ordinal()) { + // State oldState = currentState; + currentState = newState; + if (listener != null) { + try { + listener.onStateChange(this); + } catch (Throwable e) { + logger.warn("Exception raised in listener.", e); + } + } + } + } + + /** + * Run the given action. + * + * On success, transitions to the specified target state. + * + * On failure, transitions to {@link TaskState#TERMINATING} and re-throws the encountered exception. + * This should cause a subsequent call to close() which transitions to {@link TaskState#TERMINATED}. 
+ */ + protected void transition(TaskState targetState, Runnable action) { + try { + action.run(); + advertiseStateChange(targetState); + } catch (Throwable throwable) { + throwable.addSuppressed(new RuntimeException("Failure transitioning from " + currentState + " to " + targetState + ".", throwable)); + getThrowableTracker().report(throwable); + advertiseStateChange(TaskState.TERMINATING); + throw throwable; + } + } + + @Override + public String getStatusMessage() { + return ""; + } + + @Override + public String toString() { + return "QueryExecTask [startTime=" + getStartTime() + + ", finishTime=" + getFinishTime() + ", getThrowable=" + getThrowable() + ", queryExecType=" + getQueryExecType() + // Queries excluded because they make the string less readable. + // + ", query=" + getQuery() + ", queryString=" + getQueryString() + + ", delegate=" + getDelegate() + "]"; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/QueryExecTaskBase.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/QueryExecTaskBase.java new file mode 100644 index 00000000000..dd4a78a5a35 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/QueryExecTaskBase.java @@ -0,0 +1,200 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec.tracker; + +import java.util.Iterator; +import java.util.function.Supplier; + +import org.apache.jena.atlas.json.JsonArray; +import org.apache.jena.atlas.json.JsonObject; +import org.apache.jena.graph.Graph; +import org.apache.jena.graph.Triple; +import org.apache.jena.query.Query; +import org.apache.jena.query.QueryType; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.core.Quad; +import org.apache.jena.sparql.exec.QueryExec; +import org.apache.jena.sparql.exec.RowSet; +import org.apache.jena.sparql.util.Context; + +/** + * Wrapper for QueryExec that tracks any encountered exception. + * This is accomplished by wrapping RowSets and Iterators of the underlying QueryExec. + */ +public abstract class QueryExecTaskBase implements QueryExec +{ + private T delegate; + private ThrowableTracker throwableTracker; + private QueryType queryExecType = null; + + public QueryExecTaskBase(T delegate, ThrowableTracker tracker) { + super(); + this.delegate = delegate; + this.throwableTracker = tracker; + } + + protected T getDelegate() { + return delegate; + } + + protected ThrowableTracker getThrowableTracker() { + return throwableTracker; + } + + /** + * The query type requested for execution. + * For example, calling select() sets this type to {@link QueryType#SELECT}. 
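+ * The value is {@code null} until one of the execution methods (such as {@code select()} or {@code ask()}) has been invoked.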
+ */ + public QueryType getQueryExecType() { + return queryExecType; + } + + @Override + public Context getContext() { + return getDelegate().getContext(); + } + + @Override + public Query getQuery() { + return getDelegate().getQuery(); + } + + @Override + public String getQueryString() { + return getDelegate().getQueryString(); + } + + @Override + public void close() { + try { + getDelegate().close(); + } finally { + afterExec(); + } + } + + @Override + public boolean isClosed() { + return getDelegate().isClosed(); + } + + @Override + public void abort() { + getDelegate().abort(); + } + + public void beforeExec(QueryType queryType) { + this.queryExecType = queryType; + } + + /** Note that afterExec is run only on close. */ + public void afterExec() { + } + + @Override + public RowSet select() { + return compute(QueryType.SELECT, () -> wrapRowSet(getDelegate().select())); + } + + @Override + public Graph construct() { + return compute(QueryType.CONSTRUCT, () -> getDelegate().construct()); + } + + @Override + public Graph construct(Graph graph) { + return compute(QueryType.CONSTRUCT, () -> getDelegate().construct(graph)); + } + + @Override + public Graph describe() { + return compute(QueryType.DESCRIBE, () -> getDelegate().describe()); + } + + @Override + public Graph describe(Graph graph) { + return compute(QueryType.DESCRIBE, () -> getDelegate().describe(graph)); + } + + @Override + public boolean ask() { + return compute(QueryType.ASK, () -> getDelegate().ask()); + } + + @Override + public Iterator constructTriples() { + return compute(QueryType.CONSTRUCT, () -> wrapIterator(getDelegate().constructTriples())); + } + + @Override + public Iterator describeTriples() { + return compute(QueryType.CONSTRUCT, () -> wrapIterator(getDelegate().describeTriples())); + } + + @Override + public Iterator constructQuads() { + return compute(QueryType.CONSTRUCT, () -> wrapIterator(getDelegate().constructQuads())); + } + + @Override + public DatasetGraph constructDataset() { + return compute(QueryType.CONSTRUCT, () -> getDelegate().constructDataset()); + } + + @Override + public DatasetGraph constructDataset(DatasetGraph dataset) { + return compute(QueryType.CONSTRUCT, () -> getDelegate().constructDataset(dataset)); + } + + @Override + public JsonArray execJson() { + return compute(QueryType.CONSTRUCT_JSON, () -> getDelegate().execJson()); + } + + @Override + public Iterator execJsonItems() { + return compute(QueryType.CONSTRUCT_JSON, () -> wrapIterator(getDelegate().execJsonItems())); + } + + @Override + public DatasetGraph getDataset() { + return getDelegate().getDataset(); + } + + protected RowSet wrapRowSet(RowSet base) { + return new RowSetTracked(base, getThrowableTracker()); + } + + protected Iterator wrapIterator(Iterator base) { + return new IteratorTracked<>(base, getThrowableTracker()); + } + + protected X compute(QueryType queryType, Supplier supplier) { + beforeExec(queryType); + try { + X result = supplier.get(); + return result; + } catch(Throwable e) { + e.addSuppressed(new RuntimeException("Error during select().")); + throwableTracker.report(e); + throw e; + } + // afterExec is called during close() + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/RowSetTracked.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/RowSetTracked.java new file mode 100644 index 00000000000..3109685e464 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/RowSetTracked.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software 
Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec.tracker; + +import java.util.List; +import java.util.Objects; +import java.util.function.Consumer; + +import org.apache.jena.riot.rowset.RowSetWrapper; +import org.apache.jena.sparql.core.Var; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.exec.RowSet; + +/** RowSetWrapper that tracks any encountered exceptions in the provided tracker. */ +public class RowSetTracked + extends RowSetWrapper +{ + protected ThrowableTracker tracker; + + public RowSetTracked(RowSet other, ThrowableTracker tracker) { + super(other); + this.tracker = Objects.requireNonNull(tracker); + } + + public ThrowableTracker getTracker() { + return tracker; + } + + @Override + public boolean hasNext() { + return IteratorTracked.trackBoolean(tracker, get()::hasNext); + } + + @Override + public Binding next() { + return IteratorTracked.track(tracker, get()::next); + } + + @Override + public List getResultVars() { + return IteratorTracked.track(tracker, get()::getResultVars); + } + + @Override + public void forEachRemaining(Consumer action) { + IteratorTracked.trackForEachRemaining(tracker, get(), action); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/TaskEventBroker.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/TaskEventBroker.java new file mode 100644 index 00000000000..5bc358de798 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/TaskEventBroker.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.jena.sparql.exec.tracker; + +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.concurrent.ConcurrentHashMap; + +import org.apache.jena.sparql.SystemARQ; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.exec.QueryExec; +import org.apache.jena.sparql.exec.UpdateExec; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.sparql.util.Symbol; + +/** + * A broker that is both sink and source for task events. + * + * A broker can connect to other ones + * using {@link #connect(TaskEventBroker)} and disconnect from + * them all using {@link #disconnectFromAll()}. + */ +public class TaskEventBroker + extends TaskEventSource + implements TaskListener +{ + private Map, Runnable> upstreamRegistrations = new ConcurrentHashMap<>(); + + public Runnable connect(TaskEventBroker upstream) { + Runnable unregisterFromBase = upstream.addListener(BasicTaskExec.class, this); + Runnable unregisterFromThis = upstreamRegistrations.computeIfAbsent(upstream, u -> { + return () -> { + unregisterFromBase.run(); + upstreamRegistrations.remove(upstream); + }; + }); + return unregisterFromThis; + } + + @Override + public void onStateChange(BasicTaskExec task) { + advertiseStateChange(task); + } + + public void disconnectFromAll() { + upstreamRegistrations.values().forEach(Runnable::run); + } + + public static QueryExec track(QueryExec queryExec) { + Context cxt = queryExec.getContext(); + return track(cxt, queryExec); + } + + /** + * If there is a taskTracker in the context then return a {@link QueryExecTask}. + * Otherwise return the provided query exec. + */ + public static QueryExec track(Context cxt, QueryExec queryExec) { + TaskEventBroker registry = get(cxt); + QueryExec result = (registry == null) + ? queryExec + : QueryExecTask.create(queryExec, registry); + return result; + } + + public static UpdateExec track(UpdateExec updateExec) { + Context cxt = updateExec.getContext(); + return track(cxt, updateExec); + } + + /** + * If there is a taskTracker in the context then return a {@link QueryExecTask}. + * Otherwise return the provided query exec. + */ + public static UpdateExec track(Context cxt, UpdateExec updateExec) { + TaskEventBroker registry = get(cxt); + return track(registry, updateExec); + } + + public static UpdateExec track(TaskEventBroker tracker, UpdateExec updateExec) { + UpdateExec result = (tracker == null) + ? updateExec + : UpdateExecTask.create(updateExec, tracker); + return result; + } + + // ----- ARQ Integration ----- + + public static final Symbol symTaskEventBroker = SystemARQ.allocSymbol("taskEventBroker"); + + public static TaskEventBroker get(DatasetGraph dsg) { + return dsg == null ? null : get(dsg.getContext()); + } + + public static TaskEventBroker get(Context context) { + return context == null ? null : context.get(symTaskEventBroker); + } + + public static void remove(Context context) { + if (context != null) { + context.remove(symTaskEventBroker); + } + } + + /** Get an existing TaskEventBroker or atomically create a new one. */ + public static TaskEventBroker getOrCreate(Context context) { + TaskEventBroker result = context.computeIfAbsent(symTaskEventBroker, sym -> new TaskEventBroker()); + return result; + } + + /** Get an existing TaskEventBroker or fail with a {@link NoSuchElementException}. 
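+ * <p>A broker is usually placed into a dataset's context up front; a sketch along the lines of the unit tests: + * <pre>{@code + * TaskEventBroker broker = TaskEventBroker.getOrCreate(dsg.getContext()); + * TaskEventHistory history = TaskEventHistory.getOrCreate(dsg.getContext()); + * history.connect(broker); // the history now receives the broker's task events + * }</pre>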
*/ + public static TaskEventBroker require(Context context) { + TaskEventBroker result = get(context); + if (result == null) { + throw new NoSuchElementException("No task event broker in context."); + } + return result; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/TaskEventHistory.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/TaskEventHistory.java new file mode 100644 index 00000000000..dddcf51dc3f --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/TaskEventHistory.java @@ -0,0 +1,192 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec.tracker; + +import java.util.Iterator; +import java.util.Map; +import java.util.Map.Entry; +import java.util.NoSuchElementException; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.ConcurrentNavigableMap; +import java.util.concurrent.ConcurrentSkipListMap; +import java.util.concurrent.atomic.AtomicLong; + +import org.apache.jena.sparql.SystemARQ; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.sparql.util.Symbol; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class TaskEventHistory + extends TaskEventBroker +{ + private static final Logger logger = LoggerFactory.getLogger(TaskEventHistory.class); + + // Relabel tasks by a sequential ids. + // XXX Id allocation could be factored out in a central place. Fuseki allocates IDs too. + protected AtomicLong nextSerial = new AtomicLong(); + protected Map taskIdToSerial = new ConcurrentHashMap<>(); + protected ConcurrentNavigableMap serialToTask = new ConcurrentSkipListMap<>(); + + protected int maxHistorySize = 1000; + + // History is indexed by serial id. 
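+ // Newest entries are added at the head (addFirst); trimHistory() evicts the oldest entries from the tail.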
+ protected ConcurrentLinkedDeque> history = new ConcurrentLinkedDeque<>(); + + public BasicTaskExec getByTaskId(long taskId) { + Long serial = taskIdToSerial.get(taskId); + BasicTaskExec result = getTaskBySerialId(serial); + return result; + } + + public BasicTaskExec getTaskBySerialId(long serialId) { + return serialToTask.get(serialId); + } + + public ConcurrentNavigableMap getActiveTasks() { + return serialToTask; + } + + public ConcurrentLinkedDeque> getHistory() { + return history; + } + + public void setMaxHistorySize(int maxHistorySize) { + this.maxHistorySize = maxHistorySize; + trimHistory(); + } + + @Override + public void onStateChange(BasicTaskExec task) { + switch(task.getTaskState()) { + case STARTING: put(task); break; + case TERMINATED: remove(task); break; + default: break; + } + } + + public long getId(BasicTaskExec task) { + long id = System.identityHashCode(task); + return id; + } + + public Long getSerialId(long taskId) { + return taskIdToSerial.get(taskId); + } + + public void put(BasicTaskExec newTask) { + long taskId = getId(newTask); + boolean[] accepted = {false}; + taskIdToSerial.compute(taskId, (_taskId, oldSerial) -> { + if (oldSerial != null) { + BasicTaskExec oldTask = serialToTask.get(oldSerial); + if (oldTask != newTask) { + // Distinct tasks with the same id - should never happen. + logger.warn("Rejected task tracking because of a hash clash."); + } else { + logger.warn("Task was already added."); + } + return oldSerial; + } + + accepted[0] = true; + long r = nextSerial.incrementAndGet(); + serialToTask.put(r, newTask); + return r; + }); + if (accepted[0]) { + advertiseStateChange(newTask); + } + } + + protected void trimHistory() { + if (history.size() > maxHistorySize) { + Iterator> it = history.descendingIterator(); + while (history.size() >= maxHistorySize && it.hasNext()) { + // Note: No need to clean up taskIdToSerial here. + // Before items are added to the history, their serial id mapping is removed. + it.next(); + it.remove(); + } + } + } + + public boolean remove(BasicTaskExec task) { + long searchTaskId = getId(task); + + // Advertise state change before removing the task entry! + Long foundTaskId = taskIdToSerial.get(searchTaskId); + if (foundTaskId != null) { + advertiseStateChange(task); + } + + Long[] foundSerial = {null}; + taskIdToSerial.compute(searchTaskId, (_taskId, serial) -> { + if (serial != null) { + serialToTask.compute(serial, (s, oldTask) -> { + if (oldTask == task) { + foundSerial[0] = s; + return null; + } + return oldTask; + }); + return foundSerial[0] != null ? null : serial; + } + return serial; + }); + + boolean result = foundSerial[0] != null; + if (result) { + history.addFirst(Map.entry(foundSerial[0], task)); + trimHistory(); + } + return result; + } + + public void clear() { + history.clear(); + } + + @Override + public String toString() { + return "Active: " + serialToTask.size() + ", History: " + history.size() + "/" + maxHistorySize; + } + + // --- ARQ Integration --- + + public static final Symbol symTaskEventHistory = SystemARQ.allocSymbol("taskEventHistory"); + + public static TaskEventHistory get(Context context) { + return context == null ? 
null : context.get(symTaskEventHistory); + } + + public static TaskEventHistory getOrCreate(Context context) { + TaskEventHistory result = context.computeIfAbsent(symTaskEventHistory, sym -> new TaskEventHistory()); + return result; + } + + public static TaskEventHistory require(Context context) { + TaskEventHistory result = get(context); + if (result == null) { + throw new NoSuchElementException("No TaskEventHistory registered in context"); + } + return result; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/TaskEventSource.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/TaskEventSource.java new file mode 100644 index 00000000000..4fb62a62ca5 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/TaskEventSource.java @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec.tracker; + +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class TaskEventSource { + private static final Logger logger = LoggerFactory.getLogger(TaskEventSource.class); + + // LinkedHashMap to retain listener order. + protected Map, TaskListener> listenersByType = + Collections.synchronizedMap(new LinkedHashMap<>()); + + public Runnable addListener(Class clz, TaskListener listener) { + listenersByType.compute(listener, (k, v) -> { + if (v != null) { + throw new RuntimeException("Listener already registered"); + } + return new TaskListenerTypeAdapter<>(clz, listener); + }); + return () -> listenersByType.remove(listener); + } + + protected void advertiseStateChange(BasicTaskExec task) { + for (TaskListener listener : listenersByType.values()) { + try { + listener.onStateChange(task); + } catch (Throwable t) { + logger.warn("Failure while notifying listener.", t); + } + } + } + + class TaskListenerTypeAdapter + implements TaskListener + { + protected Class clz; + protected TaskListener delegate; + + public TaskListenerTypeAdapter(Class clz, TaskListener delegate) { + super(); + this.clz = clz; + this.delegate = delegate; + } + + @Override + public void onStateChange(BasicTaskExec task) { + if (clz.isInstance(task)) { + Y obj = clz.cast(task); + delegate.onStateChange(obj); + } + } + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/TaskListener.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/TaskListener.java new file mode 100644 index 00000000000..88239425161 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/TaskListener.java @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec.tracker; + +public interface TaskListener { + void onStateChange(T task); +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/TaskListenerByState.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/TaskListenerByState.java new file mode 100644 index 00000000000..48dcd1acdb0 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/TaskListenerByState.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec.tracker; + +public interface TaskListenerByState + extends TaskListener +{ + @Override + public default void onStateChange(T task) { + switch (task.getTaskState()) { + case CREATED: onCreated(task); break; + case TERMINATED: onTerminated(task); break; + default: + // Log warning? + break; + } + } + + public void onCreated(T task); + public void onTerminated(T task); +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/TaskState.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/TaskState.java new file mode 100644 index 00000000000..b3d96b4ab06 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/TaskState.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.jena.sparql.exec.tracker; + +public enum TaskState { + /** Task object has been created. */ + CREATED, + + /** A method semantically akin to beforeRun() or init() has been called. */ + STARTING, + + /** A method semantically akin to run() has been called. This implies that the beforeRun() method has completed. */ + RUNNING, + + /** A method semantically akin to afterRun() or close() has been called but not completed yet. */ + TERMINATING, + + /** A method semantically akin to afterRun() or close() has completed. */ + TERMINATED +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/ThrowableTracker.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/ThrowableTracker.java new file mode 100644 index 00000000000..271b100e996 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/ThrowableTracker.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec.tracker; + +import java.util.Iterator; + +public interface ThrowableTracker { + void report(Throwable throwable); + Iterator getThrowables(); + + default Throwable getFirstThrowable() { + Iterator it = getThrowables(); + Throwable result = it.hasNext() ? it.next() : null; + return result; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/ThrowableTrackerFirst.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/ThrowableTrackerFirst.java new file mode 100644 index 00000000000..89ff933232a --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/ThrowableTrackerFirst.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec.tracker; + +import java.util.Collections; +import java.util.Iterator; +import java.util.List; + +/** Throwable tracker that only stores the first encountered exception. 
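+ * Subsequent reports are ignored. A minimal sketch of the contract (illustrative): + * <pre>{@code + * ThrowableTracker tracker = new ThrowableTrackerFirst(); + * tracker.report(new RuntimeException("first")); + * tracker.report(new RuntimeException("second")); // ignored + * Throwable first = tracker.getFirstThrowable(); // the "first" exception + * }</pre>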
*/ +public class ThrowableTrackerFirst + implements ThrowableTracker +{ + protected Throwable throwable = null; + + @Override + public void report(Throwable throwable) { + if (this.throwable == null) { + this.throwable = throwable; + } + // Ignore any throwables after the first + } + + @Override + public Iterator getThrowables() { + return throwable == null ? Collections.emptyIterator() : List.of(throwable).iterator(); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/UpdateExecBase.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/UpdateExecBase.java new file mode 100644 index 00000000000..7a0b3fbcfce --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/UpdateExecBase.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec.tracker; + +import org.apache.jena.sparql.exec.UpdateExec; +import org.apache.jena.update.UpdateRequest; + +public abstract class UpdateExecBase + implements UpdateExec +{ + protected String updateRequestString; + protected UpdateRequest updateRequest; + + public UpdateExecBase(UpdateRequest updateRequest, String updateRequestString) { + super(); + // this.datasetGraph = datasetGraph; + this.updateRequest = updateRequest; + this.updateRequestString = updateRequestString; + } + +// public DatasetGraph getDatasetGraph() { +// return datasetGraph; +// } + + @Override + public UpdateRequest getUpdateRequest() { + return updateRequest; + } + + @Override + public String getUpdateRequestString() { + return updateRequestString; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/UpdateExecTask.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/UpdateExecTask.java new file mode 100644 index 00000000000..894a7af0b4a --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/UpdateExecTask.java @@ -0,0 +1,191 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.jena.sparql.exec.tracker; + +import java.util.Objects; + +import org.apache.jena.sparql.exec.UpdateExec; +import org.apache.jena.update.UpdateRequest; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Wrapper for UpdateExec that tracks {@link BasicTaskInfo} information. */ +public class UpdateExecTask + extends UpdateProcessorWrapper + implements UpdateExec, BasicTaskExec +{ + private static final Logger logger = LoggerFactory.getLogger(UpdateExecTask.class); + + protected TaskListener listener; + + protected long creationTime; + protected long startTime = -1; + protected long abortTime = -1; + protected long finishTime = -1; + protected Throwable throwable = null; + protected TaskState currentState = TaskState.CREATED; + + protected UpdateExecTask(UpdateExec delegate, long creationTime, TaskListener listener) { + super(delegate); + this.listener = listener; + this.creationTime = creationTime; + } + + /** Wrap an UpdateExec and notify the listener with the creation event using the current time. */ + public static UpdateExecTask create(UpdateExec delegate, TaskListener listener) { + long creationTime = System.currentTimeMillis(); + return create(delegate, creationTime, listener); + } + + /** Wrap an UpdateExec and notify the listener with the creation event using the given time. */ + public static UpdateExecTask create(UpdateExec delegate, long creationTime, TaskListener listener) { + Objects.requireNonNull(delegate); + Objects.requireNonNull(listener); + UpdateExecTask result = new UpdateExecTask(delegate, creationTime, listener); + listener.onStateChange(result); + return result; + } + + @Override + public TaskState getTaskState() { + return currentState; + } + + @Override + public long getCreationTime() { + return creationTime; + } + + @Override + public long getStartTime() { + return startTime; + } + + @Override + public long getAbortTime() { + return abortTime; + } + + @Override + public long getFinishTime() { + return finishTime; + } + + @Override + public Throwable getThrowable() { + return throwable; + } + + @Override + public String getLabel() { + UpdateRequest updateRequest = getUpdateRequest(); + String result; + if (updateRequest == null) { + result = getUpdateRequestString(); + if (result == null) { + result = "Unknown update"; + } + } else { + result = updateRequest.toString(); + } + return result; + } + + @Override + public void abort() { // This method body is needed because of noop and delegating default methods. 
+ if (!isAborting()) { + this.abortTime = System.currentTimeMillis(); + getDelegate().abort(); + } + } + + protected void updateThrowable(Throwable throwable) { + if (this.throwable == null) { + this.throwable = throwable; + } + } + + @Override + public void execute() { + Throwable throwable = null; + beforeExec(); + try { + super.execute(); + } catch (Throwable t) { + t.addSuppressed(new RuntimeException("Error during update execution")); + throwable = t; + } finally { + afterExec(throwable); + } + } + + protected void beforeExec() { + this.startTime = System.currentTimeMillis(); + updateState(TaskState.STARTING); + transition(TaskState.RUNNING, () -> {}); + } + + protected void afterExec(Throwable throwable) { + this.throwable = throwable; + try { + updateState(TaskState.TERMINATING); + } finally { + this.finishTime = System.currentTimeMillis(); + updateState(TaskState.TERMINATED); + } + } + + protected void updateState(TaskState newState) { + Objects.requireNonNull(newState); + if (currentState == null || newState.ordinal() > currentState.ordinal()) { + // State oldState = currentState; + currentState = newState; + if (listener != null) { + try { + listener.onStateChange(this); + } catch (Throwable e) { + logger.warn("Exception raised in listener.", e); + } + } + } + } + + protected void transition(TaskState targetState, Runnable action) { + try { + action.run(); + updateState(targetState); + } catch (Throwable throwable) { + throwable.addSuppressed(new RuntimeException("Failure transitioning from " + currentState + " to " + targetState + ".", throwable)); + updateThrowable(throwable); + updateState(TaskState.TERMINATING); + throw throwable; + } + } + + @Override + public String getStatusMessage() { + return ""; + } + + @Override + public String toString() { + return "UpdateExecTracked [startTime=" + getStartTime() + + ", finishTime=" + getFinishTime() + ", throwable=" + throwable + + ", delegate=" + getDelegate() + "]"; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/UpdateProcessorWrapper.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/UpdateProcessorWrapper.java new file mode 100644 index 00000000000..d2a6f0d2b47 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/UpdateProcessorWrapper.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.jena.sparql.exec.tracker; + +import org.apache.jena.update.UpdateProcessor; +import org.apache.jena.update.UpdateRequest; + +public class UpdateProcessorWrapper + implements UpdateProcessor +{ + private T delegate; + + public UpdateProcessorWrapper(T delegate) { + super(); + this.delegate = delegate; + } + + protected T getDelegate() { + return delegate; + } + + @Override + public UpdateRequest getUpdateRequest() { + return getDelegate().getUpdateRequest(); + } + + @Override + public String getUpdateRequestString() { + return getDelegate().getUpdateRequestString(); + } + + @Override + public void execute() { + getDelegate().execute(); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/graph/GraphSPARQLService.java b/jena-arq/src/main/java/org/apache/jena/sparql/graph/GraphSPARQLService.java deleted file mode 100644 index 4f66bd46c00..00000000000 --- a/jena-arq/src/main/java/org/apache/jena/sparql/graph/GraphSPARQLService.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.jena.sparql.graph; - -import org.apache.jena.graph.Graph ; -import org.apache.jena.graph.Node ; -import org.apache.jena.graph.Triple ; -import org.apache.jena.graph.impl.GraphBase ; -import org.apache.jena.sparql.algebra.Op ; -import org.apache.jena.sparql.algebra.op.OpBGP ; -import org.apache.jena.sparql.core.BasicPattern ; -import org.apache.jena.sparql.core.Var ; -import org.apache.jena.util.iterator.ExtendedIterator ; -import org.slf4j.Logger ; -import org.slf4j.LoggerFactory ; - -/** - * This class provides the Jena Graph interface to a remote SPARQL endpoint. - * Efficiency not guaranteed. 
- */ - -public class GraphSPARQLService extends GraphBase implements Graph -{ - private static Logger log = LoggerFactory.getLogger(GraphSPARQLService.class) ; - - private String serviceURI ; - private String graphIRI = null ; - - // Remote default graph - public GraphSPARQLService(String serviceURI) - { - this.serviceURI = serviceURI ; - this.graphIRI = null ; - } - - // Remote named graph - public GraphSPARQLService(String serviceURI, String graphIRI) - { - this.serviceURI = serviceURI ; - this.graphIRI = graphIRI ; - } - -// @Override -// public Capabilities getCapabilities() -// { -// if (capabilities == null) -// capabilities = new AllCapabilities() -// { @Override public boolean handlesLiteralTyping() { return false; } }; -// return capabilities; -// } - - @Override - protected ExtendedIterator graphBaseFind(Triple m) - { - Node s = m.getMatchSubject() ; - Var sVar = null ; - if ( s == null ) - { - sVar = Var.alloc("s") ; - s = sVar ; - } - - Node p = m.getMatchPredicate() ; - Var pVar = null ; - if ( p == null ) - { - pVar = Var.alloc("p") ; - p = pVar ; - } - - Node o = m.getMatchObject() ; - Var oVar = null ; - if ( o == null ) - { - oVar = Var.alloc("o") ; - o = oVar ; - } - - Triple triple = Triple.create(s, p ,o) ; - - // Evaluate as an algebra expression - BasicPattern pattern = new BasicPattern() ; - pattern.add(triple) ; - Op op = new OpBGP(pattern) ; - -// // Make remote execution object. -// System.err.println("GraphSPARQLService.graphBaseFind: Unimplemented : remote service execution") ; -// //Plan plan = factory.create(op, getDataset(), BindingRoot.create(), null) ; -// -// QueryIterator qIter = plan.iterator() ; -// List triples = new ArrayList() ; -// -// for (; qIter.hasNext() ; ) -// { -// Binding b = qIter.nextBinding() ; -// Node sResult = s ; -// Node pResult = p ; -// Node oResult = o ; -// if ( sVar != null ) -// sResult = b.get(sVar) ; -// if ( pVar != null ) -// pResult = b.get(pVar) ; -// if ( oVar != null ) -// oResult = b.get(oVar) ; -// Triple resultTriple = Triple.create(sResult, pResult, oResult) ; -// if ( log.isDebugEnabled() ) -// log.debug(" "+resultTriple) ; -// triples.add(resultTriple) ; -// } -// qIter.close() ; -// return WrappedIterator.createNoRemove(triples.iterator()) ; - return null ; - } - -} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/modify/UpdateProcessorBase.java b/jena-arq/src/main/java/org/apache/jena/sparql/modify/UpdateProcessorBase.java index d51b96e050e..5d1b9953411 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/modify/UpdateProcessorBase.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/modify/UpdateProcessorBase.java @@ -18,13 +18,14 @@ package org.apache.jena.sparql.modify; -import java.util.concurrent.atomic.AtomicBoolean; +import java.util.Objects ; +import java.util.concurrent.atomic.AtomicBoolean ; import org.apache.jena.atlas.iterator.Iter ; import org.apache.jena.sparql.core.DatasetGraph ; -import org.apache.jena.sparql.engine.binding.Binding ; import org.apache.jena.sparql.engine.Timeouts; import org.apache.jena.sparql.engine.Timeouts.Timeout; +import org.apache.jena.sparql.engine.binding.Binding ; import org.apache.jena.sparql.util.Context ; import org.apache.jena.update.UpdateProcessor ; import org.apache.jena.update.UpdateRequest ; @@ -70,6 +71,16 @@ public UpdateProcessorBase(UpdateRequest request, } } + @Override + public UpdateRequest getUpdateRequest() { + return request; + } + + @Override + public String getUpdateRequestString() { + return Objects.toString(request); + } + 
@Override public void execute() { UpdateEngine uProc = factory.create(datasetGraph, inputBinding, context); diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/system/InitExecTracking.java b/jena-arq/src/main/java/org/apache/jena/sparql/system/InitExecTracking.java new file mode 100644 index 00000000000..509381da664 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/system/InitExecTracking.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.system; + +import org.apache.jena.sparql.engine.dispatch.SparqlDispatcherRegistry; +import org.apache.jena.sparql.exec.tracker.ChainingQueryDispatcherExecTracker; +import org.apache.jena.sparql.exec.tracker.ChainingUpdateDispatcherExecTracker; +import org.apache.jena.sys.JenaSubsystemLifecycle; + +public class InitExecTracking implements JenaSubsystemLifecycle { + @Override + public void start() { + init(); + } + + public static void init() { + SparqlDispatcherRegistry.addDispatcher(new ChainingUpdateDispatcherExecTracker()); + SparqlDispatcherRegistry.addDispatcher(new ChainingQueryDispatcherExecTracker()); + } + + @Override + public void stop() {} + + /** Initialize very late so that custom dispatchers are registered before execution tracking. */ + @Override + public int level() { + return 999_999_999 ; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/util/ContextAccumulator.java b/jena-arq/src/main/java/org/apache/jena/sparql/util/ContextAccumulator.java index 52e91147a6a..94f0a059e1f 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/util/ContextAccumulator.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/util/ContextAccumulator.java @@ -93,6 +93,28 @@ public ContextAccumulator set(Symbol symbol, boolean value) { return this; } + /** Return the current value of a symbol. */ + public T get(Symbol symbol) { + T result = addedContext.get(symbol); + if (result == null) { + Context extra = extra(); + if (extra != null) { + result = extra.get(symbol); + } + + if (result == null) { + Context base = baseContext != null + ? 
baseContext + : baseContext(); + + if (base != null) { + result = base.get(symbol); + } + } + } + return result; + } + public ContextAccumulator context(Context context) { if ( context == null ) return this; diff --git a/jena-arq/src/main/java/org/apache/jena/update/UpdateAction.java b/jena-arq/src/main/java/org/apache/jena/update/UpdateAction.java index 987b76e7e68..7df958e89b3 100644 --- a/jena-arq/src/main/java/org/apache/jena/update/UpdateAction.java +++ b/jena-arq/src/main/java/org/apache/jena/update/UpdateAction.java @@ -37,6 +37,7 @@ import org.apache.jena.sparql.modify.UsingList; import org.apache.jena.sparql.modify.UsingUpdateSink; import org.apache.jena.sparql.modify.request.UpdateWithUsing; +import org.apache.jena.sparql.util.Context; /** * A class of forms for executing SPARQL Update operations. parse means the update @@ -356,7 +357,7 @@ public static void parseExecute(UsingList usingList, DatasetGraph dataset, Strin if ( in == null ) throw new UpdateException("File could not be opened: " + fileName); } - parseExecute(usingList, dataset, in, inputBinding, baseURI, syntax); + parseExecute(usingList, dataset, in, inputBinding, baseURI, syntax, null); if ( in != System.in ) IO.close(in); } @@ -408,7 +409,7 @@ public static void parseExecute(UsingList usingList, DatasetGraph dataset, Input * @param syntax The update language syntax */ public static void parseExecute(UsingList usingList, DatasetGraph dataset, InputStream input, String baseURI, Syntax syntax) { - parseExecute(usingList, dataset, input, (Binding)null, baseURI, syntax); + parseExecute(usingList, dataset, input, (Binding)null, baseURI, syntax, null); } /** @@ -427,7 +428,7 @@ public static void parseExecute(UsingList usingList, DatasetGraph dataset, Input */ public static void parseExecute(UsingList usingList, DatasetGraph dataset, InputStream input, QuerySolution inputBinding, String baseURI, Syntax syntax) { - parseExecute(usingList, dataset, input, BindingLib.asBinding(inputBinding), baseURI, syntax); + parseExecute(usingList, dataset, input, BindingLib.asBinding(inputBinding), baseURI, syntax, null); } /** @@ -445,9 +446,9 @@ public static void parseExecute(UsingList usingList, DatasetGraph dataset, Input * @param syntax The update language syntax */ public static void parseExecute(UsingList usingList, DatasetGraph dataset, InputStream input, Binding inputBinding, String baseURI, - Syntax syntax) { + Syntax syntax, Context context) { @SuppressWarnings("removal") - UpdateProcessorStreaming uProc = UpdateStreaming.createStreaming(dataset, inputBinding); + UpdateProcessorStreaming uProc = UpdateStreaming.makeStreaming(dataset, inputBinding, context); if ( uProc == null ) throw new ARQException("No suitable update procesors are registered/able to execute your updates"); diff --git a/jena-arq/src/main/java/org/apache/jena/update/UpdateSource.java b/jena-arq/src/main/java/org/apache/jena/update/UpdateSource.java new file mode 100644 index 00000000000..112a6baa8b5 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/update/UpdateSource.java @@ -0,0 +1,52 @@ +package org.apache.jena.update; + +import java.io.InputStream; +import java.util.Iterator; + +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.core.Prologue; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.engine.iterator.IterAbortable; +import org.apache.jena.sparql.lang.UpdateParser; +import org.apache.jena.sparql.modify.UpdateEngineFactory; +import 
org.apache.jena.sparql.modify.UpdateEngineRegistry; +import org.apache.jena.sparql.modify.UpdateProcessorStreamingBase; +import org.apache.jena.sparql.util.Context; + +public sealed interface UpdateSource { + // String getBaseURI(); + Iterator iterator(); + + // InputStream input + // UsingList usingList, String baseURI, Syntax syntax + public record UpdateSourceIterable(Iterator iterator) implements UpdateSource {} + + public record UpdateSourceUpdateRequest(UpdateRequest updateRequest) implements UpdateSource { +// @Override +// public String getBaseURI() { +// return updateRequest.getBaseURI(); +// } + + @Override + public Iterator iterator() { + return updateRequest.iterator(); + } + } + + + // Everything for local updates comes through one of these two make methods + /*package*/ static UpdateProcessorStreaming makeStreaming(DatasetGraph datasetGraph, Binding inputBinding, Context context) { + Prologue prologue = new Prologue(); + Context cxt = Context.setupContextForDataset(context, datasetGraph); + UpdateEngineFactory f = UpdateEngineRegistry.get().find(datasetGraph, cxt); + UpdateProcessorStreamingBase uProc = new UpdateProcessorStreamingBase(datasetGraph, inputBinding, prologue, cxt, f); + // uProc.getUpdateSink(). + return uProc; + } + +// public static IterAbortable toIterator(InputStream input, Prologue prologue) { +// UpdateParser parser = UpdateFactory.setupParser(uProc.getPrologue(), baseURI, syntax); +// parser.parse(sink, uProc.getPrologue(), input); +// +// } +} diff --git a/jena-arq/src/main/resources/META-INF/services/org.apache.jena.sys.JenaSubsystemLifecycle b/jena-arq/src/main/resources/META-INF/services/org.apache.jena.sys.JenaSubsystemLifecycle index 492a2de3b34..5ab54de8039 100644 --- a/jena-arq/src/main/resources/META-INF/services/org.apache.jena.sys.JenaSubsystemLifecycle +++ b/jena-arq/src/main/resources/META-INF/services/org.apache.jena.sys.JenaSubsystemLifecycle @@ -1,3 +1,4 @@ org.apache.jena.riot.system.InitRIOT org.apache.jena.sparql.system.InitARQ org.apache.jena.rdfs.sys.InitRDFS +org.apache.jena.sparql.system.InitExecTracking diff --git a/jena-arq/src/test/java/org/apache/jena/sparql/core/TS_SparqlCore.java b/jena-arq/src/test/java/org/apache/jena/sparql/core/TS_SparqlCore.java index 3ea561bfb4f..9e7f47012d1 100644 --- a/jena-arq/src/test/java/org/apache/jena/sparql/core/TS_SparqlCore.java +++ b/jena-arq/src/test/java/org/apache/jena/sparql/core/TS_SparqlCore.java @@ -46,6 +46,8 @@ , TestDatasetGraphFilteredView.class , TestSpecialDatasets.class , TestDatasetMisc.class + + , TestDatasetGraphOverSparql.class }) public class TS_SparqlCore diff --git a/jena-arq/src/test/java/org/apache/jena/sparql/core/TestDatasetGraphOverSparql.java b/jena-arq/src/test/java/org/apache/jena/sparql/core/TestDatasetGraphOverSparql.java new file mode 100644 index 00000000000..f036d167644 --- /dev/null +++ b/jena-arq/src/test/java/org/apache/jena/sparql/core/TestDatasetGraphOverSparql.java @@ -0,0 +1,101 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.core; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.apache.jena.graph.Node; +import org.apache.jena.graph.NodeFactory; +import org.apache.jena.query.Query; +import org.apache.jena.riot.Lang; +import org.apache.jena.riot.RDFParser; +import org.apache.jena.sparql.engine.dispatch.DatasetGraphOverSparql; +import org.apache.jena.sparql.exec.QueryExec; +import org.apache.jena.sparql.exec.UpdateExec; +import org.apache.jena.update.UpdateRequest; +import org.junit.jupiter.api.Test; + +public class TestDatasetGraphOverSparql extends AbstractDatasetGraphTests { + + @Override + protected DatasetGraph emptyDataset() { + DatasetGraph backend = DatasetGraphFactory.create(); + + DatasetGraph frontend = new DatasetGraphOverSparql() { + @Override + protected UpdateExec update(UpdateRequest update) { + return UpdateExec.dataset(backend).update(update).build(); + } + + @Override + protected QueryExec query(Query query) { + return QueryExec.dataset(backend).query(query).build(); + } + }; + + return frontend; + } + + @Test + public void deleteDefaultGraph() { + DatasetGraph dsg = testDataset(); + dsg.deleteAny(Quad.defaultGraphIRI, Node.ANY, Node.ANY, Node.ANY); + assertFalse(dsg.isEmpty()); + assertTrue(dsg.getDefaultGraph().isEmpty()); + } + + @Test + public void deleteNamedGraph() { + DatasetGraph dsg = testDataset(); + dsg.deleteAny(NodeFactory.createURI("http://www.example.org/g"), Node.ANY, Node.ANY, Node.ANY); + assertEquals(0, dsg.size()); + assertFalse(dsg.isEmpty()); + } + + @Test + public void deleteAllNamedGraphs() { + DatasetGraph dsg = testDataset(); + dsg.deleteAny(Quad.unionGraph, Node.ANY, Node.ANY, Node.ANY); + assertEquals(0, dsg.size()); + assertFalse(dsg.isEmpty()); + } + + @Test + public void deleteAllGraphs() { + DatasetGraph dsg = testDataset(); + dsg.deleteAny(Node.ANY, Node.ANY, Node.ANY, Node.ANY); + assertTrue(dsg.isEmpty()); + } + + private DatasetGraph testDataset() { + DatasetGraph dsg = emptyDataset(); + RDFParser.fromString(""" + PREFIX eg: + eg:s1 eg:p eg:o . + eg:g { + eg:s2 eg:p eg:o + } + """, Lang.TRIG).parse(dsg); + assertFalse(dsg.getDefaultGraph().isEmpty()); + assertEquals(1, dsg.size()); + return dsg; + } +} diff --git a/jena-arq/src/test/java/org/apache/jena/sparql/exec/tracker/TestExecTracker.java b/jena-arq/src/test/java/org/apache/jena/sparql/exec/tracker/TestExecTracker.java new file mode 100644 index 00000000000..e9aee2053bc --- /dev/null +++ b/jena-arq/src/test/java/org/apache/jena/sparql/exec/tracker/TestExecTracker.java @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec.tracker; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.ArrayList; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import org.apache.jena.graph.NodeFactory; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.core.DatasetGraphFactory; +import org.apache.jena.sparql.exec.UpdateExec; +import org.apache.jena.system.Txn; +import org.junit.jupiter.api.Test; + +public class TestExecTracker { + + /** Test tracking of execution requests using TaskEventHistory. */ + @Test + public void test() { + int maxHistorySize = 100; + int requestCount = 50; + + DatasetGraph dsg = DatasetGraphFactory.createTxnMem(); + IntStream.range(0, requestCount).mapToObj(i -> NodeFactory.createURI("urn:foo:bar" + i)) + .forEach(x -> dsg.getDefaultGraph().add(x, x, x)); + + TaskEventBroker tracker = TaskEventBroker.getOrCreate(dsg.getContext()); + TaskEventHistory history = TaskEventHistory.getOrCreate(dsg.getContext()); + history.setMaxHistorySize(maxHistorySize); + history.connect(tracker); + + IntStream.range(0, requestCount).boxed().collect(Collectors.toCollection(ArrayList::new)) + .parallelStream().forEach(x -> { + Txn.executeWrite(dsg, () -> { + UpdateExec.dataset(dsg).update("INSERT { ?s ?p 1 } WHERE { ?s ?p ?o }").execute(); + }); + }); + + assertEquals(0, history.getActiveTasks().size()); + int expectedHistorySize = Math.min(requestCount, maxHistorySize); + assertEquals(expectedHistorySize, history.getHistory().size()); + + // Sanity check: Disconnect from the history and make sure no more events are logged. + + history.disconnectFromAll(); + history.clear(); + + IntStream.range(0, requestCount).boxed().collect(Collectors.toCollection(ArrayList::new)) + .parallelStream().forEach(x -> { + Txn.executeWrite(dsg, () -> { + UpdateExec.dataset(dsg).update("INSERT { ?s ?p 1 } WHERE { ?s ?p ?o }").execute(); + }); + }); + + assertEquals(0, history.getActiveTasks().size()); + assertEquals(0, history.getHistory().size()); + } +} diff --git a/jena-examples/src/main/java/arq/examples/ExampleDBpediaViaRemoteDataset.java b/jena-examples/src/main/java/arq/examples/ExampleDBpediaViaRemoteDataset.java new file mode 100644 index 00000000000..9dd642cd715 --- /dev/null +++ b/jena-examples/src/main/java/arq/examples/ExampleDBpediaViaRemoteDataset.java @@ -0,0 +1,43 @@ +package arq.examples; + +import java.util.concurrent.TimeUnit; + +import org.apache.jena.rdflink.RDFLinkHTTP; +import org.apache.jena.rdflink.dataset.DatasetGraphOverRDFLink; +import org.apache.jena.sparql.algebra.Table; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.engine.dispatch.SparqlDispatcherRegistry; +import org.apache.jena.sparql.exec.QueryExec; +import org.apache.jena.sparql.exec.RowSetOps; + +/** + * An example that sends a query to the DBpedia endpoint via the {@link DatasetGraphOverRDFLink} abstraction. + * Technically, the query is passed through Jena's default {@link SparqlDispatcherRegistry}. 
+ */ +public class ExampleDBpediaViaRemoteDataset { + public static void main(String... argv) { + // The query string is sent to the DBpedia endpoint as is (without parsing). + // By default, Jena would fail to parse it because of the undeclared prefixes. + String queryString = """ + SELECT * + FROM + { + ?s rdfs:label ?o . + ?o bif:contains 'Leipzig' + } + LIMIT 3 + """; + + DatasetGraph dsg = new DatasetGraphOverRDFLink(() -> + RDFLinkHTTP.newBuilder() + .destination("http://dbpedia.org/sparql") + .build()); + + Table table = QueryExec.dataset(dsg) + .timeout(10, TimeUnit.SECONDS) + .query(queryString) + .table(); + + RowSetOps.out(System.out, table.toRowSet()); + } +} diff --git a/jena-fuseki2/jena-fuseki-core/src/main/java/org/apache/jena/fuseki/server/Endpoint.java b/jena-fuseki2/jena-fuseki-core/src/main/java/org/apache/jena/fuseki/server/Endpoint.java index 98bef0ae264..c46fd95ed8e 100644 --- a/jena-fuseki2/jena-fuseki-core/src/main/java/org/apache/jena/fuseki/server/Endpoint.java +++ b/jena-fuseki2/jena-fuseki-core/src/main/java/org/apache/jena/fuseki/server/Endpoint.java @@ -178,7 +178,9 @@ public Builder processor(ActionProcessor processor) { public Endpoint build() { Objects.requireNonNull(operation, "Operation for Endpoint"); - return new Endpoint(operation, endpointName, authPolicy, processor, context); + // Always equip endpoints with a context. + Context finalCxt = (context != null) ? context : Context.create() ; + return new Endpoint(operation, endpointName, authPolicy, processor, finalCxt); } } diff --git a/jena-fuseki2/jena-fuseki-main/src/main/java/org/apache/jena/fuseki/main/FusekiServer.java b/jena-fuseki2/jena-fuseki-main/src/main/java/org/apache/jena/fuseki/main/FusekiServer.java index 1520b168622..4c0594f193a 100644 --- a/jena-fuseki2/jena-fuseki-main/src/main/java/org/apache/jena/fuseki/main/FusekiServer.java +++ b/jena-fuseki2/jena-fuseki-main/src/main/java/org/apache/jena/fuseki/main/FusekiServer.java @@ -1269,7 +1269,20 @@ public Builder addEndpoint(String datasetName, String endpointName, Operation op Objects.requireNonNull(datasetName, "datasetName"); Objects.requireNonNull(endpointName, "endpointName"); Objects.requireNonNull(operation, "operation"); - serviceEndpointOperation(datasetName, endpointName, operation, authPolicy); + serviceEndpointOperation(datasetName, endpointName, operation, authPolicy, null); + return this; + } + + /** + * Create an endpoint as a service of the dataset (i.e. {@code /dataset/endpointName}). + * The operation must already be registered with the builder. 
+ * @see #registerOperation(Operation, ActionService) + */ + public Builder addEndpoint(String datasetName, String endpointName, Operation operation, AuthPolicy authPolicy, Context context) { + Objects.requireNonNull(datasetName, "datasetName"); + Objects.requireNonNull(endpointName, "endpointName"); + Objects.requireNonNull(operation, "operation"); + serviceEndpointOperation(datasetName, endpointName, operation, authPolicy, context); return this; } @@ -1295,11 +1308,11 @@ public Builder addOperation(String datasetName, Operation operation) { public Builder addOperation(String datasetName, Operation operation, AuthPolicy authPolicy) { Objects.requireNonNull(datasetName, "datasetName"); Objects.requireNonNull(operation, "operation"); - serviceEndpointOperation(datasetName, null, operation, authPolicy); + serviceEndpointOperation(datasetName, null, operation, authPolicy, null); return this; } - private void serviceEndpointOperation(String datasetName, String endpointName, Operation operation, AuthPolicy authPolicy) { + private void serviceEndpointOperation(String datasetName, String endpointName, Operation operation, AuthPolicy authPolicy, Context context) { String name = DataAccessPoint.canonical(datasetName); if ( ! operationRegistry.isRegistered(operation) ) @@ -1313,6 +1326,7 @@ private void serviceEndpointOperation(String datasetName, String endpointName, O .operation(operation) .endpointName(endpointName) .authPolicy(authPolicy) + .context(context) .build(); dsBuilder.addEndpoint(endpoint); } diff --git a/jena-fuseki2/jena-fuseki-mod-exectracker/pom.xml b/jena-fuseki2/jena-fuseki-mod-exectracker/pom.xml new file mode 100644 index 00000000000..114469703f3 --- /dev/null +++ b/jena-fuseki2/jena-fuseki-mod-exectracker/pom.xml @@ -0,0 +1,142 @@ + + + + + 4.0.0 + + org.apache.jena + jena-fuseki + 5.6.0-SNAPSHOT + + + jena-fuseki-mod-exectracker + + Apache Jena - Fuseki MOD - ExecTracker + HTML Execution Tracker Module for Fuseki + + + org.apache.jena.fuseki.mod.exectracker + + + + + + org.apache.jena + jena-fuseki-main + 5.6.0-SNAPSHOT + + + + + + org.junit.jupiter + junit-jupiter + test + + + + org.junit.platform + junit-platform-suite + test + + + + org.apache.logging.log4j + log4j-core + test + + + + org.apache.logging.log4j + log4j-slf4j2-impl + test + + + + org.seleniumhq.selenium + selenium-java + test + + + + io.github.bonigarcia + webdrivermanager + test + + + + + + bundle + + + + org.apache.jena + jena-fuseki-main + 5.6.0-SNAPSHOT + provided + + + + + + org.apache.maven.plugins + maven-shade-plugin + + + + + + false + + + + + *:* + + + META-INF/*.SF + META-INF/*.DSA + META-INF/*.RSA + META-INF/DEPENDENCIES + META-INF/MANIFEST.MF + **/module-info.class + META-INF/versions/9/OSGI-INF/MANIFEST.MF + + + + + + + package + + + shade + + + + + + + + + + diff --git a/jena-fuseki2/jena-fuseki-mod-exectracker/src/main/java/org/apache/jena/fuseki/mod/exectracker/ExecTrackerService.java b/jena-fuseki2/jena-fuseki-mod-exectracker/src/main/java/org/apache/jena/fuseki/mod/exectracker/ExecTrackerService.java new file mode 100644 index 00000000000..6711cce420d --- /dev/null +++ b/jena-fuseki2/jena-fuseki-mod-exectracker/src/main/java/org/apache/jena/fuseki/mod/exectracker/ExecTrackerService.java @@ -0,0 +1,407 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.fuseki.mod.exectracker; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.io.PrintWriter; +import java.nio.charset.StandardCharsets; +import java.util.Collections; +import java.util.IdentityHashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; + +import org.apache.commons.io.IOUtils; +import org.apache.jena.fuseki.FusekiException; +import org.apache.jena.fuseki.server.Endpoint; +import org.apache.jena.fuseki.servlets.BaseActionREST; +import org.apache.jena.fuseki.servlets.HttpAction; +import org.apache.jena.riot.WebContent; +import org.apache.jena.sparql.exec.tracker.BasicTaskExec; +import org.apache.jena.sparql.exec.tracker.TaskEventHistory; +import org.apache.jena.sparql.exec.tracker.TaskListener; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.web.HttpSC; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.gson.Gson; +import com.google.gson.JsonElement; +import com.google.gson.internal.bind.JsonTreeWriter; +import com.google.gson.stream.JsonWriter; + +import jakarta.servlet.AsyncContext; +import jakarta.servlet.AsyncEvent; +import jakarta.servlet.AsyncListener; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * REST action handler for listing running query executions and stopping them. + */ +public class ExecTrackerService extends BaseActionREST { + private static final Logger logger = LoggerFactory.getLogger(ExecTrackerService.class); + + // Gson for formatting server side event (SSE) JSON. + // Note: Pretty printing breaks SSE events due to newlines! + private static Gson gsonForSseEvents = new Gson(); + + /** Helper class to track SSE clients. */ + private class Clients { + // Lock to prevent concurrent addition/removal of listeners while broadcasting events. + Object listenerLock = new Object(); + + // The endpoint of the clients. + Endpoint endpoint; + + // Single listener on an TaskTrackerRegistry. - Not needed here; this listener initialized during FMOD init. + Runnable taskTrackerListenerDisposer; + + // Web clients on the ExecTracker. + Map eventListeners = Collections.synchronizedMap(new IdentityHashMap<>()); // new ConcurrentHashMap<>(); + + // The history tracker connected to the taskTracker. + // TaskEventHistory historyTracker; + } + + /** Registered clients listening to server side events for indexer status updates. 
*/ + private Map trackerToClients = new ConcurrentHashMap<>(); // Collections.synchronizedMap(new IdentityHashMap<>()); + + public ExecTrackerService() {} + + private static long getExecId(HttpAction action) { + String str = action.getRequest().getParameter("requestId"); + Objects.requireNonNull(str); + long result = Long.parseLong(str); + return result; + } + + /** + * The GET command can serve: the website, the notification stream from task execution + * and the latest task execution status. + */ + @Override + protected void doGet(HttpAction action) { + String rawCommand = action.getRequestParameter("command"); + String command = Optional.ofNullable(rawCommand).orElse("page"); + switch (command) { + case "page": servePage(action); break; + case "events": serveEvents(action); break; + default: + throw new UnsupportedOperationException("Unsupported operation: " + command); + } + } + + @Override + protected void doPost(HttpAction action) { + String command = action.getRequestParameter("command"); + Objects.requireNonNull(command, "Request parameter 'command' required."); + switch (command) { + case "status": serveStatus(action); break; + case "stop": stopExec(action); break; + default: + throw new UnsupportedOperationException("Unsupported operation: " + command); + } + } + + protected void stopExec(HttpAction action) { + checkIsAbortAllowed(action); + + long execId = getExecId(action); + + TaskEventHistory taskEventHistory = requireTaskEventHistory(action); + BasicTaskExec task = taskEventHistory.getTaskBySerialId(execId); + + if (task != null) { + try { + task.abort(); + } catch (Throwable t) { + logger.warn("Exception raised during abort.", t); + } + logger.info("Sending stop request to execution: " + execId); + } else { + logger.warn("No such execution to abort: " + execId); + } + + respond(action, HttpSC.OK_200, WebContent.contentTypeTextPlain, "Abort request accepted."); + } + + protected void servePage(HttpAction action) { + // Serves the minimal graphql ui + String resourceName = "exectracker/index.html"; + String str = null; + try (InputStream in = ExecTrackerService.class.getClassLoader().getResourceAsStream(resourceName)) { + str = IOUtils.toString(in, StandardCharsets.UTF_8); + } catch (IOException e) { + throw new FusekiException(e); + } + + if (str == null) { + respond(action, HttpSC.INTERNAL_SERVER_ERROR_500, WebContent.contentTypeTextPlain, + "Failed to load classpath resource " + resourceName); + } else { + respond(action, HttpSC.OK_200, WebContent.contentTypeHTML, str); + } + } + + protected TaskEventHistory requireTaskEventHistory(HttpAction action) { + Context cxt = action.getEndpoint().getContext(); + TaskEventHistory taskEventHistory = TaskEventHistory.require(cxt); + return taskEventHistory; + } + + protected Runnable registerTaskEventListener(TaskEventHistory taskEventHistory, Clients clients) { + // Register the SSE handler to the history tracker + InternalListener listener = new InternalListener(taskEventHistory, clients); + Runnable disposeTaskEventListener = taskEventHistory.addListener(BasicTaskExec.class, listener); + return disposeTaskEventListener; + } + + protected class InternalListener implements TaskListener { + protected TaskEventHistory taskEvenHistory; + protected Clients clients; + + Long getTaskId(BasicTaskExec task) { + long taskId = taskEvenHistory.getId(task); + Long serialId = taskEvenHistory.getSerialId(taskId); + return serialId; + } + + public InternalListener(TaskEventHistory taskEventHistory, Clients clients) { + super(); + 
this.taskEvenHistory = taskEventHistory; + this.clients = clients; + } + + @Override + public void onStateChange(BasicTaskExec task) { + switch (task.getTaskState()) { + case STARTING: onStart(task); break; + case TERMINATED: onTerminated(task); break; + default: // ignored + } + } + + public void onStart(BasicTaskExec startRecord) { + Long serialId = getTaskId(startRecord); + if (serialId != null) { + try (JsonTreeWriter writer = new JsonTreeWriter()) { + writer.beginObject(); + TaskStatusWriter.writeStartRecordMembers(writer, serialId, startRecord); + TaskStatusWriter.writeCanAbort(writer, isAbortAllowed(clients.endpoint)); + writer.endObject(); + JsonElement json = writer.get(); + synchronized (clients.listenerLock) { + Iterator> it = clients.eventListeners.entrySet().iterator(); + broadcastJson(it, json); + } + } catch (IOException e) { + throw new RuntimeException(e); + } + } + } + + public void onTerminated(BasicTaskExec endRecord) { + Long serialId = getTaskId(endRecord); + if (serialId != null) { + try (JsonTreeWriter writer = new JsonTreeWriter()) { + TaskStatusWriter.writeCompletionRecordObject(writer, serialId, endRecord); + JsonElement json = writer.get(); + synchronized (clients.listenerLock) { + Iterator> it = clients.eventListeners.entrySet().iterator(); + broadcastJson(it, json); + } + } catch (IOException e) { + throw new RuntimeException(e); + } + } + } + } + protected void serveEvents(HttpAction action) { + HttpServletRequest request = action.getRequest(); + HttpServletResponse response = action.getResponse(); + Endpoint endpoint = action.getEndpoint(); + + response.setContentType("text/event-stream"); + response.setCharacterEncoding("UTF-8"); + response.setHeader("Cache-Control", "no-cache"); + + AsyncContext asyncContext = request.startAsync(); + asyncContext.setTimeout(0); + + TaskEventHistory taskTracker = requireTaskEventHistory(action); + + Runnable[] disposeSseListener = {null}; + + // Detect when client disconnects + asyncContext.addListener(new AsyncListener() { + @Override + public void onComplete(AsyncEvent event) { + disposeSseListener[0].run(); + } + + @Override + public void onTimeout(AsyncEvent event) { + disposeSseListener[0].run(); + } + + @Override + public void onError(AsyncEvent event) { + disposeSseListener[0].run(); + } + + @Override + public void onStartAsync(AsyncEvent event) { + // No-op + } + }); + + disposeSseListener[0] = () -> { + trackerToClients.compute(taskTracker, (et, clts) -> { + Clients r = clts; + if (clts != null) { + synchronized (clts.listenerLock) { + // Remove the listener for the async context. + clts.eventListeners.remove(asyncContext); + + // If no more listeners remain then dispose the exec tracker listener. + if (clts.eventListeners.isEmpty()) { + clts.taskTrackerListenerDisposer.run(); + r = null; + } + } + } + return r; + }); + }; + + // Atomically set up the new listener. + trackerToClients.compute(taskTracker, (et, clients) -> { + if (clients == null) { + clients = new Clients(); + clients.endpoint = endpoint; + Runnable disposer = registerTaskEventListener(taskTracker, clients); + // clients.eventListeners.put(asyncContext, disposer); + clients.taskTrackerListenerDisposer = disposer; + } + synchronized (clients.listenerLock) { + clients.eventListeners.put(asyncContext, disposeSseListener[0]); + } + return clients; + }); + } + + /** Check whether abort is allowed in the action's endpoint context. 
*/ + protected static boolean isAbortAllowed(HttpAction action) { + Endpoint endpoint = action.getEndpoint(); + return isAbortAllowed(endpoint); + } + + /** Check whether abort is allowed in the endpoint's context. */ + protected static boolean isAbortAllowed(Endpoint endpoint) { + Context cxt = (endpoint == null) ? null : endpoint.getContext(); + return isAbortAllowed(cxt); + } + + /** Abort of executions is only allowed if explicitly enabled in the context. */ + protected static boolean isAbortAllowed(Context cxt) { + boolean result = (cxt == null) ? false : cxt.isTrue(FMod_ExecTracker.symAllowAbort); + return result; + } + + public static void checkIsAbortAllowed(HttpAction action) { + boolean isAbortAllowed = isAbortAllowed(action); + if (!isAbortAllowed) { + throw new UnsupportedOperationException("Abort is not allowed."); + } + } + + public static void setAllowAbort(Context cxt, Boolean value) { + cxt.set(FMod_ExecTracker.symAllowAbort, value); + } + + /** + * Serves a JSON object with the running and recently completed tasks. + */ + protected void serveStatus(HttpAction action) { + boolean isAbortAllowed = isAbortAllowed(action); + + TaskEventHistory taskEventHistory = requireTaskEventHistory(action); + + action.setResponseStatus(HttpSC.OK_200); + action.setResponseContentType(WebContent.contentTypeJSON); + try { + OutputStream out = action.getResponseOutputStream(); + JsonWriter writer = gsonForSseEvents.newJsonWriter(new OutputStreamWriter(out)); + new TaskStatusWriter(100, isAbortAllowed).writeStatusObject(writer, taskEventHistory); + writer.flush(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public static void respond(HttpAction action, int status, String contentType, String value) { + action.setResponseStatus(status); + action.setResponseContentType(contentType); + try { + OutputStream out = action.getResponseOutputStream(); + IOUtils.write(value, out, StandardCharsets.UTF_8); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + protected void broadcastJson(Iterator> it, JsonElement jsonData) { + String str = gsonForSseEvents.toJson(jsonData); + broadcastLine(it, str); + } + + /** + * Broadcast a payload to all registered listeners. + * @param payload A string without newline characters. + */ + protected void broadcastLine(Iterator> it, String payload) { + while (it.hasNext()) { + Entry e = it.next(); + AsyncContext context = e.getKey(); + Runnable unregister = e.getValue(); + try { + PrintWriter writer = context.getResponse().getWriter(); + // Format demanded by server side events is: "data: \n\n". + writer.println("data: " + payload); + writer.println(); + writer.flush(); + } catch (Throwable x) { + it.remove(); // Remove first so that unregister does not cause concurrent modification. + logger.warn("Broadcast failed.", x); + try { + unregister.run(); + } finally { + context.complete(); + } + } + } + } +} diff --git a/jena-fuseki2/jena-fuseki-mod-exectracker/src/main/java/org/apache/jena/fuseki/mod/exectracker/FMod_ExecTracker.java b/jena-fuseki2/jena-fuseki-mod-exectracker/src/main/java/org/apache/jena/fuseki/mod/exectracker/FMod_ExecTracker.java new file mode 100644 index 00000000000..6349b563ea3 --- /dev/null +++ b/jena-fuseki2/jena-fuseki-mod-exectracker/src/main/java/org/apache/jena/fuseki/mod/exectracker/FMod_ExecTracker.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.fuseki.mod.exectracker; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; +import java.util.Set; + +import org.apache.jena.fuseki.Fuseki; +import org.apache.jena.fuseki.main.FusekiServer; +import org.apache.jena.fuseki.main.sys.FusekiAutoModule; +import org.apache.jena.fuseki.server.DataAccessPoint; +import org.apache.jena.fuseki.server.DataAccessPointRegistry; +import org.apache.jena.fuseki.server.DataService; +import org.apache.jena.fuseki.server.Endpoint; +import org.apache.jena.fuseki.server.FusekiVocab; +import org.apache.jena.fuseki.server.Operation; +import org.apache.jena.rdf.model.Model; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.exec.tracker.TaskEventHistory; +import org.apache.jena.sparql.exec.tracker.TaskEventBroker; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.sparql.util.Symbol; + +public class FMod_ExecTracker implements FusekiAutoModule { + public static final Symbol symAllowAbort = Symbol.create("allowAbort"); + + private static final Operation OPERATION = Operation.alloc( + FusekiVocab.NS + "exectracker", "ExecTracker", "Execution Tracker"); + + public static Operation getOperation() { + return OPERATION; + } + + public FMod_ExecTracker() { + super(); + } + + @Override + public String name() { + return "ExecTracker"; + } + + @Override + public void prepare(FusekiServer.Builder builder, Set datasetNames, Model configModel) { + Operation trackerOperation = getOperation(); + Fuseki.configLog.info(name() + ": Registering operation " + trackerOperation.getId()); + builder.registerOperation(trackerOperation, new ExecTrackerService()); + } + + /** + * For each dataset with a 'tracker' endpoint set up a task event broker. + */ + @Override + public void configured(FusekiServer.Builder serverBuilder, DataAccessPointRegistry dapRegistry, Model configModel) { + FusekiAutoModule.super.configured(serverBuilder, dapRegistry, configModel); + + Operation trackerOperation = getOperation(); + List newDataAccessPoints = new ArrayList<>(); + for (DataAccessPoint dap : dapRegistry.accessPoints()) { + DataService dataService = dap.getDataService(); + DatasetGraph dsg = dataService.getDataset(); + + List trackerEndpoints = Optional.ofNullable(dataService.getEndpoints(trackerOperation)).orElse(List.of()); + + if (!trackerEndpoints.isEmpty()) { + // Register a task tracker registry in the dataset context. + Context datasetCxt = dsg.getContext(); + if (datasetCxt != null) { + TaskEventBroker taskTrackerRegistry = TaskEventBroker.getOrCreate(datasetCxt); + + // Then register task trackers with history into the endpoint context. 
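+                    // Wiring note: the TaskEventBroker in the dataset context receives the task events
+                    // of executions on this dataset; each tracker endpoint gets its own TaskEventHistory
+                    // in the endpoint context and subscribes to that broker via connect().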
+ for (Endpoint endpoint : trackerEndpoints) { + Context endpointCxt = endpoint.getContext(); + + TaskEventHistory historyTracker = TaskEventHistory.getOrCreate(endpointCxt); + historyTracker.connect(taskTrackerRegistry); + // XXX Should disconnect history tracker on server shutdown. + } + } + } else { + newDataAccessPoints.add(dap); + } + } + + // "replace" each DataAccessPoint + newDataAccessPoints.forEach(dap -> { + dapRegistry.remove(dap.getName()); + dapRegistry.register(dap); + }); + } + + @Override + public void serverStopped(FusekiServer server) { + // XXX Should disconnect history tracker on server shutdown. + } +} diff --git a/jena-fuseki2/jena-fuseki-mod-exectracker/src/main/java/org/apache/jena/fuseki/mod/exectracker/TaskStatusWriter.java b/jena-fuseki2/jena-fuseki-mod-exectracker/src/main/java/org/apache/jena/fuseki/mod/exectracker/TaskStatusWriter.java new file mode 100644 index 00000000000..47f5b6795d2 --- /dev/null +++ b/jena-fuseki2/jena-fuseki-mod-exectracker/src/main/java/org/apache/jena/fuseki/mod/exectracker/TaskStatusWriter.java @@ -0,0 +1,141 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.fuseki.mod.exectracker; + +import java.io.IOException; +import java.util.Map.Entry; + +import org.apache.commons.lang3.exception.ExceptionUtils; +import org.apache.jena.sparql.exec.tracker.BasicTaskExec; +import org.apache.jena.sparql.exec.tracker.TaskEventHistory; + +import com.google.gson.stream.JsonWriter; + +public class TaskStatusWriter { + + protected int maxHistorySize; + protected boolean allowAbort; + + /** + * Create a writer for writing out ExecTracker state as JSON. + * + * @param maxHistorySize Caps the number of completed tasks to write out. 
+ */ + public TaskStatusWriter(int maxHistorySize, boolean allowAbort) { + super(); + this.maxHistorySize = maxHistorySize; + this.allowAbort = allowAbort; + } + + public void writeStatusObject(JsonWriter writer, TaskEventHistory execTracker) throws IOException { + writer.beginObject(); + writeStatusMembers(writer, execTracker); + writer.endObject(); + } + + public void writeStatusMembers(JsonWriter writer, TaskEventHistory execTracker) throws IOException { + writer.name("runningTasks"); + writer.beginArray(); + for (Entry entry : execTracker.getActiveTasks().entrySet()) { + long id = entry.getKey(); + BasicTaskExec item = entry.getValue(); + writer.beginObject(); + writeStartRecordMembers(writer, id, item); + writeCanAbort(writer, allowAbort); + writer.endObject(); + } + writer.endArray(); + + writer.name("completedTasks"); + writer.beginArray(); + Iterable> recentHistory = () -> + execTracker.getHistory().stream().limit(maxHistorySize).iterator(); + for (Entry entry : recentHistory) { + long id = entry.getKey(); + BasicTaskExec item = entry.getValue(); + writeCompletionRecordObject(writer, id, item); + } + writer.endArray(); + } + + public static void writeStartRecordObject(JsonWriter writer, Long id, BasicTaskExec item) throws IOException { + writer.beginObject(); + writeStartRecordMembers(writer, id, item); + writer.endObject(); + } + + public static void writeStartRecordMembers(JsonWriter writer, Long id, BasicTaskExec item) throws IOException { + writer.name("type"); + writer.value("StartRecord"); + + writer.name("requestId"); + // long id = System.identityHashCode(item); + writer.value(id); + + writer.name("payload"); + writePayloadObject(writer, item); + + writer.name("timestamp"); + writer.value(item.getStartTime()); + } + + public static void writeCanAbort(JsonWriter writer, Boolean canAbort) throws IOException { + if (canAbort != null) { + writer.name("canAbort"); + writer.value(canAbort); + } + } + + public static void writePayloadObject(JsonWriter writer, BasicTaskExec item) throws IOException { + writer.beginObject(); + writePayloadMembers(writer, item); + writer.endObject(); + } + + public static void writePayloadMembers(JsonWriter writer, BasicTaskExec item) throws IOException { + // XXX Change to description + String label = item.getLabel(); + writer.name("label"); + writer.value(label); + } + + public static void writeCompletionRecordObject(JsonWriter writer, long id, BasicTaskExec item) throws IOException { + writer.beginObject(); + writeCompletionRecordMembers(writer, id, item); + writer.endObject(); + } + + public static void writeCompletionRecordMembers(JsonWriter writer, long id, BasicTaskExec item) throws IOException { + writer.name("type"); + writer.value("CompletionRecord"); + + writer.name("startRecord"); + writeStartRecordObject(writer, id, item); + + Throwable throwable = item.getThrowable(); + if (throwable != null) { + String errorMessage = ExceptionUtils.getStackTrace(throwable); + writer.name("error"); + writer.value(errorMessage); + } + + writer.name("timestamp"); + writer.value(item.getFinishTime()); + } +} diff --git a/jena-fuseki2/jena-fuseki-mod-exectracker/src/main/resources/META-INF/services/org.apache.jena.fuseki.main.sys.FusekiAutoModule b/jena-fuseki2/jena-fuseki-mod-exectracker/src/main/resources/META-INF/services/org.apache.jena.fuseki.main.sys.FusekiAutoModule new file mode 100644 index 00000000000..de0e8e7c12d --- /dev/null +++ 
b/jena-fuseki2/jena-fuseki-mod-exectracker/src/main/resources/META-INF/services/org.apache.jena.fuseki.main.sys.FusekiAutoModule @@ -0,0 +1 @@ +org.apache.jena.fuseki.mod.exectracker.FMod_ExecTracker diff --git a/jena-fuseki2/jena-fuseki-mod-exectracker/src/main/resources/exectracker/index.html b/jena-fuseki2/jena-fuseki-mod-exectracker/src/main/resources/exectracker/index.html new file mode 100644 index 00000000000..c014ca31e8b --- /dev/null +++ b/jena-fuseki2/jena-fuseki-mod-exectracker/src/main/resources/exectracker/index.html @@ -0,0 +1,487 @@ + + + + + + Execution Tracker + + + + + + + + +

[index.html page body; markup not preserved: an "Ongoing Executions" table with columns ID, Start Time, Label, Action, and a "Completed Executions" table with columns ID, Start Time, End Time, Label.]
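For reference, the page consumes single-line server-sent events broadcast by ExecTrackerService; a sketch of one such message, with illustrative values, using the fields written by TaskStatusWriter:

data: {"type":"StartRecord","requestId":42,"payload":{"label":"example task label"},"timestamp":1700000000000,"canAbort":true}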
+ + + + diff --git a/jena-fuseki2/jena-fuseki-mod-exectracker/src/test/java/org/apache/jena/fuseki/mod/exectracker/TestFMod_ExecTracker.java b/jena-fuseki2/jena-fuseki-mod-exectracker/src/test/java/org/apache/jena/fuseki/mod/exectracker/TestFMod_ExecTracker.java new file mode 100644 index 00000000000..cdfc32963ff --- /dev/null +++ b/jena-fuseki2/jena-fuseki-mod-exectracker/src/test/java/org/apache/jena/fuseki/mod/exectracker/TestFMod_ExecTracker.java @@ -0,0 +1,168 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.fuseki.mod.exectracker; + +import java.io.IOException; +import java.time.Duration; +import java.util.stream.IntStream; + +import org.apache.jena.fuseki.main.FusekiServer; +import org.apache.jena.fuseki.main.cmds.FusekiMain; +import org.apache.jena.fuseki.mod.exectracker.ExecTrackerService; +import org.apache.jena.fuseki.mod.exectracker.FMod_ExecTracker; +import org.apache.jena.graph.Graph; +import org.apache.jena.graph.Node; +import org.apache.jena.graph.NodeFactory; +import org.apache.jena.graph.Triple; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.core.DatasetGraphFactory; +import org.apache.jena.sparql.exec.tracker.TaskEventBroker; +import org.apache.jena.sparql.exec.tracker.TaskEventHistory; +import org.apache.jena.sparql.graph.GraphFactory; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.system.G; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.openqa.selenium.By; +import org.openqa.selenium.JavascriptExecutor; +import org.openqa.selenium.WebDriver; +import org.openqa.selenium.WebElement; +import org.openqa.selenium.chrome.ChromeDriver; +import org.openqa.selenium.chrome.ChromeOptions; +import org.openqa.selenium.support.ui.WebDriverWait; + +import io.github.bonigarcia.wdm.WebDriverManager; + +/** + * Test cases that interact with the spatial indexer web UI via Selenium. + * + * This class is currently set to "ignore" because it requires local browser. + * Although, a headless Chrome should be started automatically, + * this step turns out to not yet work reliable across all environments. + */ +@Disabled +public class TestFMod_ExecTracker { + private WebDriver driver; + private JavascriptExecutor js; + + private DatasetGraph dsg; + private Node graphName1 = NodeFactory.createURI("http://www.example.org/graph1"); + + /** Create a model with 1000 triples. 
*/ + static Graph createTestGraph() { + Graph graph = GraphFactory.createDefaultGraph(); + IntStream.range(0, 1000) + .mapToObj(i -> NodeFactory.createURI("http://www.example.org/r" + i)) + .forEach(node -> graph.add(node, node, node)); + return graph; + } + + @BeforeEach + public void setUp() throws IOException { + dsg = DatasetGraphFactory.create(); + IntStream.range(0, 1000) + .mapToObj(i -> NodeFactory.createURI("http://www.example.org/x" + i)) + .map(n -> Triple.create(n, n, n)) + .forEach(dsg.getDefaultGraph()::add); + + TaskEventBroker tracker = TaskEventBroker.getOrCreate(dsg.getContext()); + TaskEventHistory history = TaskEventHistory.getOrCreate(dsg.getContext()); + history.connect(tracker); + + G.addInto(dsg.getDefaultGraph(), createTestGraph()); + // setupTestData(dsg); + + Context endpointCxt = Context.create(); + ExecTrackerService.setAllowAbort(endpointCxt, true); + + String[] argv = new String[] { "--empty" }; + FusekiServer server = FusekiMain.builder(argv) + .add("test", dsg) + .registerOperation(FMod_ExecTracker.getOperation(), new ExecTrackerService()) + .addEndpoint("test", "tracker", FMod_ExecTracker.getOperation(), null, endpointCxt) + .build(); + + server.start(); + int port = server.getPort(); + String serverURL = "http://localhost:" + port + "/"; + String siteUrl = serverURL + "test/update-spatial"; + + ChromeOptions options = new ChromeOptions(); + options.addArguments("--headless=new"); // use "new" headless mode (better support) + options.addArguments("--no-sandbox"); + options.addArguments("--disable-dev-shm-usage"); + + WebDriverManager.chromedriver().setup(); // Automatically downloads and sets path + driver = new ChromeDriver(options); + driver.get(siteUrl); + js = (JavascriptExecutor) driver; + } + + @AfterEach + public void tearDown() { + if (driver != null) { + driver.quit(); + } + driver = null; + } + + /** + * Test that first clicks the "apply" button on HTML page to update the spatial index. + * Then, the test removes a graph from the dataset and clicks the "clean" button which + * should remove all entries from the index for which there is no corresponding graph in the dataset. + * @throws InterruptedException + */ + @Test + public void testIndexAndCleanButtons() throws InterruptedException { + if (false) { + // Index the test data (default graph and 1 named graph). + WebElement button = driver.findElement(By.id("apply-action")); + button.click(); + awaitEvent(); + // Assert.assertEquals(1, spatialIndex.query(queryEnvelope, Quad.defaultGraphIRI).size()); + // Assert.assertEquals(2, spatialIndex.query(queryEnvelope, graphName1).size()); + clearLastEvent(); + + // Remove the named graph and update the index. 
+ dsg.removeGraph(graphName1); + WebElement cleanButton = driver.findElement(By.id("clean-action")); + cleanButton.click(); + awaitEvent(); + + // + clearLastEvent(); + } + + Thread.sleep(600_000); + } + + private void awaitEvent() { + WebDriverWait wait = new WebDriverWait(driver, Duration.ofSeconds(5)); + wait.until(d -> { + Object status = js.executeScript("return window.lastEvent;"); + return status != null; + }); + } + + private void clearLastEvent() { + js.executeScript("window.lastEvent = null"); + } +} diff --git a/jena-fuseki2/jena-fuseki-mod-exectracker/src/test/resources/log4j2-test.properties b/jena-fuseki2/jena-fuseki-mod-exectracker/src/test/resources/log4j2-test.properties new file mode 100644 index 00000000000..fb3efdb6deb --- /dev/null +++ b/jena-fuseki2/jena-fuseki-mod-exectracker/src/test/resources/log4j2-test.properties @@ -0,0 +1,56 @@ +## Licensed under the terms of http://www.apache.org/licenses/LICENSE-2.0 +status = error +name = PropertiesConfig +filters = threshold + +filter.threshold.type = ThresholdFilter +filter.threshold.level = ALL + +appender.console.type = Console +appender.console.name = OUT +appender.console.target = SYSTEM_OUT +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{HH:mm:ss} %-5p %-10c{1} :: %m%n +#appender.console.layout.pattern = [%d{yyyy-MM-dd HH:mm:ss}] %-5p %-10c{1} :: %m%n + +rootLogger.level = INFO +rootLogger.appenderRef.stdout.ref = OUT + +logger.jena.name = org.apache.jena +logger.jena.level = INFO + +logger.arq-exec.name = org.apache.jena.arq.exec +logger.arq-exec.level = INFO + +logger.fuseki.name = org.apache.jena.fuseki +logger.fuseki.level = WARN + +## Some tests correctly log warnings. TS_PrefixesService +logger.fuseki-fuseki.name = org.apache.jena.fuseki.Fuseki +logger.fuseki-fuseki.level = ERROR + +logger.fuseki-autoload.name = org.apache.jena.fuseki.main.sys.FusekiAutoModules +logger.fuseki-autoload.level = ERROR + +logger.http.name = org.apache.jena.http +logger.http.level = INFO + +logger.riot.name = org.apache.jena.riot +logger.riot.level = INFO + +logger.riot.name = org.apache.shiro +logger.riot.level = WARN + +logger.jetty.name = org.eclipse.jetty +logger.jetty.level = WARN + +# This goes out in NCSA format +appender.plain.type = Console +appender.plain.name = PLAIN +appender.plain.layout.type = PatternLayout +appender.plain.layout.pattern = %m%n + +logger.request-log.name = org.apache.jena.fuseki.Request +logger.request-log.additivity = false +logger.request-log.level = OFF +logger.request-log.appenderRef.plain.ref = PLAIN diff --git a/jena-fuseki2/jena-fuseki-mod-geosparql/src/main/java/org/apache/jena/fuseki/mod/geosparql/FMod_SpatialIndexer.java b/jena-fuseki2/jena-fuseki-mod-geosparql/src/main/java/org/apache/jena/fuseki/mod/geosparql/FMod_SpatialIndexer.java index b17f50925af..7fc9f6ec7d0 100644 --- a/jena-fuseki2/jena-fuseki-mod-geosparql/src/main/java/org/apache/jena/fuseki/mod/geosparql/FMod_SpatialIndexer.java +++ b/jena-fuseki2/jena-fuseki-mod-geosparql/src/main/java/org/apache/jena/fuseki/mod/geosparql/FMod_SpatialIndexer.java @@ -30,16 +30,21 @@ import org.apache.jena.fuseki.server.DataAccessPointRegistry; import org.apache.jena.fuseki.server.DataService; import org.apache.jena.fuseki.server.Endpoint; +import org.apache.jena.fuseki.server.FusekiVocab; import org.apache.jena.fuseki.server.Operation; import org.apache.jena.rdf.model.Model; public class FMod_SpatialIndexer implements FusekiAutoModule { - public static Operation spatialIndexerOperation = - 
Operation.alloc("http://jena.apache.org/fuseki#spatial-indexer", + private static final Operation spatialIndexerOperation = + Operation.alloc(FusekiVocab.NS + "spatial-indexer", "spatial-indexer", "Spatial indexer service"); + public static Operation getOperation() { + return spatialIndexerOperation; + } + public FMod_SpatialIndexer() { super(); } @@ -49,14 +54,11 @@ public String name() { return "Spatial Indexer"; } - @Override - public void start() { - } - @Override public void prepare(FusekiServer.Builder builder, Set datasetNames, Model configModel) { - Fuseki.configLog.info(name() + ": Registering operation " + spatialIndexerOperation.getId()); - builder.registerOperation(spatialIndexerOperation, new SpatialIndexerService()); + Operation spatialIndexerOp = getOperation(); + Fuseki.configLog.info(name() + ": Registering operation " + spatialIndexerOp.getId()); + builder.registerOperation(spatialIndexerOp, new SpatialIndexerService()); } /** @@ -100,8 +102,9 @@ private void autoConfigure(FusekiServer.Builder serverBuilder, DataAccessPointRe Fuseki.configLog.info(logPrefix + "Registering spatial indexer endpoint: " + geoIndexerEndpointName); + Operation op = getOperation(); Endpoint geoIndexerEndpoint = Endpoint.create() - .operation(spatialIndexerOperation) + .operation(op) .endpointName(geoIndexerEndpointName) .authPolicy(authPolicy) .build(); diff --git a/jena-fuseki2/jena-fuseki-mod-geosparql/src/main/java/org/apache/jena/fuseki/mod/geosparql/SpatialIndexerService.java b/jena-fuseki2/jena-fuseki-mod-geosparql/src/main/java/org/apache/jena/fuseki/mod/geosparql/SpatialIndexerService.java index 9787f1dba72..f231ecf2313 100644 --- a/jena-fuseki2/jena-fuseki-mod-geosparql/src/main/java/org/apache/jena/fuseki/mod/geosparql/SpatialIndexerService.java +++ b/jena-fuseki2/jena-fuseki-mod-geosparql/src/main/java/org/apache/jena/fuseki/mod/geosparql/SpatialIndexerService.java @@ -51,8 +51,6 @@ import org.apache.jena.geosparql.spatial.index.v2.SpatialIndexLib; import org.apache.jena.geosparql.spatial.index.v2.SpatialIndexPerGraph; import org.apache.jena.geosparql.spatial.index.v2.SpatialIndexerComputation; -import org.apache.jena.geosparql.spatial.task.BasicTask; -import org.apache.jena.geosparql.spatial.task.BasicTask.TaskListener; import org.apache.jena.graph.Node; import org.apache.jena.graph.NodeFactory; import org.apache.jena.query.Query; @@ -65,6 +63,8 @@ import org.apache.jena.sparql.engine.binding.Binding; import org.apache.jena.sparql.exec.QueryExec; import org.apache.jena.sparql.exec.RowSet; +import org.apache.jena.sparql.exec.tracker.BasicTaskExec; +import org.apache.jena.sparql.exec.tracker.TaskListener; import org.apache.jena.sparql.syntax.syntaxtransform.QueryTransformOps; import org.apache.jena.sparql.util.Context; import org.apache.jena.system.Txn; @@ -215,10 +215,10 @@ protected void serveWebPage(HttpAction action) { } } - protected BasicTask getActiveTask(HttpAction action) { + protected BasicTaskExec getActiveTask(HttpAction action) { DatasetGraph dsg = action.getDataset(); Context cxt = dsg.getContext(); - BasicTask activeTask = cxt.get(SpatialIndexConstants.symSpatialIndexTask); + BasicTaskExec activeTask = cxt.get(SpatialIndexConstants.symSpatialIndexTask); return activeTask; } @@ -357,7 +357,7 @@ public void onStartAsync(AsyncEvent event) { if (clients == null) { clients = new EndpointClients(); } - clients.eventListeners.put(asyncContext, disposeSseListener[0]); + clients.eventListeners.put(asyncContext, disposeSseListener[0]); return clients; }); } @@ -369,7 +369,7 @@ 
protected void doClean(HttpAction action) throws Exception { DatasetGraph dsg = action.getDataset(); Endpoint endpoint = action.getEndpoint(); - TaskListener taskListener = task -> { + TaskListener taskListener = task -> { switch (task.getTaskState()) { case STARTING: { JsonObject json = toJsonTaskStart(task.getStartTime(), null); @@ -377,7 +377,7 @@ protected void doClean(HttpAction action) throws Exception { break; } case TERMINATED: { - JsonObject json = toJsonTaskEnd(task.getEndTime(), task.getThrowable(), task.getStatusMessage()); + JsonObject json = toJsonTaskEnd(task.getFinishTime(), task.getThrowable(), task.getStatusMessage()); broadcastJson(endpoint, json); break; } @@ -395,7 +395,7 @@ protected void doClean(HttpAction action) throws Exception { /** Send a stop request to a running task. Does not wait for the task to terminate. */ protected void doCancel(HttpAction action) { - BasicTask task = getActiveTask(action); + BasicTaskExec task = getActiveTask(action); String state; if (task != null) { state = "true"; @@ -422,7 +422,7 @@ protected void doCancel(HttpAction action) { * */ protected void serveStatus(HttpAction action) { - BasicTask task = getActiveTask(action); + BasicTaskExec task = getActiveTask(action); JsonObject status = new JsonObject(); long time; @@ -435,7 +435,7 @@ protected void serveStatus(HttpAction action) { status.addProperty("isAborting", task.isAborting()); time = !task.isAborting() ? task.getStartTime() : task.getAbortTime(); } else { - time = task.getEndTime(); + time = task.getFinishTime(); } Throwable throwable = task.getThrowable(); if (throwable != null) { @@ -455,29 +455,29 @@ protected void serveStatus(HttpAction action) { successJson(action, jsonStr); } - protected BasicTask scheduleIndexTask(HttpAction action, SpatialIndexerComputation indexComputation, Path targetFile, boolean isReplaceTask) { + protected BasicTaskExec scheduleIndexTask(HttpAction action, SpatialIndexerComputation indexComputation, Path targetFile, boolean isReplaceTask) { Endpoint endpoint = action.getEndpoint(); DatasetGraph dsg = action.getDataset(); long graphCount = indexComputation.getGraphNodes().size(); - TaskListener taskListener = new TaskListener<>() { + TaskListener taskListener = new TaskListener<>() { @Override - public void onStateChange(BasicTask task) { + public void onStateChange(BasicTaskExec task) { switch (task.getTaskState()) { case STARTING: { JsonObject json = toJsonTaskStart(task.getStartTime(), null); broadcastJson(endpoint, json); break; } - case ABORTING: { + case TERMINATING: { JsonObject json = toJsonTaskAbort(task.getAbortTime(), null); broadcastJson(endpoint, json); break; } case TERMINATED: { Throwable throwable = task.getThrowable(); - long endTime = task.getEndTime(); + long endTime = task.getFinishTime(); JsonObject json = toJsonTaskEnd(endTime, throwable, task.getStatusMessage()); broadcastJson(endpoint, json); if (logger.isInfoEnabled()) { diff --git a/jena-fuseki2/jena-fuseki-mod-geosparql/src/test/java/org/apache/jena/fuseki/mod/geosparql/TestFMod_SpatialIndexer.java b/jena-fuseki2/jena-fuseki-mod-geosparql/src/test/java/org/apache/jena/fuseki/mod/geosparql/TestFMod_SpatialIndexer.java index 7480be116e6..a5a041949a9 100644 --- a/jena-fuseki2/jena-fuseki-mod-geosparql/src/test/java/org/apache/jena/fuseki/mod/geosparql/TestFMod_SpatialIndexer.java +++ b/jena-fuseki2/jena-fuseki-mod-geosparql/src/test/java/org/apache/jena/fuseki/mod/geosparql/TestFMod_SpatialIndexer.java @@ -20,18 +20,11 @@ import static 
org.junit.jupiter.api.Assertions.assertEquals; -import java.io.IOException; import java.time.Duration; import java.util.HashMap; import java.util.Map; import java.util.stream.Stream; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Disabled; -import org.junit.jupiter.api.Test; - -import io.github.bonigarcia.wdm.WebDriverManager; import org.apache.jena.atlas.iterator.Iter; import org.apache.jena.fuseki.main.FusekiServer; import org.apache.jena.fuseki.main.cmds.FusekiMain; @@ -46,6 +39,10 @@ import org.apache.jena.sparql.core.DatasetGraph; import org.apache.jena.sparql.core.DatasetGraphFactory; import org.apache.jena.sparql.core.Quad; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; import org.locationtech.jts.geom.Envelope; import org.openqa.selenium.By; import org.openqa.selenium.JavascriptExecutor; @@ -55,6 +52,8 @@ import org.openqa.selenium.chrome.ChromeOptions; import org.openqa.selenium.support.ui.WebDriverWait; +import io.github.bonigarcia.wdm.WebDriverManager; + /** * Test cases that interact with the spatial indexer web UI via Selenium. * @@ -73,7 +72,7 @@ public class TestFMod_SpatialIndexer { private Node graphName1 = NodeFactory.createURI("http://www.example.org/graph1"); @BeforeEach - public void setUp() throws IOException, SpatialIndexException { + public void setUp() throws SpatialIndexException { dsg = DatasetGraphFactory.create(); setupTestData(dsg); @@ -83,8 +82,8 @@ public void setUp() throws IOException, SpatialIndexException { FusekiServer server = FusekiMain.builder(argv) .add("test", dsg) - .registerOperation(FMod_SpatialIndexer.spatialIndexerOperation, new SpatialIndexerService()) - .addEndpoint("test", "spatial-indexer", FMod_SpatialIndexer.spatialIndexerOperation) + .registerOperation(FMod_SpatialIndexer.getOperation(), new SpatialIndexerService()) + .addEndpoint("test", "spatial-indexer", FMod_SpatialIndexer.getOperation()) .build(); server.start(); int port = server.getPort(); diff --git a/jena-fuseki2/jena-fuseki-mod-geosparql/src/test/java/org/apache/jena/fuseki/mod/geosparql/TestSpatialIndexerTasks.java b/jena-fuseki2/jena-fuseki-mod-geosparql/src/test/java/org/apache/jena/fuseki/mod/geosparql/TestSpatialIndexerTasks.java index f0b57a38800..af7c35b3c00 100644 --- a/jena-fuseki2/jena-fuseki-mod-geosparql/src/test/java/org/apache/jena/fuseki/mod/geosparql/TestSpatialIndexerTasks.java +++ b/jena-fuseki2/jena-fuseki-mod-geosparql/src/test/java/org/apache/jena/fuseki/mod/geosparql/TestSpatialIndexerTasks.java @@ -24,11 +24,8 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.ExecutionException; import java.util.function.Predicate; -import org.junit.jupiter.api.Test; - import org.apache.jena.geosparql.implementation.vocabulary.SRS_URI; import org.apache.jena.geosparql.spatial.SpatialIndexException; import org.apache.jena.geosparql.spatial.index.v2.GeometryGenerator; @@ -42,6 +39,7 @@ import org.apache.jena.sparql.core.DatasetGraph; import org.apache.jena.sparql.core.DatasetGraphFactory; import org.apache.jena.sparql.core.Quad; +import org.junit.jupiter.api.Test; import org.locationtech.jts.geom.Envelope; /** Test cases that check for whether the correct graphs are indexed - also when a user is only authorized to a certain subset of graphs. 
*/ @@ -104,7 +102,7 @@ private static SpatialIndexPerGraph clean(DatasetGraph dsg, Predicate isAu } @Test - public void testDsgIndexUpdate() throws SpatialIndexException { + public void testDsgIndexUpdate() { DatasetGraph dsg = createTestData(); Set initialGraphs = Set.of(g2, g3, g4); @@ -131,7 +129,7 @@ public void testDsgIndexReplace() throws SpatialIndexException { } @Test - public void testDsgIndexClean() throws InterruptedException, ExecutionException, SpatialIndexException { + public void testDsgIndexClean() throws SpatialIndexException { DatasetGraph dsg = createTestData(); SpatialIndexLib.buildSpatialIndex(dsg, SRS_URI.DEFAULT_WKT_CRS84); @@ -140,12 +138,12 @@ public void testDsgIndexClean() throws InterruptedException, ExecutionException, SpatialIndexPerGraph spatialIndex = clean(dsg, null); Set actual = spatialIndex.getIndex().getTreeMap().keySet(); - Set expected = Set.of(dg, g4);; + Set expected = Set.of(dg, g4); assertEquals(expected, actual); } @Test - public void testProtectedDsgIndexUpdate() throws SpatialIndexException { + public void testProtectedDsgIndexUpdate() { DatasetGraph dsg = createTestData(); // User can only see g3 and g4 and requests to update both of them. diff --git a/jena-fuseki2/jena-fuseki-server/pom.xml b/jena-fuseki2/jena-fuseki-server/pom.xml index 245ae524646..27c5ba1b0b5 100644 --- a/jena-fuseki2/jena-fuseki-server/pom.xml +++ b/jena-fuseki2/jena-fuseki-server/pom.xml @@ -61,6 +61,12 @@ 5.7.0-SNAPSHOT + + org.apache.jena + jena-fuseki-mod-exectracker + 5.6.0-SNAPSHOT + + org.apache.jena jena-cmds diff --git a/jena-fuseki2/pom.xml b/jena-fuseki2/pom.xml index e46d8b2b792..0a4f5c2a3d9 100644 --- a/jena-fuseki2/pom.xml +++ b/jena-fuseki2/pom.xml @@ -54,6 +54,8 @@ jena-fuseki-access + jena-fuseki-mod-exectracker + jena-fuseki-main diff --git a/jena-geosparql/src/main/java/org/apache/jena/geosparql/spatial/index/v2/SpatialIndexLib.java b/jena-geosparql/src/main/java/org/apache/jena/geosparql/spatial/index/v2/SpatialIndexLib.java index 7541912e9de..fc0828317cb 100644 --- a/jena-geosparql/src/main/java/org/apache/jena/geosparql/spatial/index/v2/SpatialIndexLib.java +++ b/jena-geosparql/src/main/java/org/apache/jena/geosparql/spatial/index/v2/SpatialIndexLib.java @@ -38,8 +38,6 @@ import org.apache.jena.geosparql.spatial.SpatialIndexConstants; import org.apache.jena.geosparql.spatial.SpatialIndexException; import org.apache.jena.geosparql.spatial.task.TaskThread; -import org.apache.jena.geosparql.spatial.task.BasicTask; -import org.apache.jena.geosparql.spatial.task.BasicTask.TaskListener; import org.apache.jena.graph.Graph; import org.apache.jena.graph.Node; import org.apache.jena.query.Dataset; @@ -50,6 +48,8 @@ import org.apache.jena.sparql.core.NamedGraph; import org.apache.jena.sparql.core.Quad; import org.apache.jena.sparql.engine.ExecutionContext; +import org.apache.jena.sparql.exec.tracker.BasicTaskExec; +import org.apache.jena.sparql.exec.tracker.TaskListener; import org.apache.jena.sparql.util.Context; import org.apache.jena.system.AutoTxn; import org.apache.jena.system.Txn; @@ -217,12 +217,12 @@ public static Node unwrapGraphName(Graph graph) { return graphNode; } - public static BasicTask scheduleOnceIndexTask(DatasetGraph dsg, SpatialIndexerComputation indexComputation, Path targetFile, boolean isReplaceTask, - TaskListener taskListener) { + public static BasicTaskExec scheduleOnceIndexTask(DatasetGraph dsg, SpatialIndexerComputation indexComputation, Path targetFile, boolean isReplaceTask, + TaskListener taskListener) { Context cxt = 
dsg.getContext(); - BasicTask task = cxt.compute(SpatialIndexConstants.symSpatialIndexTask, (key, priorTaskObj) -> { - BasicTask priorTask = (BasicTask)priorTaskObj; + BasicTaskExec task = cxt.compute(SpatialIndexConstants.symSpatialIndexTask, (key, priorTaskObj) -> { + BasicTaskExec priorTask = (BasicTaskExec)priorTaskObj; if (priorTask != null && !priorTask.isTerminated()) { throw new RuntimeException("A spatial indexing task is already active for this dataset. Wait for completion or abort it."); } @@ -235,7 +235,7 @@ public static BasicTask scheduleOnceIndexTask(DatasetGraph dsg, SpatialIndexerCo return task; } - public static TaskThread createIndexerTask(DatasetGraph dsg, Predicate isAuthorizedGraph, SpatialIndexerComputation indexComputation, TaskListener taskListener, Path targetFile, boolean isReplaceTask) { + public static TaskThread createIndexerTask(DatasetGraph dsg, Predicate isAuthorizedGraph, SpatialIndexerComputation indexComputation, TaskListener taskListener, Path targetFile, boolean isReplaceTask) { Context cxt = dsg.getContext(); long graphCount = indexComputation.getGraphNodes().size(); boolean isEffectiveUpdate = !isReplaceTask || isAuthorizedGraph != null; @@ -325,10 +325,10 @@ public void requestCancel() { * Attempt to start a spatial index task that cleans the index of graphs not present in the given dataset. * This method fails if there is already another spatial index task running. */ - public static BasicTask scheduleOnceCleanTask(DatasetGraph dsg, TaskListener taskListener) { + public static BasicTaskExec scheduleOnceCleanTask(DatasetGraph dsg, TaskListener taskListener) { Context cxt = dsg.getContext(); - BasicTask task = cxt.compute(SpatialIndexConstants.symSpatialIndexTask, (key, priorTaskObj) -> { - BasicTask priorTask = (BasicTask) priorTaskObj; + BasicTaskExec task = cxt.compute(SpatialIndexConstants.symSpatialIndexTask, (key, priorTaskObj) -> { + BasicTaskExec priorTask = (BasicTaskExec) priorTaskObj; if (priorTask != null && !priorTask.isTerminated()) { throw new RuntimeException("A spatial indexing task is already active for this dataset. 
Wait for completion or abort it."); } @@ -340,7 +340,7 @@ public static BasicTask scheduleOnceCleanTask(DatasetGraph dsg, TaskListener isAuthorizedGraph, TaskListener taskListener) { + public static TaskThread createCleanTask(DatasetGraph dsg, Predicate isAuthorizedGraph, TaskListener taskListener) { Context cxt = dsg.getContext(); TaskThread thread = new TaskThread("Clean action", taskListener) { diff --git a/jena-geosparql/src/main/java/org/apache/jena/geosparql/spatial/task/TaskThread.java b/jena-geosparql/src/main/java/org/apache/jena/geosparql/spatial/task/TaskThread.java index 519778ea58d..98f467556fe 100644 --- a/jena-geosparql/src/main/java/org/apache/jena/geosparql/spatial/task/TaskThread.java +++ b/jena-geosparql/src/main/java/org/apache/jena/geosparql/spatial/task/TaskThread.java @@ -22,6 +22,9 @@ import java.util.concurrent.CancellationException; import java.util.concurrent.atomic.AtomicBoolean; +import org.apache.jena.sparql.exec.tracker.BasicTaskExec; +import org.apache.jena.sparql.exec.tracker.TaskListener; +import org.apache.jena.sparql.exec.tracker.TaskState; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -32,7 +35,7 @@ */ public abstract class TaskThread extends Thread - implements BasicTask + implements BasicTaskExec { private static final Logger logger = LoggerFactory.getLogger(TaskThread.class); @@ -40,7 +43,7 @@ public abstract class TaskThread private Object cancelLock = new Object(); private TaskState state = TaskState.CREATED; - private TaskListener taskListener; + private TaskListener taskListener; // protected List> completionHandlers = new ArrayList<>(); @@ -74,11 +77,11 @@ private void updateState(TaskState state) { } } - public TaskThread(String label, TaskListener taskListener) { + public TaskThread(String label, TaskListener taskListener) { this(label, taskListener, new AtomicBoolean()); } - public TaskThread(String label, TaskListener taskListener, AtomicBoolean requestingCancel) { + public TaskThread(String label, TaskListener taskListener, AtomicBoolean requestingCancel) { super(); this.label = label; this.taskListener = taskListener; @@ -114,7 +117,7 @@ public long getStartTime() { } @Override - public long getEndTime() { + public long getFinishTime() { return endTime; } @@ -182,7 +185,7 @@ protected void requestCancel() { this.throwable = new CancellationException(); updateState(TaskState.TERMINATED); } else { - updateState(TaskState.ABORTING); + updateState(TaskState.TERMINATING); this.interrupt(); } } diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLink.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLink.java index 59554543dd9..849d20f8885 100644 --- a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLink.java +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLink.java @@ -29,7 +29,6 @@ import org.apache.jena.http.HttpEnv; import org.apache.jena.query.Query; import org.apache.jena.query.QueryExecution; -import org.apache.jena.query.QueryFactory; import org.apache.jena.rdfconnection.Isolation; import org.apache.jena.rdfconnection.JenaConnectionException; import org.apache.jena.rdfconnection.LibSec; @@ -355,7 +354,9 @@ public default boolean queryAsk(Query query) { * @return QueryExecution */ @Override - public QueryExec query(Query query); + default public QueryExec query(Query query) { + return newQuery().query(query).build(); + } /** * Setup a SPARQL query execution. 
@@ -374,7 +375,7 @@ public default boolean queryAsk(Query query) { */ @Override public default QueryExec query(String queryString) { - return query(QueryFactory.create(queryString)); + return newQuery().query(queryString).build(); } /** @@ -405,7 +406,7 @@ public default QueryExec query(String queryString) { */ @Override public default void update(Update update) { - update(new UpdateRequest(update)); + newUpdate().update(update).execute(); } /** @@ -413,7 +414,9 @@ public default void update(Update update) { * @param update */ @Override - public void update(UpdateRequest update); + public default void update(UpdateRequest update) { + newUpdate().update(update).execute(); + } /** * Execute a SPARQL Update. @@ -421,7 +424,7 @@ public default void update(Update update) { */ @Override public default void update(String updateString) { - update(UpdateFactory.create(updateString)); + newUpdate().update(updateString).execute(); } /** Fetch the default graph. diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkDataset.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkDataset.java index c86b107bfb5..b684b5477e5 100644 --- a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkDataset.java +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkDataset.java @@ -41,6 +41,7 @@ import org.apache.jena.sparql.graph.GraphFactory; import org.apache.jena.sparql.graph.GraphReadOnly; import org.apache.jena.system.Txn; +import org.apache.jena.update.Update; import org.apache.jena.update.UpdateRequest; /** @@ -76,9 +77,8 @@ private RDFLinkDataset(DatasetGraph dataset) { @Override public QueryExec query(Query query) { checkOpen(); - //return QueryExec.newBuilder().dataset(dataset).query(query).build(); // Delayed. 
- return QueryExecApp.create(QueryExec.dataset(dataset).query(query), + return QueryExecApp.create(newQuery().query(query), dataset, query, null); @@ -101,10 +101,22 @@ public UpdateExecBuilder newUpdate() { return UpdateExec.dataset(dataset); } + @Override + public void update(Update update) { + checkOpen(); + Txn.executeWrite(dataset, ()->newUpdate().update(update).execute()); + } + @Override public void update(UpdateRequest update) { checkOpen(); - Txn.executeWrite(dataset, ()->UpdateExecDatasetBuilder.create().update(update).execute(dataset)); + Txn.executeWrite(dataset, ()->newUpdate().update(update).execute()); + } + + @Override + public void update(String updateString) { + checkOpen(); + Txn.executeWrite(dataset, ()->newUpdate().update(updateString).execute()); } @Override diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkHTTP.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkHTTP.java index b4edd28ece8..69f6018ba3c 100644 --- a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkHTTP.java +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkHTTP.java @@ -43,6 +43,7 @@ import org.apache.jena.sparql.exec.http.UpdateExecHTTPBuilder; import org.apache.jena.sparql.exec.http.UpdateSendMode; import org.apache.jena.system.Txn; +import org.apache.jena.update.Update; import org.apache.jena.update.UpdateFactory; import org.apache.jena.update.UpdateRequest; @@ -303,6 +304,12 @@ public void update(String updateString) { updateExec(null, updateString); } + @Override + public void update(Update update) { + Objects.requireNonNull(update); + updateExec(new UpdateRequest(update), null); + } + @Override public void update(UpdateRequest update) { Objects.requireNonNull(update); @@ -313,10 +320,10 @@ private void updateExec(UpdateRequest update, String updateString ) { checkUpdate(); if ( update == null && updateString == null ) throw new InternalErrorException("Both update request and update string are null"); - UpdateRequest actual = null; + UpdateRequest parsed = null; // Kept for inspection if ( update == null ) { if ( parseCheckUpdates ) - actual = UpdateFactory.create(updateString); + parsed = UpdateFactory.create(updateString); } // Use the update string as provided if possible, otherwise serialize the update. String updateStringToSend = ( updateString != null ) ? updateString : update.toString(); diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/ChainingQueryDispatcherForDatasetGraphOverRDFLink.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/ChainingQueryDispatcherForDatasetGraphOverRDFLink.java new file mode 100644 index 00000000000..e86a93719d3 --- /dev/null +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/ChainingQueryDispatcherForDatasetGraphOverRDFLink.java @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.rdflink.dataset; + +import java.util.Optional; + +import org.apache.jena.query.Query; +import org.apache.jena.query.Syntax; +import org.apache.jena.rdflink.RDFLink; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.engine.Timeouts; +import org.apache.jena.sparql.engine.Timeouts.Timeout; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.engine.dispatch.ChainingQueryDispatcher; +import org.apache.jena.sparql.engine.dispatch.QueryDispatcher; +import org.apache.jena.sparql.engine.dispatch.SparqlDispatcherRegistry; +import org.apache.jena.sparql.exec.QueryExec; +import org.apache.jena.sparql.exec.QueryExecBuilder; +import org.apache.jena.sparql.exec.QueryExecMod; +import org.apache.jena.sparql.util.Context; + +public class ChainingQueryDispatcherForDatasetGraphOverRDFLink + implements ChainingQueryDispatcher { + + @Override + public QueryExec create(Query query, DatasetGraph dsg, Binding initialBinding, Context context, + QueryDispatcher chain) { + QueryExec result = dsg instanceof DatasetGraphOverRDFLink d + ? newQuery(d, initialBinding, context).query(query).build() + : chain.create(query, dsg, initialBinding, context); + return result; + } + + @Override + public QueryExec create(String queryString, Syntax syntax, DatasetGraph dsg, Binding initialBinding, + Context context, QueryDispatcher chain) { + QueryExec result = dsg instanceof DatasetGraphOverRDFLink d + ? 
newQuery(d, initialBinding, context).query(queryString, syntax).build() + : chain.create(queryString, syntax, dsg, initialBinding, context); + return result; + } + + private static QueryExecBuilder newQuery(DatasetGraphOverRDFLink d, Binding binding, Context requestCxt) { + RDFLink link = d.newLink(); + try { + QueryExecBuilder qeBuilder = link.newQuery().context(requestCxt); + + if (binding != null) { + qeBuilder.substitution(binding); + } + + Optional parseCheck = SparqlDispatcherRegistry.getParseCheck(requestCxt); + if (parseCheck.isPresent()) { + qeBuilder.parseCheck(parseCheck.get()); + } + + Timeout timeout = Timeouts.extractQueryTimeout(requestCxt); + applyTimeouts(qeBuilder, timeout); + + return new QueryExecBuilderWrapperCloseRDFLink(qeBuilder, link); + } catch (Throwable t) { + link.close(); + t.addSuppressed(new RuntimeException("Failed to create query execution builder.")); + throw t; + } + } + + private static void applyTimeouts(QueryExecMod mod, Timeout t) { + if (t != null) { + if (t.hasInitialTimeout()) { + mod.initialTimeout(t.initialTimeout().amount(), t.initialTimeout().unit()); + } + if (t.hasOverallTimeout()) { + mod.overallTimeout(t.overallTimeout().amount(), t.overallTimeout().unit()); + } + } + } +} diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/ChainingUpdateDispatcherForDatasetGraphOverRDFLink.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/ChainingUpdateDispatcherForDatasetGraphOverRDFLink.java new file mode 100644 index 00000000000..a54566cc7d6 --- /dev/null +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/ChainingUpdateDispatcherForDatasetGraphOverRDFLink.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.rdflink.dataset; + +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.engine.dispatch.ChainingUpdateDispatcher; +import org.apache.jena.sparql.engine.dispatch.UpdateDispatcher; +import org.apache.jena.sparql.exec.UpdateExec; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.update.UpdateRequest; + +public class ChainingUpdateDispatcherForDatasetGraphOverRDFLink + implements ChainingUpdateDispatcher +{ + @Override + public UpdateExec create(String updateRequestString, DatasetGraph dsg, Binding initialBinding, Context context, UpdateDispatcher chain) { + UpdateExec result = dsg instanceof DatasetGraphOverRDFLink d + ? 
newUpdate(d, initialBinding, context, null, updateRequestString) + : chain.create(updateRequestString, dsg, initialBinding, context); + return result; + } + + @Override + public UpdateExec create(UpdateRequest updateRequest, DatasetGraph dsg, Binding initialBinding, Context context, UpdateDispatcher chain) { + UpdateExec result = dsg instanceof DatasetGraphOverRDFLink d + ? newUpdate(d, initialBinding, context, updateRequest, null) + : chain.create(updateRequest, dsg, initialBinding, context); + return result; + } + + private static UpdateExec newUpdate(DatasetGraphOverRDFLink d, Binding binding, Context requestCxt, UpdateRequest updateRequest, String updateRequestString) { + return new UpdateExecOverRDFLink(d::newLink, binding, requestCxt, updateRequest, updateRequestString); + } +} diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/DatasetGraphOverRDFLink.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/DatasetGraphOverRDFLink.java new file mode 100644 index 00000000000..1176706c757 --- /dev/null +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/DatasetGraphOverRDFLink.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.rdflink.dataset; + +import org.apache.jena.atlas.lib.Creator; +import org.apache.jena.query.Query; +import org.apache.jena.rdflink.RDFLink; +import org.apache.jena.sparql.engine.dispatch.DatasetGraphOverSparql; +import org.apache.jena.sparql.exec.QueryExec; +import org.apache.jena.sparql.exec.UpdateExec; +import org.apache.jena.update.UpdateRequest; + +/** + * DatasetGraph implementation that implements all methods + * against an RDFLink. + * All returned iterators are backed by a fresh RDFLink instance. + * The iterators must be closed to free the resources. + */ +public class DatasetGraphOverRDFLink + extends DatasetGraphOverSparql +{ + private Creator rdfLinkCreator; + + public DatasetGraphOverRDFLink(Creator rdfLinkCreator) { + super(); + this.rdfLinkCreator = rdfLinkCreator; + } + + /** This method can be overridden. 
*/ + public RDFLink newLink() { + RDFLink link = rdfLinkCreator.create(); + return link; + } + + public DatasetGraphOverRDFLink() { + initContext(); + } + + @Override + protected QueryExec query(Query query) { + RDFLink link = newLink(); + QueryExec base = link.query(query); + QueryExec result = new QueryExecWrapperCloseRDFLink(base, link); + return result; + } + + @Override + protected UpdateExec update(UpdateRequest update) { + return new UpdateExecOverRDFLink(this::newLink, null, null, update, null); + } +} diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/InitDatasetGraphOverRDFLink.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/InitDatasetGraphOverRDFLink.java new file mode 100644 index 00000000000..ff35e48f3c9 --- /dev/null +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/InitDatasetGraphOverRDFLink.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.rdflink.dataset; + +import org.apache.jena.rdflink.dataset.assembler.VocabAssemblerHTTP; +import org.apache.jena.sparql.engine.dispatch.SparqlDispatcherRegistry; +import org.apache.jena.sparql.system.InitARQ; +import org.apache.jena.sparql.system.InitExecTracking; +import org.apache.jena.sys.JenaSubsystemLifecycle; + +/** + * Initialize SPARQL dispatcher for {@link DatasetGraphOverRDFLink}. + */ +public class InitDatasetGraphOverRDFLink implements JenaSubsystemLifecycle { + + @Override + public void start() { + init(); + } + + @Override + public void stop() {} + + /** Initialize after {@link InitARQ} and before {@link InitExecTracking}. */ + @Override + public int level() { + return 40 ; + } + + private static boolean initialized = false; + + public synchronized static void init() { + if (!initialized) { + initialized = true; + + SparqlDispatcherRegistry.addDispatcher(new ChainingQueryDispatcherForDatasetGraphOverRDFLink()); + SparqlDispatcherRegistry.addDispatcher(new ChainingUpdateDispatcherForDatasetGraphOverRDFLink()); + + VocabAssemblerHTTP.init(); + } + } +} diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/QueryExecBuilderWrapperCloseRDFLink.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/QueryExecBuilderWrapperCloseRDFLink.java new file mode 100644 index 00000000000..665044b1b3c --- /dev/null +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/QueryExecBuilderWrapperCloseRDFLink.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.rdflink.dataset; + +import org.apache.jena.rdflink.RDFLink; +import org.apache.jena.sparql.exec.QueryExec; +import org.apache.jena.sparql.exec.QueryExecBuilder; +import org.apache.jena.sparql.exec.QueryExecBuilderWrapper; + +public class QueryExecBuilderWrapperCloseRDFLink + extends QueryExecBuilderWrapper { + + protected RDFLink link; + + public QueryExecBuilderWrapperCloseRDFLink(QueryExecBuilder delegate, RDFLink link) { + super(delegate); + this.link = link; + } + + @Override + public QueryExec build() { + try { + QueryExec core = super.build(); + return new QueryExecWrapperCloseRDFLink(core, link); + } catch (Throwable t) { + link.close(); + t.addSuppressed(new RuntimeException("Failed to build query execution.")); + throw t; + } + } +} diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/QueryExecWrapperCloseRDFLink.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/QueryExecWrapperCloseRDFLink.java new file mode 100644 index 00000000000..e47294e0233 --- /dev/null +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/QueryExecWrapperCloseRDFLink.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.jena.rdflink.dataset; + +import org.apache.jena.rdflink.RDFLink; +import org.apache.jena.sparql.exec.QueryExec; +import org.apache.jena.sparql.exec.QueryExecWrapper; + +public class QueryExecWrapperCloseRDFLink + extends QueryExecWrapper +{ + protected RDFLink link; + + public QueryExecWrapperCloseRDFLink(QueryExec delegate, RDFLink link) { + super(delegate); + this.link = link; + } + + @Override + public void close() { + try { + super.close(); + } finally { + link.close(); + } + } +} diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/UpdateExecOverRDFLink.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/UpdateExecOverRDFLink.java new file mode 100644 index 00000000000..70c3af64c8e --- /dev/null +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/UpdateExecOverRDFLink.java @@ -0,0 +1,150 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.rdflink.dataset; + +import java.util.Optional; +import java.util.function.Supplier; + +import org.apache.jena.query.QueryCancelledException; +import org.apache.jena.rdflink.RDFLink; +import org.apache.jena.sparql.engine.Timeouts; +import org.apache.jena.sparql.engine.Timeouts.Timeout; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.engine.dispatch.SparqlDispatcherRegistry; +import org.apache.jena.sparql.exec.UpdateExec; +import org.apache.jena.sparql.exec.UpdateExecBuilder; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.update.UpdateRequest; + +/** + * Deferred update execution that allocates all resources in the + * execute() method. Note, that UpdateExec does not have a close method. 
+ */ +public class UpdateExecOverRDFLink + implements UpdateExec +{ + private Supplier linkSupplier; + private Binding binding; + private Context requestContext; + + private UpdateRequest updateRequest; + private String updateRequestString; + + private Object cancelLock = new Object(); + + private volatile boolean isAborted = false; + private volatile boolean isExecStarted = false; + private volatile UpdateExec delegate = null; + + public UpdateExecOverRDFLink(Supplier linkSupplier, Binding binding, Context context, + UpdateRequest updateRequest, String updateRequestString) { + super(); + this.linkSupplier = linkSupplier; + this.binding = binding; + this.requestContext = context; + this.updateRequest = updateRequest; + this.updateRequestString = updateRequestString; + } + + @Override + public UpdateRequest getUpdateRequest() { + return updateRequest; + } + + @Override + public String getUpdateRequestString() { + return updateRequestString; + } + + /** + * If the execution has not been started then the context configured with this instance + * is returned. Otherwise the context of the delegate is returned. + */ + @Override + public Context getContext() { + return delegate == null ? requestContext : delegate.getContext(); + } + + @Override + public void abort() { + synchronized (cancelLock) { + isAborted = true; + if (delegate != null) { + delegate.abort(); + } + } + } + + @Override + public void execute() { + RDFLink link = null; + try { + synchronized (cancelLock) { + if (isExecStarted) { + throw new IllegalStateException("Execution was already started."); + } + isExecStarted = true; + + if (isAborted) { + throw new QueryCancelledException(); + } + + link = linkSupplier.get(); + UpdateExecBuilder r = link.newUpdate(); + + if (requestContext != null) { + r = r.context(requestContext); + Timeout timeout = Timeouts.extractUpdateTimeout(requestContext); + applyTimeouts(r, timeout); + } + + if (binding != null) { + r = r.substitution(binding); + } + + Optional parseCheck = SparqlDispatcherRegistry.getParseCheck(requestContext); + if (parseCheck.isPresent()) { + r = r.parseCheck(parseCheck.get()); + } + + if (updateRequest != null) { + r = r.update(updateRequest); + } else { + r = r.update(updateRequestString); + } + + delegate = r.build(); + } + + delegate.execute(); + } finally { + if (link != null) { + link.close(); + } + } + } + + private static void applyTimeouts(UpdateExecBuilder uExec, Timeout t) { + if (t != null) { + if (t.hasOverallTimeout()) { + uExec.timeout(t.overallTimeout().amount(), t.overallTimeout().unit()); + } + } + } +} diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/assembler/DatasetAssemblerHTTP.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/assembler/DatasetAssemblerHTTP.java new file mode 100644 index 00000000000..79bdb273d03 --- /dev/null +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/assembler/DatasetAssemblerHTTP.java @@ -0,0 +1,109 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.rdflink.dataset.assembler; + +import java.net.Authenticator; +import java.net.http.HttpClient; +import java.util.Arrays; + +import org.apache.jena.assembler.Assembler; +import org.apache.jena.assembler.exceptions.AssemblerException; +import org.apache.jena.atlas.lib.Creator; +import org.apache.jena.http.HttpEnv; +import org.apache.jena.http.auth.AuthLib; +import org.apache.jena.rdf.model.Resource; +import org.apache.jena.rdflink.RDFLink; +import org.apache.jena.rdflink.RDFLinkHTTP; +import org.apache.jena.rdflink.dataset.DatasetGraphOverRDFLink; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.core.assembler.AssemblerUtils; +import org.apache.jena.sparql.core.assembler.DatasetAssembler; +import org.apache.jena.sparql.util.graph.GraphUtils; +import org.apache.jena.sys.JenaSystem; + +public class DatasetAssemblerHTTP extends DatasetAssembler +{ + static { JenaSystem.init(); } + + @Override + public DatasetGraph createDataset(Assembler a, Resource root) { + return make(a, root); + } + + public static DatasetGraph make(Assembler a, Resource root) { + String destination = GraphUtils.getStringValue(root, VocabAssemblerHTTP.pDestination); + + String queryEndpoint = GraphUtils.getStringValue(root, VocabAssemblerHTTP.pQueryEndpoint); + String updateEndpoint = GraphUtils.getStringValue(root, VocabAssemblerHTTP.pUpdateEndpoint); + String gspEndpoint = GraphUtils.getStringValue(root, VocabAssemblerHTTP.pGspEndpoint); + + // Endpoint-specific properties take precedence; otherwise fall back to the general destination. + String q = queryEndpoint != null ? queryEndpoint : destination; + String u = updateEndpoint != null ? updateEndpoint : destination; + String g = gspEndpoint != null ? gspEndpoint : destination; + + if (q == null && u == null && g == null) { + throw new AssemblerException(root, "No destination set using any of the properties: " + + Arrays.asList(VocabAssemblerHTTP.pDestination, VocabAssemblerHTTP.pQueryEndpoint, VocabAssemblerHTTP.pUpdateEndpoint, VocabAssemblerHTTP.pGspEndpoint)); + } + + String user = GraphUtils.getStringValue(root, VocabAssemblerHTTP.pUser); + String pass = GraphUtils.getStringValue(root, VocabAssemblerHTTP.pPass); + + if (user != null && pass == null) { + throw new AssemblerException(root, "HTTP Credentials: Password is null."); + } + + if (user == null && pass != null) { + throw new AssemblerException(root, "HTTP Credentials: User is null."); + } + + HttpClient httpClient = null; + if (user != null || pass != null) { + Authenticator auth = AuthLib.authenticator(user, pass); + httpClient = HttpEnv.httpClientBuilder().authenticator(auth).build(); + } + + HttpClient h = httpClient; + + Creator linkCreator = () -> { + RDFLink link = RDFLinkHTTP.newBuilder() + .queryEndpoint(q) + .updateEndpoint(u) + .gspEndpoint(g) + .httpClient(h) + .build(); + return link; + }; + + DatasetGraph dsg = new DatasetGraphOverRDFLink(linkCreator); + + // Merge any ja:context settings (e.g. ja:cxtName "arq:queryTimeout") into the dataset context. + AssemblerUtils.mergeContext(root, dsg.getContext()); + return dsg; + } +} diff --git
a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/assembler/VocabAssemblerHTTP.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/assembler/VocabAssemblerHTTP.java new file mode 100644 index 00000000000..56f0cd3355f --- /dev/null +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/assembler/VocabAssemblerHTTP.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.rdflink.dataset.assembler; + +import org.apache.jena.assembler.Assembler; +import org.apache.jena.assembler.JA; +import org.apache.jena.assembler.assemblers.AssemblerGroup; +import org.apache.jena.rdf.model.Property; +import org.apache.jena.rdf.model.Resource; +import org.apache.jena.sparql.core.assembler.AssemblerUtils; +import org.apache.jena.system.Vocab; + +public class VocabAssemblerHTTP +{ + private static final String NS = JA.getURI(); + + public static String getURI() { return NS; } + + // Types + public static final Resource tDatasetHTTP = Vocab.type(NS, "DatasetHTTP"); + + // Properties + public static final Property pUser = Vocab.property(NS, "user"); + public static final Property pPass = Vocab.property(NS, "pass"); + + public static final Property pDestination = Vocab.property(NS, "destination"); + public static final Property pQueryEndpoint = Vocab.property(NS, "queryEndpoint"); + public static final Property pUpdateEndpoint = Vocab.property(NS, "updateEndpoint"); + public static final Property pGspEndpoint = Vocab.property(NS, "gspEndpoint"); + + private static boolean initialized = false; + + static { init(); } + + static public synchronized void init() { + if ( initialized ) + return; + registerWith(Assembler.general()); + initialized = true; + } + + static void registerWith(AssemblerGroup g) { + // Wire in the assemblers. 
+ AssemblerUtils.registerAssembler(g, tDatasetHTTP, new DatasetAssemblerHTTP()); + } +} diff --git a/jena-rdfconnection/src/main/resources/META-INF/services/org.apache.jena.sys.JenaSubsystemLifecycle b/jena-rdfconnection/src/main/resources/META-INF/services/org.apache.jena.sys.JenaSubsystemLifecycle new file mode 100644 index 00000000000..2b6ade72848 --- /dev/null +++ b/jena-rdfconnection/src/main/resources/META-INF/services/org.apache.jena.sys.JenaSubsystemLifecycle @@ -0,0 +1 @@ +org.apache.jena.rdflink.dataset.InitDatasetGraphOverRDFLink diff --git a/jena-rdfconnection/src/test/java/org/apache/jena/rdfconnection/TS_RDFConnection.java b/jena-rdfconnection/src/test/java/org/apache/jena/rdfconnection/TS_RDFConnection.java index 629de0d2153..a486c0c89a4 100644 --- a/jena-rdfconnection/src/test/java/org/apache/jena/rdfconnection/TS_RDFConnection.java +++ b/jena-rdfconnection/src/test/java/org/apache/jena/rdfconnection/TS_RDFConnection.java @@ -28,6 +28,7 @@ TestRDFConnectionLocalTxnMem.class , TestRDFConnectionLocalMRSW.class , TestLibRDFConn.class + , TestRDFConnectionToDatasetGraphOverRDFLink.class , TestRDFConnectionRewrapping.class , TestRDFConnectionHTTPHeaders.class }) diff --git a/jena-rdfconnection/src/test/java/org/apache/jena/rdfconnection/TestRDFConnectionToDatasetGraphOverRDFLink.java b/jena-rdfconnection/src/test/java/org/apache/jena/rdfconnection/TestRDFConnectionToDatasetGraphOverRDFLink.java new file mode 100644 index 00000000000..c3b45a8a395 --- /dev/null +++ b/jena-rdfconnection/src/test/java/org/apache/jena/rdfconnection/TestRDFConnectionToDatasetGraphOverRDFLink.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.rdfconnection; + +import org.apache.jena.query.Dataset; +import org.apache.jena.query.DatasetFactory; +import org.apache.jena.rdflink.RDFLink; +import org.apache.jena.rdflink.dataset.DatasetGraphOverRDFLink; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.core.DatasetGraphFactory; + +public class TestRDFConnectionToDatasetGraphOverRDFLink extends AbstractTestRDFConnection { + + @Override + protected boolean supportsAbort() { return false; } + + @Override + protected RDFConnection connection() { + DatasetGraph backendDsg = DatasetGraphFactory.create(); + DatasetGraph frontendDsg = new DatasetGraphOverRDFLink(() -> RDFLink.connect(backendDsg)); + Dataset dataset = DatasetFactory.wrap(frontendDsg); + return RDFConnection.connect(dataset); + } +}
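
For reference, a minimal usage sketch of the wiring exercised by the test above: an in-memory dataset is placed behind DatasetGraphOverRDFLink, so every query and update issued through RDFConnection is routed via the dispatcher registry to a freshly created (and then closed) RDFLink. The sketch is not part of the patch; the example class name and the SPARQL strings are illustrative only.

    import org.apache.jena.query.Dataset;
    import org.apache.jena.query.DatasetFactory;
    import org.apache.jena.rdfconnection.RDFConnection;
    import org.apache.jena.rdflink.RDFLink;
    import org.apache.jena.rdflink.dataset.DatasetGraphOverRDFLink;
    import org.apache.jena.sparql.core.DatasetGraph;
    import org.apache.jena.sparql.core.DatasetGraphFactory;

    public class DatasetGraphOverRDFLinkExample {
        public static void main(String[] args) {
            // Backend: a plain in-memory dataset, as in the test above.
            DatasetGraph backend = DatasetGraphFactory.create();
            // Frontend: each operation obtains a fresh RDFLink from the creator and closes it when done.
            DatasetGraph frontend = new DatasetGraphOverRDFLink(() -> RDFLink.connect(backend));
            Dataset dataset = DatasetFactory.wrap(frontend);

            try (RDFConnection conn = RDFConnection.connect(dataset)) {
                conn.update("INSERT DATA { <urn:ex:s> <urn:ex:p> <urn:ex:o> }");
                boolean present = conn.queryAsk("ASK { <urn:ex:s> <urn:ex:p> <urn:ex:o> }");
                System.out.println("Triple present: " + present);
            }
        }
    }

The same frontend can also be produced from assembler configuration through the new DatasetHTTP assembler type handled by DatasetAssemblerHTTP, using the destination or endpoint-specific properties declared in VocabAssemblerHTTP.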