From 33273fc9bbcb655cb359dcbc6c27df0ba3fd51c8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martynas=20Jusevi=C4=8Dius?= Date: Wed, 15 Oct 2025 17:43:36 +0200 Subject: [PATCH 1/7] Updated ACL queries to only retrieve authorizations for the current dataspace That is done by injecting `$base` into the queries and adding `FILTER(strstarts(str(?g), str($base)))` --- platform/datasets/admin.trig | 18 --- .../filter/request/AuthorizationFilter.java | 17 ++- .../server/model/impl/ProxyResourceBase.java | 41 ++++++- src/main/webapp/WEB-INF/web.xml | 116 +++++++++++------- .../xsl/bootstrap/2.3.2/client/modal.xsl | 12 +- 5 files changed, 129 insertions(+), 75 deletions(-) diff --git a/platform/datasets/admin.trig b/platform/datasets/admin.trig index 76fa774bb..fc59849c2 100644 --- a/platform/datasets/admin.trig +++ b/platform/datasets/admin.trig @@ -618,24 +618,6 @@ WHERE # AUTHORIZATIONS -# public - - -{ - - a dh:Item ; - sioc:has_container ; - dct:title "Public access" ; - foaf:primaryTopic . - - a acl:Authorization ; - rdfs:label "Public access" ; - rdfs:comment "Allows non-authenticated access" ; - acl:mode acl:Read ; - acl:agentClass foaf:Agent, acl:AuthenticatedAgent . - -} - # access endpoint diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/AuthorizationFilter.java b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/AuthorizationFilter.java index 430887a45..c6c87c2e1 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/AuthorizationFilter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/AuthorizationFilter.java @@ -211,7 +211,7 @@ public Model authorize(ContainerRequestContext request, Resource agent, Resource assert pss.toString().contains("VALUES"); // note we're not setting the $mode value on the ACL queries as we want to provide the AuthorizationContext with all of the agent's authorizations - authorizations.add(loadModel(getAdminService(), pss, new AuthorizationParams(getApplication().getBase(), accessTo, agent).get())); + authorizations.add(loadModel(getAdminService(), pss, new AuthorizationParams(getAdminBase(), accessTo, agent).get())); // access denied if the agent has no authorization to the requested document with the requested ACL mode if (getAuthorizationByMode(authorizations, accessMode) == null) return null; @@ -356,7 +356,7 @@ public Resource createOwnerAuthorization(Model model, Resource accessTo, Resourc /** * Returns the SPARQL service for agent data. - * + * * @return service resource */ protected Service getAdminService() @@ -365,6 +365,19 @@ protected Service getAdminService() getApplication().as(EndUserApplication.class).getAdminApplication().getService() : getApplication().getService(); } + + /** + * Returns the base URI of the admin application. + * Authorization data is always stored in the admin application's dataspace. + * + * @return admin application's base URI + */ + protected Resource getAdminBase() + { + return getApplication().canAs(EndUserApplication.class) ? + getApplication().as(EndUserApplication.class).getAdminApplication().getBase() : + getApplication().getBase(); + } /** * Returns currently matched application. 
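For reference, the `$base` scoping introduced in this patch can be exercised in isolation with Jena's `ParameterizedSparqlString`, the same class the filter uses to build the ACL queries. The sketch below is illustrative only (the query string, class name and base URI are placeholders, not project code), but it shows how binding `$base` makes `FILTER(strstarts(str(?g), str($base)))` restrict matches to named graphs under the admin application's base URI, which is the role the `getAdminBase()` value plays when it is passed to `AuthorizationParams` in the filter above.

import org.apache.jena.query.ParameterizedSparqlString;
import org.apache.jena.rdf.model.ResourceFactory;

public class BaseScopedAclQueryExample
{
    public static void main(String[] args)
    {
        // illustrative ACL-style query: only graphs whose URI starts with $base are considered
        ParameterizedSparqlString pss = new ParameterizedSparqlString(
            "PREFIX acl: <http://www.w3.org/ns/auth/acl#>\n" +
            "DESCRIBE ?auth\n" +
            "WHERE { GRAPH ?g { ?auth acl:mode ?Mode }\n" +
            "  FILTER(strstarts(str(?g), str($base))) }");

        // bind $base to the admin dataspace's base URI (placeholder value)
        pss.setParam("base", ResourceFactory.createResource("https://admin.localhost:4443/"));

        // the substituted query now ignores authorizations stored in other dataspaces' graphs
        System.out.println(pss.toString());
    }
}
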
diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/ProxyResourceBase.java b/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/ProxyResourceBase.java index 5e14266ac..e1478d088 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/ProxyResourceBase.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/ProxyResourceBase.java @@ -39,6 +39,7 @@ import jakarta.ws.rs.Consumes; import jakarta.ws.rs.NotAcceptableException; import jakarta.ws.rs.NotFoundException; +import jakarta.ws.rs.PATCH; import jakarta.ws.rs.POST; import jakarta.ws.rs.PUT; import jakarta.ws.rs.ProcessingException; @@ -215,7 +216,7 @@ public Response get(WebTarget target, Invocation.Builder builder) /** * Forwards POST request with SPARQL query body and returns response from remote resource. - * + * * @param sparqlQuery SPARQL query string * @return response */ @@ -224,9 +225,9 @@ public Response get(WebTarget target, Invocation.Builder builder) public Response post(String sparqlQuery) { if (getWebTarget() == null) throw new NotFoundException("Resource URI not supplied"); - + if (log.isDebugEnabled()) log.debug("POSTing SPARQL query to URI: {}", getWebTarget().getUri()); - + try (Response cr = getWebTarget().request() .accept(getReadableMediaTypes()) .post(Entity.entity(sparqlQuery, com.atomgraph.core.MediaType.APPLICATION_SPARQL_QUERY_TYPE))) @@ -244,7 +245,39 @@ public Response post(String sparqlQuery) throw new BadGatewayException(ex); } } - + + /** + * Forwards PATCH request with SPARQL query body and returns response from remote resource. + * + * @param sparqlQuery SPARQL query string + * @return response + */ + @PATCH + @Consumes(com.atomgraph.core.MediaType.APPLICATION_SPARQL_QUERY) + public Response patch(String sparqlQuery) + { + if (getWebTarget() == null) throw new NotFoundException("Resource URI not supplied"); + + if (log.isDebugEnabled()) log.debug("PATCHing SPARQL query to URI: {}", getWebTarget().getUri()); + + try (Response cr = getWebTarget().request() + .accept(getReadableMediaTypes()) + .method("PATCH", Entity.entity(sparqlQuery, com.atomgraph.core.MediaType.APPLICATION_SPARQL_QUERY_TYPE))) + { + return getResponse(cr); + } + catch (MessageBodyProviderNotFoundException ex) + { + if (log.isWarnEnabled()) log.debug("Dereferenced URI {} returned non-RDF media type", getWebTarget().getUri()); + throw new NotAcceptableException(ex); + } + catch (ProcessingException ex) + { + if (log.isWarnEnabled()) log.debug("Could not dereference URI: {}", getWebTarget().getUri()); + throw new BadGatewayException(ex); + } + } + /** * Forwards a multipart POST request returns RDF response from remote resource. * diff --git a/src/main/webapp/WEB-INF/web.xml b/src/main/webapp/WEB-INF/web.xml index 606e76265..94731adc2 100644 --- a/src/main/webapp/WEB-INF/web.xml +++ b/src/main/webapp/WEB-INF/web.xml @@ -57,29 +57,39 @@ PREFIX acl: PREFIX foaf: DESCRIBE ?auth -FROM WHERE { - { ?auth acl:agent $agent } - UNION - { ?auth acl:agentGroup ?Group . - ?Group foaf:member $agent - } - UNION - { ?auth acl:agentClass foaf:Agent } - UNION - { ?auth acl:agentClass $AuthenticatedAgentClass } + GRAPH ?g { + { ?auth acl:agent $agent } + UNION + { ?auth acl:agentGroup ?Group - ?auth acl:mode ?Mode . - - { ?auth acl:accessTo $this } - UNION - { { ?auth acl:accessToClass $Type } - UNION - { ?auth acl:accessToClass ?Class . 
- $Type (rdfs:subClassOf)* ?Class + GRAPH ?groupG { + ?Group foaf:member $agent } - } + FILTER(strstarts(str(?groupG), str($base))) + + } + UNION + { ?auth acl:agentClass foaf:Agent } + UNION + { ?auth acl:agentClass $AuthenticatedAgentClass } + + ?auth acl:mode ?Mode . + + { ?auth acl:accessTo $this } + UNION + { { ?auth acl:accessToClass $Type } + UNION + { ?auth acl:accessToClass ?Class . + GRAPH { + $Type (rdfs:subClassOf)* ?Class + } + } + } + } + + FILTER(strstarts(str(?g), str($base))) } ]]> @@ -91,41 +101,53 @@ PREFIX acl: PREFIX foaf: DESCRIBE ?auth -FROM WHERE - { { ?auth acl:mode acl:Control . - { ?auth acl:agent $agent } - UNION - { ?auth acl:agentGroup ?Group . - ?Group foaf:member $agent - } - } - UNION - { ?auth acl:agentClass $AuthenticatedAgentClass ; - acl:mode ?Mode - { ?auth acl:accessTo $this + { GRAPH ?g + { { ?auth acl:mode acl:Control . + { ?auth acl:agent $agent } + UNION + { ?auth acl:agentGroup ?Group + + GRAPH ?groupG { + ?Group foaf:member $agent + } + FILTER(strstarts(str(?groupG), str($base))) + + } } UNION - { ?auth acl:accessToClass $Type } - UNION - { ?auth acl:accessToClass ?Class . - $Type (rdfs:subClassOf)* ?Class - } - } - UNION - { ?auth acl:agentClass foaf:Agent ; - acl:mode acl:Read - { ?auth acl:accessTo $this + { ?auth acl:agentClass $AuthenticatedAgentClass ; + acl:mode ?Mode + { ?auth acl:accessTo $this + } + UNION + { ?auth acl:accessToClass $Type } + UNION + { ?auth acl:accessToClass ?Class . + GRAPH { + $Type (rdfs:subClassOf)* ?Class + } + } } UNION - { ?auth acl:accessToClass $Type } - UNION - { ?auth acl:accessToClass ?Class . - $Type (rdfs:subClassOf)* ?Class + { ?auth acl:agentClass foaf:Agent ; + acl:mode acl:Read + { ?auth acl:accessTo $this + } + UNION + { ?auth acl:accessToClass $Type } + UNION + { ?auth acl:accessToClass ?Class . + GRAPH { + $Type (rdfs:subClassOf)* ?Class + } + } + # only namespace, signup, OAuth2 login and WebID profiles can be public in admin app, nothing else + FILTER ( $this IN (uri(concat(str($base), "ns")), uri(concat(str($base), "sign%20up")), uri(concat(str($base), "oauth2/login")), uri(concat(str($base), "oauth2/authorize/google")), uri(concat(str($base), "transform"))) || strstarts(str($this), concat(str($base), "acl/agents/")) || strstarts(str($this), concat(str($base), "acl/public-keys/"))) } - # only namespace, signup, OAuth2 login and WebID profiles can be public in admin app, nothing else - FILTER ( $this IN (uri(concat(str($base), "ns")), uri(concat(str($base), "sign%20up")), uri(concat(str($base), "oauth2/login")), uri(concat(str($base), "oauth2/authorize/google")), uri(concat(str($base), "transform"))) || strstarts(str($this), concat(str($base), "acl/agents/")) || strstarts(str($this), concat(str($base), "acl/public-keys/"))) } + + FILTER(strstarts(str(?g), str($base))) } ]]> diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/modal.xsl b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/modal.xsl index c8bd03eec..2645e62ec 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/modal.xsl +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/modal.xsl @@ -343,7 +343,7 @@ LIMIT 10 - + @@ -412,7 +412,7 @@ LIMIT 10 - +
@@ -706,12 +706,14 @@ LIMIT 10 - - + + + + @@ -1183,6 +1185,7 @@ LIMIT 10 + @@ -1200,6 +1203,7 @@ LIMIT 10 + From 743ca8f3937595c90141244d106475d4a740ae41 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martynas=20Jusevi=C4=8Dius?= Date: Wed, 15 Oct 2025 17:46:08 +0200 Subject: [PATCH 2/7] Adding fake cross-dataspace authorizations to test dataset --- http-tests/run.sh | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/http-tests/run.sh b/http-tests/run.sh index 9b071ca4c..e2409ff02 100755 --- a/http-tests/run.sh +++ b/http-tests/run.sh @@ -136,6 +136,39 @@ export TMP_ADMIN_DATASET=$(mktemp) download_dataset "$END_USER_ENDPOINT_URL" > "$TMP_END_USER_DATASET" download_dataset "$ADMIN_ENDPOINT_URL" > "$TMP_ADMIN_DATASET" +# Add fake cross-dataspace authorizations to test isolation +# These should be filtered out by FILTER(strstarts(str(?g), str($base))) +printf "### Adding fake cross-dataspace authorizations to test dataset\n" +cat >> "$TMP_ADMIN_DATASET" < { + + a ; + "Fake READ authorization from test dataspace" ; + <$AGENT_URI> ; + <$END_USER_BASE_URL> ; + . +} + + { + + a ; + "Fake APPEND authorization from test dataspace" ; + <$AGENT_URI> ; + <$END_USER_BASE_URL> ; + . +} + + { + + a ; + "Fake WRITE authorization from test dataspace" ; + <$AGENT_URI> ; + <$END_USER_BASE_URL> ; + . +} +EOF + ### Other tests ### run_tests $(find ./add/ -type f -name '*.sh') From ce5d8e32801d39b6ad37398825fdd76d223bac47 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martynas=20Jusevi=C4=8Dius?= Date: Mon, 20 Oct 2025 00:13:17 +0200 Subject: [PATCH 3/7] Throw `NotFoundException` when non dataspace (application) is matched Inject `Optional` instead of just `Application` --- bin/admin/acl/add-agent-to-group.sh | 2 +- bin/admin/add-ontology-import.sh | 2 +- bin/imports/create-file.sh | 2 +- bin/patch.sh | 2 +- bin/post.sh | 2 +- bin/put.sh | 2 +- .../admin/acl/add-delete-authorization.sh | 24 ++++++- .../acl/add-delete-class-authorization.sh | 24 ++++++- .../acl/add-delete-group-authorization.sh | 24 ++++++- http-tests/admin/acl/add-get-authorization.sh | 23 ++++++- .../admin/acl/add-get-class-authorization.sh | 23 ++++++- .../admin/acl/add-get-group-authorization.sh | 23 ++++++- .../admin/acl/add-post-authorization.sh | 30 ++++++++- .../admin/acl/add-post-class-authorization.sh | 30 ++++++++- .../admin/acl/add-post-group-authorization.sh | 30 ++++++++- http-tests/admin/acl/add-put-authorization.sh | 30 ++++++++- .../admin/acl/add-put-class-authorization.sh | 30 ++++++++- .../admin/acl/add-put-group-authorization.sh | 30 ++++++++- http-tests/admin/acl/make-public.sh | 22 ++++++- .../dataspaces/non-existent-dataspace.sh | 18 ++++++ http-tests/run.sh | 35 +---------- .../atomgraph/linkeddatahub/Application.java | 11 +++- .../linkeddatahub/resource/Namespace.java | 4 +- .../server/factory/ApplicationFactory.java | 19 +++--- .../server/factory/ServiceFactory.java | 9 ++- .../factory/UnwrappedApplicationFactory.java | 62 +++++++++++++++++++ .../filter/request/ApplicationFilter.java | 28 +++++---- .../filter/request/AuthenticationFilter.java | 16 ++--- .../filter/request/AuthorizationFilter.java | 32 +++++----- .../server/filter/request/OntologyFilter.java | 18 +++--- .../filter/request/auth/IDTokenFilter.java | 13 ++-- .../response/BackendInvalidationFilter.java | 45 +++++++------- .../response/ResponseHeadersFilter.java | 35 ++++++----- .../filter/response/XsltExecutableFilter.java | 15 +++-- .../server/io/ValidatingModelProvider.java | 20 +++--- .../auth/AuthorizationExceptionMapper.java | 15 
++--- .../oauth2/TokenExpiredExceptionMapper.java | 36 +++++++---- .../linkeddatahub/writer/ModelXSLTWriter.java | 4 +- .../linkeddatahub/writer/XSLTWriterBase.java | 19 ++++-- .../writer/factory/DataManagerFactory.java | 32 +++++----- .../writer/factory/ModeFactory.java | 3 + 41 files changed, 620 insertions(+), 224 deletions(-) create mode 100755 http-tests/dataspaces/non-existent-dataspace.sh create mode 100644 src/main/java/com/atomgraph/linkeddatahub/server/factory/UnwrappedApplicationFactory.java diff --git a/bin/admin/acl/add-agent-to-group.sh b/bin/admin/acl/add-agent-to-group.sh index 0cc212b19..bdd790113 100755 --- a/bin/admin/acl/add-agent-to-group.sh +++ b/bin/admin/acl/add-agent-to-group.sh @@ -79,4 +79,4 @@ sparql+="}\n" # PATCH SPARQL to the named graph -echo -e "$sparql" | curl -X PATCH --data-binary @- -s -k -E "$cert_pem_file":"$cert_password" "$target" -H "Content-Type: application/sparql-update" \ No newline at end of file +echo -e "$sparql" | curl -f -X PATCH --data-binary @- -s -k -E "$cert_pem_file":"$cert_password" "$target" -H "Content-Type: application/sparql-update" \ No newline at end of file diff --git a/bin/admin/add-ontology-import.sh b/bin/admin/add-ontology-import.sh index 3333c9ab4..f349e6368 100755 --- a/bin/admin/add-ontology-import.sh +++ b/bin/admin/add-ontology-import.sh @@ -80,4 +80,4 @@ sparql+="}\n" # PATCH SPARQL to the named graph -echo -e "$sparql" | curl -X PATCH --data-binary @- -v -k -E "$cert_pem_file":"$cert_password" "$target" -H "Content-Type: application/sparql-update" \ No newline at end of file +echo -e "$sparql" | curl -f -X PATCH --data-binary @- -v -k -E "$cert_pem_file":"$cert_password" "$target" -H "Content-Type: application/sparql-update" \ No newline at end of file diff --git a/bin/imports/create-file.sh b/bin/imports/create-file.sh index bbb21670d..ae1067c33 100755 --- a/bin/imports/create-file.sh +++ b/bin/imports/create-file.sh @@ -176,7 +176,7 @@ if [ -n "$proxy" ]; then fi # POST RDF/POST multipart form and capture the effective URL -effective_url=$(echo -e "$rdf_post" | curl -w '%{url_effective}' -v -s -k -X PUT -H "Accept: text/turtle" -E "$cert_pem_file":"$cert_password" -o /dev/null --config - "$target") +effective_url=$(echo -e "$rdf_post" | curl -w '%{url_effective}' -f -v -s -k -X PUT -H "Accept: text/turtle" -E "$cert_pem_file":"$cert_password" -o /dev/null --config - "$target") # If using proxy, rewrite the effective URL back to original hostname if [ -n "$proxy" ]; then diff --git a/bin/patch.sh b/bin/patch.sh index 93c821c08..e4fa67b83 100755 --- a/bin/patch.sh +++ b/bin/patch.sh @@ -70,4 +70,4 @@ fi # resolve SPARQL update from stdin against base URL and PATCH it to the server # uparse currently does not support --base: https://github.com/apache/jena/issues/3296 -cat - | curl -v -k -E "$cert_pem_file":"$cert_password" --data-binary @- -H "Content-Type: application/sparql-update" -X PATCH -o /dev/null "$final_url" +cat - | curl -f -v -k -E "$cert_pem_file":"$cert_password" --data-binary @- -H "Content-Type: application/sparql-update" -X PATCH -o /dev/null "$final_url" diff --git a/bin/post.sh b/bin/post.sh index a820065d7..1e0c54c27 100755 --- a/bin/post.sh +++ b/bin/post.sh @@ -80,7 +80,7 @@ else fi # resolve RDF document from stdin against base URL and POST to the server and print request URL -effective_url=$(cat - | turtle --base="$url" | curl -w '%{url_effective}' -v -k -E "$cert_pem_file":"$cert_password" -d @- -H "Content-Type: ${content_type}" -H "Accept: text/turtle" -o /dev/null "$final_url") 
+effective_url=$(cat - | turtle --base="$url" | curl -w '%{url_effective}' -f -v -k -E "$cert_pem_file":"$cert_password" -d @- -H "Content-Type: ${content_type}" -H "Accept: text/turtle" -o /dev/null "$final_url") # If using proxy, rewrite the effective URL back to original hostname if [ -n "$proxy" ]; then diff --git a/bin/put.sh b/bin/put.sh index 3f890a369..b372cb851 100755 --- a/bin/put.sh +++ b/bin/put.sh @@ -80,7 +80,7 @@ else fi # resolve RDF document from stdin against base URL and PUT to the server and print request URL -effective_url=$(cat - | turtle --base="$url" | curl -w '%{url_effective}' -v -k -E "$cert_pem_file":"$cert_password" -d @- -X PUT -H "Content-Type: ${content_type}" -H "Accept: text/turtle" -o /dev/null "$final_url") +effective_url=$(cat - | turtle --base="$url" | curl -w '%{url_effective}' -f -v -k -E "$cert_pem_file":"$cert_password" -d @- -X PUT -H "Content-Type: ${content_type}" -H "Accept: text/turtle" -o /dev/null "$final_url") # If using proxy, rewrite the effective URL back to original hostname if [ -n "$proxy" ]; then diff --git a/http-tests/admin/acl/add-delete-authorization.sh b/http-tests/admin/acl/add-delete-authorization.sh index 4e4cf1b19..0692735f7 100755 --- a/http-tests/admin/acl/add-delete-authorization.sh +++ b/http-tests/admin/acl/add-delete-authorization.sh @@ -28,7 +28,27 @@ container=$(create-container.sh \ --slug "$slug" \ --parent "$END_USER_BASE_URL") -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake DELETE authorization from test.localhost" \ + --agent "$AGENT_URI" \ + --to "$container" \ + --write + +# access is still denied (fake authorization filtered out) + +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Accept: application/n-triples" \ + -X DELETE \ + "$container" \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -39,7 +59,7 @@ create-authorization.sh \ --to "$container" \ --write -# access is allowed after authorization is created +# access is allowed after real authorization is created curl -k -w "%{http_code}\n" -o /dev/null -f -s \ -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ diff --git a/http-tests/admin/acl/add-delete-class-authorization.sh b/http-tests/admin/acl/add-delete-class-authorization.sh index a814147e0..b763c5c5b 100755 --- a/http-tests/admin/acl/add-delete-class-authorization.sh +++ b/http-tests/admin/acl/add-delete-class-authorization.sh @@ -28,7 +28,27 @@ container=$(create-container.sh \ --slug "$slug" \ --parent "$END_USER_BASE_URL") -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake DELETE class authorization from test.localhost" \ + --agent "$AGENT_URI" \ + --to-all-in "https://www.w3.org/ns/ldt/document-hierarchy#Container" \ + --write + +# access is still denied (fake authorization filtered out) + +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Accept: application/n-triples" \ + -X DELETE \ + "$container" \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -39,7 +59,7 @@ create-authorization.sh \ 
--to-all-in "https://www.w3.org/ns/ldt/document-hierarchy#Container" \ --write -# access is allowed after authorization is created +# access is allowed after real authorization is created curl -k -w "%{http_code}\n" -o /dev/null -f -s \ -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ diff --git a/http-tests/admin/acl/add-delete-group-authorization.sh b/http-tests/admin/acl/add-delete-group-authorization.sh index ae55921ca..c6fe39bff 100755 --- a/http-tests/admin/acl/add-delete-group-authorization.sh +++ b/http-tests/admin/acl/add-delete-group-authorization.sh @@ -44,7 +44,27 @@ container=$(create-container.sh \ --slug "$slug" \ --parent "$END_USER_BASE_URL") -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake DELETE group authorization from test.localhost" \ + --agent-group "$group" \ + --to "$container" \ + --write + +# access is still denied (fake authorization filtered out) + +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Accept: application/n-triples" \ + -X DELETE \ + "$container" \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -55,7 +75,7 @@ create-authorization.sh \ --to "$container" \ --write -# access is allowed after authorization is created +# access is allowed after real authorization is created curl -k -w "%{http_code}\n" -o /dev/null -f -s \ -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ diff --git a/http-tests/admin/acl/add-get-authorization.sh b/http-tests/admin/acl/add-get-authorization.sh index 9273104e9..5f9b0c701 100755 --- a/http-tests/admin/acl/add-get-authorization.sh +++ b/http-tests/admin/acl/add-get-authorization.sh @@ -15,7 +15,26 @@ curl -k -w "%{http_code}\n" -o /dev/null -s \ "$END_USER_BASE_URL" \ | grep -q "$STATUS_FORBIDDEN" -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake GET authorization from test.localhost" \ + --agent "$AGENT_URI" \ + --to "$END_USER_BASE_URL" \ + --read + +# access is still denied (fake authorization filtered out) + +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Accept: application/n-triples" \ + "$END_USER_BASE_URL" \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -26,7 +45,7 @@ create-authorization.sh \ --to "$END_USER_BASE_URL" \ --read -# access is allowed after authorization is created +# access is allowed after real authorization is created curl -k -w "%{http_code}\n" -o /dev/null -f -s \ -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ diff --git a/http-tests/admin/acl/add-get-class-authorization.sh b/http-tests/admin/acl/add-get-class-authorization.sh index 0f2b099c1..2d975c739 100755 --- a/http-tests/admin/acl/add-get-class-authorization.sh +++ b/http-tests/admin/acl/add-get-class-authorization.sh @@ -15,7 +15,26 @@ curl -k -w "%{http_code}\n" -o /dev/null -s \ "$END_USER_BASE_URL" \ | grep -q "$STATUS_FORBIDDEN" -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + 
--label "Fake GET Container authorization from test.localhost" \ + --agent "$AGENT_URI" \ + --to-all-in "https://w3id.org/atomgraph/linkeddatahub/default#Root" \ + --read + +# access is still denied (fake authorization filtered out) + +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Accept: application/n-triples" \ + "$END_USER_BASE_URL" \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -26,7 +45,7 @@ create-authorization.sh \ --to-all-in "https://w3id.org/atomgraph/linkeddatahub/default#Root" \ --read -# access is allowed after authorization is created +# access is allowed after real authorization is created curl -k -w "%{http_code}\n" -o /dev/null -f -s \ -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ diff --git a/http-tests/admin/acl/add-get-group-authorization.sh b/http-tests/admin/acl/add-get-group-authorization.sh index 8e99c9e8c..6c890a6ea 100755 --- a/http-tests/admin/acl/add-get-group-authorization.sh +++ b/http-tests/admin/acl/add-get-group-authorization.sh @@ -31,7 +31,26 @@ group=$(curl -s -k \ | cat \ | sed -rn "s/<${group_doc//\//\\/}> <(.*)> \./\1/p") -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake GET group authorization from test.localhost" \ + --agent-group "$group" \ + --to "$END_USER_BASE_URL" \ + --read + +# access is still denied (fake authorization filtered out) + +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Accept: application/n-triples" \ + "$END_USER_BASE_URL" \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -42,7 +61,7 @@ create-authorization.sh \ --to "$END_USER_BASE_URL" \ --read -# access is allowed after authorization is created +# access is allowed after real authorization is created curl -k -w "%{http_code}\n" -o /dev/null -f -s \ -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ diff --git a/http-tests/admin/acl/add-post-authorization.sh b/http-tests/admin/acl/add-post-authorization.sh index d6abdcf62..c07bcf864 100755 --- a/http-tests/admin/acl/add-post-authorization.sh +++ b/http-tests/admin/acl/add-post-authorization.sh @@ -22,7 +22,33 @@ EOF ) \ | grep -q "$STATUS_FORBIDDEN" -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake POST authorization from test.localhost" \ + --agent "$AGENT_URI" \ + --to "$END_USER_BASE_URL" \ + --append + +# access is still denied (fake authorization filtered out) + +( +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Content-Type: application/n-triples" \ + -H "Accept: application/n-triples" \ + -X POST \ + --data-binary @- \ + "$END_USER_BASE_URL" < . 
+EOF +) \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -33,7 +59,7 @@ create-authorization.sh \ --to "$END_USER_BASE_URL" \ --append -# access is allowed after authorization is created +# access is allowed after real authorization is created ( curl -k -w "%{http_code}\n" -o /dev/null -s \ diff --git a/http-tests/admin/acl/add-post-class-authorization.sh b/http-tests/admin/acl/add-post-class-authorization.sh index 50f4f304e..f09d3102c 100755 --- a/http-tests/admin/acl/add-post-class-authorization.sh +++ b/http-tests/admin/acl/add-post-class-authorization.sh @@ -22,7 +22,33 @@ EOF ) \ | grep -q "$STATUS_FORBIDDEN" -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake POST class authorization from test.localhost" \ + --agent "$AGENT_URI" \ + --to-all-in "https://w3id.org/atomgraph/linkeddatahub/default#Root" \ + --append + +# access is still denied (fake authorization filtered out) + +( +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Content-Type: application/n-triples" \ + -H "Accept: application/n-triples" \ + -X POST \ + --data-binary @- \ + "$END_USER_BASE_URL" < . +EOF +) \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -33,7 +59,7 @@ create-authorization.sh \ --to-all-in "https://w3id.org/atomgraph/linkeddatahub/default#Root" \ --append -# access is allowed after authorization is created +# access is allowed after real authorization is created ( curl -k -w "%{http_code}\n" -o /dev/null -s \ diff --git a/http-tests/admin/acl/add-post-group-authorization.sh b/http-tests/admin/acl/add-post-group-authorization.sh index e3e05ad9e..a6d048f7c 100755 --- a/http-tests/admin/acl/add-post-group-authorization.sh +++ b/http-tests/admin/acl/add-post-group-authorization.sh @@ -38,7 +38,33 @@ group=$(curl -s -k \ | cat \ | sed -rn "s/<${group_doc//\//\\/}> <(.*)> \./\1/p") -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake POST group authorization from test.localhost" \ + --agent-group "$group" \ + --to "$END_USER_BASE_URL" \ + --append + +# access is still denied (fake authorization filtered out) + +( +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Content-Type: application/n-triples" \ + -H "Accept: application/n-triples" \ + -X POST \ + --data-binary @- \ + "$END_USER_BASE_URL" < . 
+EOF +) \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -49,7 +75,7 @@ create-authorization.sh \ --to "$END_USER_BASE_URL" \ --append -# access is allowed after authorization is created +# access is allowed after real authorization is created ( curl -k -w "%{http_code}\n" -o /dev/null -s \ diff --git a/http-tests/admin/acl/add-put-authorization.sh b/http-tests/admin/acl/add-put-authorization.sh index 60340973a..f35bbc4b4 100755 --- a/http-tests/admin/acl/add-put-authorization.sh +++ b/http-tests/admin/acl/add-put-authorization.sh @@ -22,7 +22,33 @@ EOF ) \ | grep -q "$STATUS_FORBIDDEN" -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake PUT authorization from test.localhost" \ + --agent "$AGENT_URI" \ + --to "$END_USER_BASE_URL" \ + --write + +# access is still denied (fake authorization filtered out) + +( +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Content-Type: application/n-triples" \ + -H "Accept: application/n-triples" \ + -X PUT \ + --data-binary @- \ + "$END_USER_BASE_URL" < . +EOF +) \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -41,7 +67,7 @@ root_ntriples=$(get.sh \ --accept 'application/n-triples' \ "$END_USER_BASE_URL") -# access is allowed after authorization is created +# access is allowed after real authorization is created # request body with document instance is required echo "$root_ntriples" \ diff --git a/http-tests/admin/acl/add-put-class-authorization.sh b/http-tests/admin/acl/add-put-class-authorization.sh index 86a782969..a23c4cb75 100755 --- a/http-tests/admin/acl/add-put-class-authorization.sh +++ b/http-tests/admin/acl/add-put-class-authorization.sh @@ -22,7 +22,33 @@ EOF ) \ | grep -q "$STATUS_FORBIDDEN" -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake PUT class authorization from test.localhost" \ + --agent "$AGENT_URI" \ + --to-all-in "https://w3id.org/atomgraph/linkeddatahub/default#Root" \ + --write + +# access is still denied (fake authorization filtered out) + +( +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Content-Type: application/n-triples" \ + -H "Accept: application/n-triples" \ + -X PUT \ + --data-binary @- \ + "$END_USER_BASE_URL" < . 
+EOF +) \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -41,7 +67,7 @@ root_ntriples=$(get.sh \ --accept 'application/n-triples' \ "$END_USER_BASE_URL") -# access is allowed after authorization is created +# access is allowed after real authorization is created # request body with document instance is required echo "$root_ntriples" \ diff --git a/http-tests/admin/acl/add-put-group-authorization.sh b/http-tests/admin/acl/add-put-group-authorization.sh index d97d92caf..1d5ccf9d3 100755 --- a/http-tests/admin/acl/add-put-group-authorization.sh +++ b/http-tests/admin/acl/add-put-group-authorization.sh @@ -38,7 +38,33 @@ group=$(curl -s -k \ | cat \ | sed -rn "s/<${group_doc//\//\\/}> <(.*)> \./\1/p") -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake PUT group authorization from test.localhost" \ + --agent-group "$group" \ + --to "$END_USER_BASE_URL" \ + --write + +# access is still denied (fake authorization filtered out) + +( +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Content-Type: application/n-triples" \ + -H "Accept: application/n-triples" \ + -X PUT \ + --data-binary @- \ + "$END_USER_BASE_URL" < . +EOF +) \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -57,7 +83,7 @@ root_ntriples=$(get.sh \ --accept 'application/n-triples' \ "$END_USER_BASE_URL") -# access is allowed after authorization is created +# access is allowed after real authorization is created # request body with document instance is required echo "$root_ntriples" \ diff --git a/http-tests/admin/acl/make-public.sh b/http-tests/admin/acl/make-public.sh index b9398e029..a3900b107 100755 --- a/http-tests/admin/acl/make-public.sh +++ b/http-tests/admin/acl/make-public.sh @@ -14,7 +14,25 @@ curl -k -w "%{http_code}\n" -o /dev/null -v \ "$END_USER_BASE_URL" \ | grep -q "$STATUS_FORBIDDEN" -# create public authorization +# create fake test.localhost public authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake public access from test.localhost" \ + --agent-class 'http://xmlns.com/foaf/0.1/Agent' \ + --to "$END_USER_BASE_URL" \ + --read + +# public access is still forbidden (fake authorization filtered out) + +curl -k -w "%{http_code}\n" -o /dev/null -v \ + -H "Accept: application/n-triples" \ + "$END_USER_BASE_URL" \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost public authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -25,7 +43,7 @@ create-authorization.sh \ --to "$END_USER_BASE_URL" \ --read -# public access is allowed after authorization is created +# public access is allowed after real authorization is created curl -k -w "%{http_code}\n" -o /dev/null -f -v \ -H "Accept: application/n-triples" \ diff --git a/http-tests/dataspaces/non-existent-dataspace.sh b/http-tests/dataspaces/non-existent-dataspace.sh new file mode 100755 index 000000000..86b6c2e5e --- /dev/null +++ b/http-tests/dataspaces/non-existent-dataspace.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Test that accessing a non-configured dataspace returns 404, not 500 + +# Try to access admin on non-existent 
test.localhost dataspace +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -H "Accept: application/n-triples" \ + "https://admin.test.localhost:4443/" \ +| grep -q "$STATUS_NOT_FOUND" + +# Try to access end-user on non-existent test.localhost dataspace +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -H "Accept: application/n-triples" \ + "https://test.localhost:4443/" \ +| grep -q "$STATUS_NOT_FOUND" + +echo "Non-existent dataspaces correctly return 404" diff --git a/http-tests/run.sh b/http-tests/run.sh index e2409ff02..49e8ca193 100755 --- a/http-tests/run.sh +++ b/http-tests/run.sh @@ -136,45 +136,14 @@ export TMP_ADMIN_DATASET=$(mktemp) download_dataset "$END_USER_ENDPOINT_URL" > "$TMP_END_USER_DATASET" download_dataset "$ADMIN_ENDPOINT_URL" > "$TMP_ADMIN_DATASET" -# Add fake cross-dataspace authorizations to test isolation -# These should be filtered out by FILTER(strstarts(str(?g), str($base))) -printf "### Adding fake cross-dataspace authorizations to test dataset\n" -cat >> "$TMP_ADMIN_DATASET" < { - - a ; - "Fake READ authorization from test dataspace" ; - <$AGENT_URI> ; - <$END_USER_BASE_URL> ; - . -} - - { - - a ; - "Fake APPEND authorization from test dataspace" ; - <$AGENT_URI> ; - <$END_USER_BASE_URL> ; - . -} - - { - - a ; - "Fake WRITE authorization from test dataspace" ; - <$AGENT_URI> ; - <$END_USER_BASE_URL> ; - . -} -EOF - ### Other tests ### run_tests $(find ./add/ -type f -name '*.sh') (( error_count += $? )) run_tests $(find ./admin/ -type f -name '*.sh') (( error_count += $? )) +run_tests $(find ./dataspaces/ -type f -name '*.sh') +(( error_count += $? )) run_tests $(find ./access/ -type f -name '*.sh') (( error_count += $? )) run_tests $(find ./imports/ -type f -name '*.sh') diff --git a/src/main/java/com/atomgraph/linkeddatahub/Application.java b/src/main/java/com/atomgraph/linkeddatahub/Application.java index 07bce7672..c554be6ac 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/Application.java +++ b/src/main/java/com/atomgraph/linkeddatahub/Application.java @@ -901,7 +901,16 @@ protected void configure() @Override protected void configure() { - bindFactory(ApplicationFactory.class).to(com.atomgraph.linkeddatahub.apps.model.Application.class). + bindFactory(ApplicationFactory.class).to(new TypeLiteral>() {}). + in(RequestScoped.class); + } + }); + register(new AbstractBinder() + { + @Override + protected void configure() + { + bindFactory(com.atomgraph.linkeddatahub.server.factory.UnwrappedApplicationFactory.class).to(com.atomgraph.linkeddatahub.apps.model.Application.class). in(RequestScoped.class); } }); diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/Namespace.java b/src/main/java/com/atomgraph/linkeddatahub/resource/Namespace.java index d70b63f16..7f86014e9 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/Namespace.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/Namespace.java @@ -76,7 +76,7 @@ public class Namespace extends com.atomgraph.core.model.impl.SPARQLEndpointImpl /** * Constructs endpoint from the in-memory ontology model. 
- * + * * @param request current request * @param uriInfo current request's URI info * @param application current end-user application @@ -86,7 +86,7 @@ public class Namespace extends com.atomgraph.core.model.impl.SPARQLEndpointImpl * @param system system application */ @Inject - public Namespace(@Context Request request, @Context UriInfo uriInfo, + public Namespace(@Context Request request, @Context UriInfo uriInfo, Application application, Optional ontology, MediaTypes mediaTypes, @Context SecurityContext securityContext, com.atomgraph.linkeddatahub.Application system) { diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/factory/ApplicationFactory.java b/src/main/java/com/atomgraph/linkeddatahub/server/factory/ApplicationFactory.java index cc9dd4fea..3f112a08e 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/factory/ApplicationFactory.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/factory/ApplicationFactory.java @@ -20,6 +20,7 @@ import jakarta.ws.rs.container.ContainerRequestContext; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.ext.Provider; +import java.util.Optional; import org.glassfish.hk2.api.Factory; import org.glassfish.hk2.api.ServiceLocator; import org.slf4j.Logger; @@ -32,32 +33,32 @@ * @see com.atomgraph.linkeddatahub.server.model.impl.Dispatcher */ @Provider -public class ApplicationFactory implements Factory +public class ApplicationFactory implements Factory> { private static final Logger log = LoggerFactory.getLogger(ApplicationFactory.class); - + @Context private ServiceLocator serviceLocator; - + @Override - public com.atomgraph.linkeddatahub.apps.model.Application provide() + public Optional provide() { return getApplication(); } @Override - public void dispose(com.atomgraph.linkeddatahub.apps.model.Application t) + public void dispose(Optional t) { } /** * Retrieves application from the request context. - * - * @return application resource + * + * @return optional application resource */ - public com.atomgraph.linkeddatahub.apps.model.Application getApplication() + public Optional getApplication() { - return (com.atomgraph.linkeddatahub.apps.model.Application)getContainerRequestContext().getProperty(LAPP.Application.getURI()); + return (Optional)getContainerRequestContext().getProperty(LAPP.Application.getURI()); } /** diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/factory/ServiceFactory.java b/src/main/java/com/atomgraph/linkeddatahub/server/factory/ServiceFactory.java index 52da8c31b..d3e510bb6 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/factory/ServiceFactory.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/factory/ServiceFactory.java @@ -54,13 +54,16 @@ public void dispose(Optional t) /** * Retrieves (optional) service from container request context. 
- * + * * @return optional service */ public Optional getService() { - Application app = (Application)getContainerRequestContext().getProperty(LAPP.Application.getURI()); - Service service = app.getService(); + Optional appOpt = (Optional)getContainerRequestContext().getProperty(LAPP.Application.getURI()); + + if (!appOpt.isPresent()) return Optional.empty(); + + Service service = appOpt.get().getService(); return Optional.of(service); } diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/factory/UnwrappedApplicationFactory.java b/src/main/java/com/atomgraph/linkeddatahub/server/factory/UnwrappedApplicationFactory.java new file mode 100644 index 000000000..179550fc7 --- /dev/null +++ b/src/main/java/com/atomgraph/linkeddatahub/server/factory/UnwrappedApplicationFactory.java @@ -0,0 +1,62 @@ +/** + * Copyright 2025 Martynas Jusevičius + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package com.atomgraph.linkeddatahub.server.factory; + +import com.atomgraph.linkeddatahub.apps.model.Application; +import jakarta.inject.Inject; +import jakarta.ws.rs.ext.Provider; +import java.util.Optional; +import org.glassfish.hk2.api.Factory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * JAX-RS factory that unwraps Optional<Application> for direct injection. + * This allows resource constructors to inject Application directly while + * filters and providers can inject Optional<Application>. 
+ * + * @author Martynas Jusevičius {@literal } + * @see ApplicationFactory + */ +@Provider +public class UnwrappedApplicationFactory implements Factory +{ + + private static final Logger log = LoggerFactory.getLogger(UnwrappedApplicationFactory.class); + + @Inject jakarta.inject.Provider> optionalApp; + + @Override + public Application provide() + { + Optional appOpt = optionalApp.get(); + + if (!appOpt.isPresent()) + { + if (log.isErrorEnabled()) log.error("Application not present when unwrapping in UnwrappedApplicationFactory"); + return null; // This should only happen if ApplicationFilter threw NotFoundException + } + + return appOpt.get(); + } + + @Override + public void dispose(Application t) + { + } + +} diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/ApplicationFilter.java b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/ApplicationFilter.java index 183f6d8b2..358e1491d 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/ApplicationFilter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/ApplicationFilter.java @@ -27,6 +27,7 @@ import jakarta.annotation.Priority; import jakarta.inject.Inject; import jakarta.ws.rs.BadRequestException; +import jakarta.ws.rs.NotFoundException; import jakarta.ws.rs.container.ContainerRequestContext; import jakarta.ws.rs.container.ContainerRequestFilter; import jakarta.ws.rs.container.PreMatching; @@ -58,9 +59,23 @@ public class ApplicationFilter implements ContainerRequestFilter @Override public void filter(ContainerRequestContext request) throws IOException { + // used by ModeFactory and ModelXSLTWriterBase - set early so it's available even if app matching fails + if (request.getUriInfo().getQueryParameters().containsKey(AC.mode.getLocalName())) + { + List modeUris = request.getUriInfo().getQueryParameters().get(AC.mode.getLocalName()); + List modes = modeUris.stream().map(Mode::new).collect(Collectors.toList()); + request.setProperty(AC.mode.getURI(), modes); + } + else request.setProperty(AC.mode.getURI(), Collections.emptyList()); + // there always have to be an app Resource appResource = getSystem().matchApp(request.getUriInfo().getAbsolutePath()); - if (appResource == null) throw new IllegalStateException("Request URI '" + request.getUriInfo().getAbsolutePath() + "' has not matched any lapp:Application"); + if (appResource == null) + { + // Set empty Optional so response filters can safely check + request.setProperty(LAPP.Application.getURI(), Optional.empty()); + throw new NotFoundException("Request URI '" + request.getUriInfo().getAbsolutePath() + "' has not matched any lapp:Application"); + } // instead of InfModel, do faster explicit checks for subclasses and add rdf:type if (!appResource.canAs(com.atomgraph.linkeddatahub.apps.model.Application.class) && @@ -69,7 +84,7 @@ public void filter(ContainerRequestContext request) throws IOException throw new IllegalStateException("Resource <" + appResource + "> cannot be cast to lapp:Application"); com.atomgraph.linkeddatahub.apps.model.Application app = appResource.as(com.atomgraph.linkeddatahub.apps.model.Application.class); - request.setProperty(LAPP.Application.getURI(), app); // wrap into a helper class so it doesn't interfere with injection of Application + request.setProperty(LAPP.Application.getURI(), Optional.of(app)); // wrap in Optional so response filters can handle missing applications // use the ?uri URL parameter to override the effective request URI if its URI value is relative to the 
app's base URI final URI requestURI; @@ -107,15 +122,6 @@ public void filter(ContainerRequestContext request) throws IOException if (request.getUriInfo().getQueryParameters().containsKey(AC.accept.getLocalName())) request.getHeaders().putSingle(HttpHeaders.ACCEPT, request.getUriInfo().getQueryParameters().getFirst(AC.accept.getLocalName())); - // used by ModeFactory and ModelXSLTWriterBase - if (request.getUriInfo().getQueryParameters().containsKey(AC.mode.getLocalName())) - { - List modeUris = request.getUriInfo().getQueryParameters().get(AC.mode.getLocalName()); - List modes = modeUris.stream().map(Mode::new).collect(Collectors.toList()); - request.setProperty(AC.mode.getURI(), modes); - } - else request.setProperty(AC.mode.getURI(), Collections.emptyList()); - // TO-DO: move Dataset logic to a separate ContainerRequestFilter? Resource datasetResource = getSystem().matchDataset(LAPP.Dataset, request.getUriInfo().getAbsolutePath()); if (datasetResource != null) diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/AuthenticationFilter.java b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/AuthenticationFilter.java index eeac51513..a556d2a07 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/AuthenticationFilter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/AuthenticationFilter.java @@ -54,7 +54,7 @@ public abstract class AuthenticationFilter implements ContainerRequestFilter public static final String ON_BEHALF_OF = "On-Behalf-Of"; @Inject com.atomgraph.linkeddatahub.Application system; - @Inject jakarta.inject.Provider app; + @Inject jakarta.inject.Provider> app; @Inject jakarta.inject.Provider> dataset; /** @@ -111,14 +111,14 @@ public void filter(ContainerRequestContext request) throws IOException /** * Returns the SPARQL service for agent data. - * + * * @return service resource */ protected Service getAgentService() { - return getApplication().canAs(EndUserApplication.class) ? - getApplication().as(EndUserApplication.class).getAdminApplication().getService() : - getApplication().getService(); + return getApplication().get().canAs(EndUserApplication.class) ? + getApplication().get().as(EndUserApplication.class).getAdminApplication().getService() : + getApplication().get().getService(); } /** @@ -183,10 +183,10 @@ protected Resource getResourceByPropertyValue(Model model, Property property, RD /** * Returns currently matched application. 
- * - * @return application resource + * + * @return optional application resource */ - public Application getApplication() + public Optional getApplication() { return app.get(); } diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/AuthorizationFilter.java b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/AuthorizationFilter.java index c6c87c2e1..2ddbda545 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/AuthorizationFilter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/AuthorizationFilter.java @@ -83,7 +83,7 @@ public class AuthorizationFilter implements ContainerRequestFilter ); @Inject com.atomgraph.linkeddatahub.Application system; - @Inject jakarta.inject.Provider app; + @Inject jakarta.inject.Provider> app; @Inject jakarta.inject.Provider> dataset; private ParameterizedSparqlString documentTypeQuery, documentOwnerQuery, aclQuery, ownerAclQuery; @@ -120,8 +120,8 @@ public void filter(ContainerRequestContext request) throws IOException if (log.isWarnEnabled()) log.warn("Skipping authentication/authorization, request method not recognized: {}", request.getMethod()); return; } - - if (getApplication().isReadAllowed()) + + if (getApplication().isPresent() && getApplication().get().isReadAllowed()) { if (request.getMethod().equals(HttpMethod.GET) || request.getMethod().equals(HttpMethod.HEAD)) // allow read-only methods { @@ -169,7 +169,7 @@ public Model authorize(ContainerRequestContext request, Resource agent, Resource createOwnerAuthorization(authorizations, accessTo, agent); } - ResultSetRewindable docTypesResult = loadResultSet(getApplication().getService(), getDocumentTypeQuery(), thisQsm); + ResultSetRewindable docTypesResult = loadResultSet(getApplication().get().getService(), getDocumentTypeQuery(), thisQsm); try { if (!docTypesResult.hasNext()) // if the document resource has no types, we assume the document does not exist @@ -185,7 +185,7 @@ public Model authorize(ContainerRequestContext request, Resource agent, Resource thisQsm.add(SPIN.THIS_VAR_NAME, accessTo); docTypesResult.close(); - docTypesResult = loadResultSet(getApplication().getService(), getDocumentTypeQuery(), thisQsm); + docTypesResult = loadResultSet(getApplication().get().getService(), getDocumentTypeQuery(), thisQsm); Set parentTypes = new HashSet<>(); docTypesResult.forEachRemaining(qs -> parentTypes.add(qs.getResource("Type"))); @@ -205,7 +205,7 @@ public Model authorize(ContainerRequestContext request, Resource agent, Resource else return null; } - ParameterizedSparqlString pss = getApplication().canAs(EndUserApplication.class) ? getACLQuery() : getOwnerACLQuery(); + ParameterizedSparqlString pss = getApplication().get().canAs(EndUserApplication.class) ? 
getACLQuery() : getOwnerACLQuery(); Query query = new SetResultSetValues().apply(pss.asQuery(), docTypesResult); pss = new ParameterizedSparqlString(query.toString()); // make sure VALUES are now part of the query string assert pss.toString().contains("VALUES"); @@ -256,7 +256,7 @@ protected boolean isOwner(Resource accessTo, Resource agent) ParameterizedSparqlString pss = getDocumentOwnerQuery(); pss.setParams(qsm); - ResultSetRewindable ownerResult = loadResultSet(getApplication().getService(), getDocumentOwnerQuery(), qsm); // could use ASK query in principle + ResultSetRewindable ownerResult = loadResultSet(getApplication().get().getService(), getDocumentOwnerQuery(), qsm); // could use ASK query in principle try { return ownerResult.hasNext() && agent.equals(ownerResult.next().getResource("owner")); @@ -361,9 +361,9 @@ public Resource createOwnerAuthorization(Model model, Resource accessTo, Resourc */ protected Service getAdminService() { - return getApplication().canAs(EndUserApplication.class) ? - getApplication().as(EndUserApplication.class).getAdminApplication().getService() : - getApplication().getService(); + return getApplication().get().canAs(EndUserApplication.class) ? + getApplication().get().as(EndUserApplication.class).getAdminApplication().getService() : + getApplication().get().getService(); } /** @@ -374,17 +374,17 @@ protected Service getAdminService() */ protected Resource getAdminBase() { - return getApplication().canAs(EndUserApplication.class) ? - getApplication().as(EndUserApplication.class).getAdminApplication().getBase() : - getApplication().getBase(); + return getApplication().get().canAs(EndUserApplication.class) ? + getApplication().get().as(EndUserApplication.class).getAdminApplication().getBase() : + getApplication().get().getBase(); } /** * Returns currently matched application. - * - * @return application resource + * + * @return optional application resource */ - public com.atomgraph.linkeddatahub.apps.model.Application getApplication() + public Optional getApplication() { return app.get(); } diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/OntologyFilter.java b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/OntologyFilter.java index 0e9676b32..c996d5214 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/OntologyFilter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/OntologyFilter.java @@ -65,17 +65,19 @@ public void filter(ContainerRequestContext crc) throws IOException /** * Retrieves (optional) ontology from the container request context. - * + * * @param crc request context * @return optional ontology */ public Optional getOntology(ContainerRequestContext crc) { - Application app = getApplication(crc); - + Optional appOpt = getApplication(crc); + + if (!appOpt.isPresent()) return Optional.empty(); + try { - return Optional.ofNullable(getOntology(app)); + return Optional.ofNullable(getOntology(appOpt.get())); } catch (OntologyException ex) { @@ -184,13 +186,13 @@ public static void addDocumentModel(OntDocumentManager odm, String importURI) /** * Retrieves application from the container request context. 
- * + * * @param crc request context - * @return application resource + * @return optional application resource */ - public Application getApplication(ContainerRequestContext crc) + public Optional getApplication(ContainerRequestContext crc) { - return ((Application)crc.getProperty(LAPP.Application.getURI())); + return ((Optional)crc.getProperty(LAPP.Application.getURI())); } /** diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/auth/IDTokenFilter.java b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/auth/IDTokenFilter.java index 078c6f7dd..045f18fb9 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/auth/IDTokenFilter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/auth/IDTokenFilter.java @@ -107,12 +107,13 @@ public String getScheme() public void filter(ContainerRequestContext request) throws IOException { if (request.getSecurityContext().getUserPrincipal() != null) return; // skip filter if agent already authorized - if (!getApplication().canAs(EndUserApplication.class) && !getApplication().canAs(AdminApplication.class)) return; // skip "primitive" apps + if (!getApplication().isPresent()) return; // skip if no application matched + if (!getApplication().get().canAs(EndUserApplication.class) && !getApplication().get().canAs(AdminApplication.class)) return; // skip "primitive" apps // do not verify token for auth endpoints as that will lead to redirect loops if (request.getUriInfo().getAbsolutePath().equals(getLoginURL())) return; if (request.getUriInfo().getAbsolutePath().equals(getAuthorizeGoogleURL())) return; - + super.filter(request); } @@ -299,15 +300,15 @@ public URI getAuthorizeGoogleURL() /** * Returns the admin application of the current dataspace. 
- * + * * @return admin application resource */ public AdminApplication getAdminApplication() { - if (getApplication().canAs(EndUserApplication.class)) - return getApplication().as(EndUserApplication.class).getAdminApplication(); + if (getApplication().get().canAs(EndUserApplication.class)) + return getApplication().get().as(EndUserApplication.class).getAdminApplication(); else - return getApplication().as(AdminApplication.class); + return getApplication().get().as(AdminApplication.class); } /** diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/BackendInvalidationFilter.java b/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/BackendInvalidationFilter.java index 4d95fda1e..702f924f8 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/BackendInvalidationFilter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/BackendInvalidationFilter.java @@ -31,6 +31,7 @@ import jakarta.ws.rs.container.ContainerResponseFilter; import jakarta.ws.rs.core.HttpHeaders; import jakarta.ws.rs.core.Response; +import java.util.Optional; import org.apache.jena.rdf.model.Resource; import org.glassfish.jersey.uri.UriComponent; @@ -50,29 +51,32 @@ public class BackendInvalidationFilter implements ContainerResponseFilter public static final String HEADER_NAME = "X-Escaped-Request-URI"; @Inject com.atomgraph.linkeddatahub.Application system; - @Inject jakarta.inject.Provider app; - + @Inject jakarta.inject.Provider> app; + @Override public void filter(ContainerRequestContext req, ContainerResponseContext resp) throws IOException { + // If no application was matched (e.g., non-existent dataspace), skip cache invalidation + if (!getApplication().isPresent()) return; + if (getAdminApplication().getService().getBackendProxy() == null) return; if (req.getMethod().equals(HttpMethod.POST) && resp.getHeaderString(HttpHeaders.LOCATION) != null) { URI location = (URI)resp.getHeaders().get(HttpHeaders.LOCATION).get(0); URI parentURI = location.resolve("..").normalize(); - - ban(getApplication().getService().getBackendProxy(), location.toString()).close(); + + ban(getApplication().get().getService().getBackendProxy(), location.toString()).close(); // ban URI from authorization query results ban(getAdminApplication().getService().getBackendProxy(), location.toString()).close(); // ban parent resource URI in order to avoid stale children data in containers - ban(getApplication().getService().getBackendProxy(), parentURI.toString()).close(); - ban(getApplication().getService().getBackendProxy(), getApplication().getBaseURI().relativize(parentURI).toString()).close(); // URIs can be relative in queries + ban(getApplication().get().getService().getBackendProxy(), parentURI.toString()).close(); + ban(getApplication().get().getService().getBackendProxy(), getApplication().get().getBaseURI().relativize(parentURI).toString()).close(); // URIs can be relative in queries // ban all results of queries that use forClass type if (req.getUriInfo().getQueryParameters().containsKey(AC.forClass.getLocalName())) { String forClass = req.getUriInfo().getQueryParameters().getFirst(AC.forClass.getLocalName()); - ban(getApplication().getService().getBackendProxy(), forClass).close(); + ban(getApplication().get().getService().getBackendProxy(), forClass).close(); } } @@ -82,25 +86,23 @@ public void filter(ContainerRequestContext req, ContainerResponseContext resp) t if 
(!getAdminApplication().getBaseURI().relativize(req.getUriInfo().getAbsolutePath()).isAbsolute()) // URL is relative to the admin app's base URI { ban(getAdminApplication().getService().getBackendProxy(), getAdminApplication().getBaseURI().toString()).close(); -// ban(getAdminApplication().getService().getBackendProxy(), FOAF.Agent.getURI()).close(); ban(getAdminApplication().getService().getBackendProxy(), "foaf:Agent").close(); // queries use prefixed names instead of absolute URIs -// ban(getAdminApplication().getService().getBackendProxy(), ACL.AuthenticatedAgent.getURI()).close(); ban(getAdminApplication().getService().getBackendProxy(), "acl:AuthenticatedAgent").close(); } if (req.getUriInfo().getAbsolutePath().toString().endsWith("/")) { - ban(getApplication().getService().getBackendProxy(), req.getUriInfo().getAbsolutePath().toString()).close(); + ban(getApplication().get().getService().getBackendProxy(), req.getUriInfo().getAbsolutePath().toString()).close(); // ban URI from authorization query results ban(getAdminApplication().getService().getBackendProxy(), req.getUriInfo().getAbsolutePath().toString()).close(); - + // ban parent document URIs (those that have a trailing slash) in order to avoid stale children data in containers - if (!req.getUriInfo().getAbsolutePath().equals(getApplication().getBaseURI())) + if (!req.getUriInfo().getAbsolutePath().equals(getApplication().get().getBaseURI())) { URI parentURI = req.getUriInfo().getAbsolutePath().resolve("..").normalize(); - ban(getApplication().getService().getBackendProxy(), parentURI.toString()).close(); - ban(getApplication().getService().getBackendProxy(), getApplication().getBaseURI().relativize(parentURI).toString()).close(); // URIs can be relative in queries + ban(getApplication().get().getService().getBackendProxy(), parentURI.toString()).close(); + ban(getApplication().get().getService().getBackendProxy(), getApplication().get().getBaseURI().relativize(parentURI).toString()).close(); // URIs can be relative in queries } } } @@ -125,23 +127,24 @@ public Response ban(Resource proxy, String url) /** * Returns admin application of the current dataspace. - * + * * @return admin application resource */ public AdminApplication getAdminApplication() { - if (getApplication().canAs(EndUserApplication.class)) - return getApplication().as(EndUserApplication.class).getAdminApplication(); + com.atomgraph.linkeddatahub.apps.model.Application application = getApplication().get(); + if (application.canAs(EndUserApplication.class)) + return application.as(EndUserApplication.class).getAdminApplication(); else - return getApplication().as(AdminApplication.class); + return application.as(AdminApplication.class); } /** * Returns the current application. 
- * - * @return application resource + * + * @return optional application resource */ - public com.atomgraph.linkeddatahub.apps.model.Application getApplication() + public Optional getApplication() { return app.get(); } diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/ResponseHeadersFilter.java b/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/ResponseHeadersFilter.java index 5d954a016..f444ae697 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/ResponseHeadersFilter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/ResponseHeadersFilter.java @@ -56,7 +56,7 @@ public class ResponseHeadersFilter implements ContainerResponseFilter private static final Logger log = LoggerFactory.getLogger(ResponseHeadersFilter.class); private static final Pattern LINK_SPLITTER = Pattern.compile(",(?=\\s*<)"); // split on commas before next '<' - @Inject jakarta.inject.Provider app; + @Inject jakarta.inject.Provider> app; @Inject jakarta.inject.Provider> dataset; @Inject jakarta.inject.Provider> authorizationContext; @@ -65,31 +65,36 @@ public void filter(ContainerRequestContext request, ContainerResponseContext res { if (response.getStatusInfo().equals(Response.Status.NO_CONTENT)) response.getHeaders().remove(HttpHeaders.CONTENT_TYPE); // needs to be explicitly unset for some reason - + if (request.getSecurityContext().getUserPrincipal() instanceof Agent) { Agent agent = ((Agent)(request.getSecurityContext().getUserPrincipal())); response.getHeaders().add(HttpHeaders.LINK, new Link(URI.create(agent.getURI()), ACL.agent.getURI(), null)); } - + if (getAuthorizationContext().isPresent()) getAuthorizationContext().get().getModeURIs().forEach(mode -> response.getHeaders().add(HttpHeaders.LINK, new Link(mode, ACL.mode.getURI(), null))); - + List linkValues = response.getHeaders().get(HttpHeaders.LINK); List links = parseLinkHeaderValues(linkValues); - + if (getLinksByRel(links, SD.endpoint.getURI()).isEmpty()) // add Link rel=sd:endpoint. 
// TO-DO: The external SPARQL endpoint URL is different from the internal one currently specified as sd:endpoint in the context dataset response.getHeaders().add(HttpHeaders.LINK, new Link(request.getUriInfo().getBaseUriBuilder().path(Dispatcher.class, "getSPARQLEndpoint").build(), SD.endpoint.getURI(), null)); - // add Link rel=ldt:ontology, if the ontology URI is specified - if (getApplication().getOntology() != null) - response.getHeaders().add(HttpHeaders.LINK, new Link(URI.create(getApplication().getOntology().getURI()), LDT.ontology.getURI(), null)); - // add Link rel=ac:stylesheet, if the stylesheet URI is specified - if (getApplication().getStylesheet() != null) - response.getHeaders().add(HttpHeaders.LINK, new Link(URI.create(getApplication().getStylesheet().getURI()), AC.stylesheet.getURI(), null)); - + // Only add application-specific links if application is present + if (getApplication().isPresent()) + { + Application application = getApplication().get(); + // add Link rel=ldt:ontology, if the ontology URI is specified + if (application.getOntology() != null) + response.getHeaders().add(HttpHeaders.LINK, new Link(URI.create(application.getOntology().getURI()), LDT.ontology.getURI(), null)); + // add Link rel=ac:stylesheet, if the stylesheet URI is specified + if (application.getStylesheet() != null) + response.getHeaders().add(HttpHeaders.LINK, new Link(URI.create(application.getStylesheet().getURI()), AC.stylesheet.getURI(), null)); + } + if (response.getHeaders().get(HttpHeaders.LINK) != null) { // combine Link header values into a single value because Saxon-JS 2.x is not able to deal with duplicate header names: https://saxonica.plan.io/issues/5199 @@ -149,10 +154,10 @@ protected List getLinksByRel(List links, String rel) /** * Returns the current application. - * - * @return application resource. + * + * @return optional application resource */ - public com.atomgraph.linkeddatahub.apps.model.Application getApplication() + public Optional getApplication() { return app.get(); } diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/XsltExecutableFilter.java b/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/XsltExecutableFilter.java index 9d7b66560..ee3b9d288 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/XsltExecutableFilter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/XsltExecutableFilter.java @@ -39,6 +39,7 @@ import jakarta.ws.rs.core.Response; import jakarta.ws.rs.core.UriInfo; import java.net.URISyntaxException; +import java.util.Optional; import javax.xml.transform.Source; import javax.xml.transform.stream.StreamSource; import net.sf.saxon.s9api.SaxonApiException; @@ -60,8 +61,8 @@ public class XsltExecutableFilter implements ContainerResponseFilter private static final Logger log = LoggerFactory.getLogger(XsltExecutableFilter.class); @Inject com.atomgraph.linkeddatahub.Application system; - @Inject jakarta.inject.Provider application; - + @Inject jakarta.inject.Provider> application; + @Context UriInfo uriInfo; @Override @@ -71,7 +72,9 @@ public void filter(ContainerRequestContext req, ContainerResponseContext resp) t if (resp.getMediaType() != null && (resp.getMediaType().isCompatible(MediaType.TEXT_HTML_TYPE) || resp.getMediaType().isCompatible(MediaType.APPLICATION_XHTML_XML_TYPE))) { - URI stylesheet = getApplication().getStylesheet() != null ? 
URI.create(getApplication().getStylesheet().getURI()) : null; + URI stylesheet = null; + if (getApplication().isPresent() && getApplication().get().getStylesheet() != null) + stylesheet = URI.create(getApplication().get().getStylesheet().getURI()); if (stylesheet != null) req.setProperty(AC.stylesheet.getURI(), getXsltExecutable(stylesheet)); else req.setProperty(AC.stylesheet.getURI(), getSystem().getXsltExecutable()); @@ -282,10 +285,10 @@ public com.atomgraph.linkeddatahub.Application getSystem() /** * Returns current application. - * - * @return application resource + * + * @return optional application resource */ - public com.atomgraph.linkeddatahub.apps.model.Application getApplication() + public Optional getApplication() { return application.get(); } diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/io/ValidatingModelProvider.java b/src/main/java/com/atomgraph/linkeddatahub/server/io/ValidatingModelProvider.java index 2db8f677b..15307e87f 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/io/ValidatingModelProvider.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/io/ValidatingModelProvider.java @@ -82,7 +82,7 @@ public class ValidatingModelProvider extends com.atomgraph.server.io.ValidatingM @Context UriInfo uriInfo; @Context SecurityContext securityContext; - @Inject jakarta.inject.Provider application; + @Inject jakarta.inject.Provider> application; @Inject com.atomgraph.linkeddatahub.Application system; @Inject jakarta.inject.Provider> agentContextProvider; @@ -237,7 +237,7 @@ public Resource processRead(Resource resource) // this logic really belongs in a } } - if (getApplication().canAs(AdminApplication.class) && resource.hasProperty(RDF.type, OWL.Ontology)) + if (getApplication().get().canAs(AdminApplication.class) && resource.hasProperty(RDF.type, OWL.Ontology)) { // clear cached OntModel if ontology is updated. TO-DO: send event instead getSystem().getOntModelSpec().getDocumentManager().getFileManager().removeCacheModel(resource.getURI()); @@ -258,7 +258,7 @@ public Resource processRead(Resource resource) // this logic really belongs in a public Model processWrite(Model model) { // show foaf:mbox in end-user apps - if (getApplication().canAs(EndUserApplication.class)) return model; + if (getApplication().get().canAs(EndUserApplication.class)) return model; // show foaf:mbox for authenticated agents if (getSecurityContext() != null && getSecurityContext().getUserPrincipal() instanceof Agent) return model; @@ -317,15 +317,15 @@ public static Statement mboxHashStmt(Statement stmt, MessageDigest messageDigest /** * Returns the end-user application of the current dataspace. - * + * * @return end-user application resource */ public EndUserApplication getEndUserApplication() { - if (getApplication().canAs(EndUserApplication.class)) - return getApplication().as(EndUserApplication.class); + if (getApplication().get().canAs(EndUserApplication.class)) + return getApplication().get().as(EndUserApplication.class); else - return getApplication().as(AdminApplication.class).getEndUserApplication(); + return getApplication().get().as(AdminApplication.class).getEndUserApplication(); } @Override @@ -336,10 +336,10 @@ public UriInfo getUriInfo() /** * Returns current application. 
- * - * @return application resource + * + * @return optional application resource */ - public com.atomgraph.linkeddatahub.apps.model.Application getApplication() + public Optional getApplication() { return application.get(); } diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/mapper/auth/AuthorizationExceptionMapper.java b/src/main/java/com/atomgraph/linkeddatahub/server/mapper/auth/AuthorizationExceptionMapper.java index 6c6e02cff..a709a168b 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/mapper/auth/AuthorizationExceptionMapper.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/mapper/auth/AuthorizationExceptionMapper.java @@ -29,6 +29,7 @@ import com.atomgraph.server.mapper.ExceptionMapperBase; import com.atomgraph.server.vocabulary.HTTP; import java.net.URI; +import java.util.Optional; import jakarta.inject.Inject; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.EntityTag; @@ -46,7 +47,7 @@ public class AuthorizationExceptionMapper extends ExceptionMapperBase implements { @Context SecurityContext securityContext; - @Inject jakarta.inject.Provider application; + @Inject jakarta.inject.Provider> application; /** * Constructs mapper from media types. @@ -67,11 +68,11 @@ public Response toResponse(AuthorizationException ex) addLiteral(HTTP.absoluteURI, ex.getAbsolutePath().toString()); // add link to the endpoint for access requests. TO-DO: make the URIs configurable or best - retrieve from sitemap/dataset - if (getSecurityContext().getUserPrincipal() != null) + if (getSecurityContext().getUserPrincipal() != null && getApplication().isPresent()) { - if (getApplication().canAs(EndUserApplication.class)) + if (getApplication().get().canAs(EndUserApplication.class)) { - Resource adminBase = getApplication().as(EndUserApplication.class).getAdminApplication().getBase(); + Resource adminBase = getApplication().get().as(EndUserApplication.class).getAdminApplication().getBase(); URI requestAccessURI = UriBuilder.fromUri(adminBase.getURI()). path(REQUEST_ACCESS_PATH). @@ -99,10 +100,10 @@ public SecurityContext getSecurityContext() /** * Returns associated application. - * - * @return application resource + * + * @return optional application resource */ - public Application getApplication() + public Optional getApplication() { return application.get(); } diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/mapper/auth/oauth2/TokenExpiredExceptionMapper.java b/src/main/java/com/atomgraph/linkeddatahub/server/mapper/auth/oauth2/TokenExpiredExceptionMapper.java index 2f67e5324..1e588f20e 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/mapper/auth/oauth2/TokenExpiredExceptionMapper.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/mapper/auth/oauth2/TokenExpiredExceptionMapper.java @@ -24,6 +24,7 @@ import com.atomgraph.server.mapper.ExceptionMapperBase; import com.auth0.jwt.exceptions.TokenExpiredException; import java.net.URI; +import java.util.Optional; import jakarta.inject.Inject; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.NewCookie; @@ -44,7 +45,7 @@ public class TokenExpiredExceptionMapper extends ExceptionMapperBase implements { @Context UriInfo uriInfo; - @Inject jakarta.inject.Provider application; + @Inject jakarta.inject.Provider> application; /** * Constructs mapper from media types. 
@@ -60,45 +61,54 @@ public TokenExpiredExceptionMapper(MediaTypes mediaTypes) @Override public Response toResponse(TokenExpiredException ex) { - String path = getApplication().getBaseURI().getPath(); + if (!getApplication().isPresent()) + { + // If no application is present, just return a BAD_REQUEST response without redirect + return getResponseBuilder(toResource(ex, Response.Status.BAD_REQUEST, + ResourceFactory.createResource("http://www.w3.org/2011/http-statusCodes#BadRequest")). + getModel()). + build(); + } + + String path = getApplication().get().getBaseURI().getPath(); NewCookie expiredCookie = new NewCookie(IDTokenFilter.COOKIE_NAME, "", path, null, NewCookie.DEFAULT_VERSION, null, 0, false); ResponseBuilder builder = getResponseBuilder(toResource(ex, Response.Status.BAD_REQUEST, ResourceFactory.createResource("http://www.w3.org/2011/http-statusCodes#BadRequest")). getModel()). cookie(expiredCookie); - + URI redirectUri = UriBuilder.fromUri(getAdminApplication().getBaseURI()). path("/oauth2/authorize/google"). // TO-DO: move to config? queryParam(REFERER_PARAM_NAME, getUriInfo().getRequestUri()). // we need to retain URL query parameters build(); - + if (!getUriInfo().getAbsolutePath().equals(redirectUri)) // prevent a perpetual redirect loop builder.status(Status.SEE_OTHER). location(redirectUri); // TO-DO: extract - + return builder.build(); } /** * Returns admin application of the current dataspace. - * + * * @return admin application resource */ public AdminApplication getAdminApplication() { - if (getApplication().canAs(EndUserApplication.class)) - return getApplication().as(EndUserApplication.class).getAdminApplication(); + if (getApplication().get().canAs(EndUserApplication.class)) + return getApplication().get().as(EndUserApplication.class).getAdminApplication(); else - return getApplication().as(AdminApplication.class); + return getApplication().get().as(AdminApplication.class); } - + /** * Returns current application. - * - * @return application resource + * + * @return optional application resource */ - public com.atomgraph.linkeddatahub.apps.model.Application getApplication() + public Optional getApplication() { return application.get(); } diff --git a/src/main/java/com/atomgraph/linkeddatahub/writer/ModelXSLTWriter.java b/src/main/java/com/atomgraph/linkeddatahub/writer/ModelXSLTWriter.java index 2eb22962a..bdf1ebb5c 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/writer/ModelXSLTWriter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/writer/ModelXSLTWriter.java @@ -124,14 +124,14 @@ public void writeTo(Model model, Class type, Type genericType, Annotation[] a /** * Hook for RDF model processing before write. 
- * + * * @param model RDF model * @return RDF model */ public Model processWrite(Model model) { // show foaf:mbox in end-user apps - if (getApplication().get().canAs(EndUserApplication.class)) return model; + if (getApplication().get().isPresent() && getApplication().get().get().canAs(EndUserApplication.class)) return model; // show foaf:mbox for authenticated agents if (getSecurityContext() != null && getSecurityContext().getUserPrincipal() instanceof Agent) return model; diff --git a/src/main/java/com/atomgraph/linkeddatahub/writer/XSLTWriterBase.java b/src/main/java/com/atomgraph/linkeddatahub/writer/XSLTWriterBase.java index 85c0011e6..66f13acb9 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/writer/XSLTWriterBase.java +++ b/src/main/java/com/atomgraph/linkeddatahub/writer/XSLTWriterBase.java @@ -93,7 +93,7 @@ public abstract class XSLTWriterBase extends com.atomgraph.client.writer.XSLTWri @Context SecurityContext securityContext; @Inject com.atomgraph.linkeddatahub.Application system; - @Inject jakarta.inject.Provider application; + @Inject jakarta.inject.Provider> application; @Inject jakarta.inject.Provider dataManager; @Inject jakarta.inject.Provider xsltExecSupplier; @Inject jakarta.inject.Provider> modes; @@ -126,8 +126,15 @@ public Map getParameters(MultivaluedMap appOpt = getApplication().get(); + if (!appOpt.isPresent()) + { + if (log.isWarnEnabled()) log.warn("Application not present in XSLTWriterBase.getParameters()"); + return params; // return early if no application + } + + com.atomgraph.linkeddatahub.apps.model.Application app = appOpt.get(); if (log.isDebugEnabled()) log.debug("Passing $lapp:Application to XSLT: <{}>", app); params.put(new QName("ldt", LDT.base.getNameSpace(), LDT.base.getLocalName()), new XdmAtomicValue(app.getBaseURI())); params.put(new QName("ldh", LDH.origin.getNameSpace(), LDH.origin.getLocalName()), new XdmAtomicValue(app.getOriginURI())); @@ -324,7 +331,7 @@ public DataManager getDataManager() /** * Returns a JAX-RS provider for the RDF data manager. - * + * * @return provider */ public jakarta.inject.Provider getDataManagerProvider() @@ -371,10 +378,10 @@ public Set getSupportedNamespaces() /** * Returns a JAX-RS provider for the current application. 
- * + * * @return provider */ - public jakarta.inject.Provider getApplication() + public jakarta.inject.Provider> getApplication() { return application; } diff --git a/src/main/java/com/atomgraph/linkeddatahub/writer/factory/DataManagerFactory.java b/src/main/java/com/atomgraph/linkeddatahub/writer/factory/DataManagerFactory.java index 5b9bc9cae..c854e9d54 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/writer/factory/DataManagerFactory.java +++ b/src/main/java/com/atomgraph/linkeddatahub/writer/factory/DataManagerFactory.java @@ -28,6 +28,7 @@ import com.atomgraph.linkeddatahub.writer.impl.DataManagerImpl; import java.net.URI; import java.util.HashMap; +import java.util.Optional; import jakarta.inject.Inject; import jakarta.servlet.http.HttpServletRequest; import jakarta.ws.rs.container.ContainerRequestContext; @@ -53,12 +54,13 @@ public class DataManagerFactory implements Factory @Context HttpServletRequest httpServletRequest; @Context Providers providers; @Context ServiceLocator serviceLocator; - + @Inject com.atomgraph.linkeddatahub.Application system; - + @Override public DataManager provide() { + // Always return DataManager, falling back to system DataManager if no Application (e.g., for error responses) return getDataManager(getApplication()); } @@ -66,25 +68,25 @@ public DataManager provide() public void dispose(DataManager t) { } - + /** * Returns RDF data manager. - * - * @param app end-user application + * + * @param appOpt optional end-user application (if empty, system DataManager is used) * @return data manager */ - public DataManager getDataManager(Application app) + public DataManager getDataManager(Optional appOpt) { final com.atomgraph.core.util.jena.DataManager baseManager; - - if (app.canAs(EndUserApplication.class)) - baseManager = (com.atomgraph.core.util.jena.DataManager)getSystem().getOntModelSpec(app.as(EndUserApplication.class)).getDocumentManager().getFileManager(); + + if (appOpt.isPresent() && appOpt.get().canAs(EndUserApplication.class)) + baseManager = (com.atomgraph.core.util.jena.DataManager)getSystem().getOntModelSpec(appOpt.get().as(EndUserApplication.class)).getDocumentManager().getFileManager(); else baseManager = getSystem().getDataManager(); - + LinkedDataClient ldc = LinkedDataClient.create(getSystem().getClient(), getSystem().getMediaTypes()). delegation(getUriInfo().getBaseUri(), getAgentContext()); - + // copy cached models over from the app's FileManager return new DataManagerImpl(LocationMapper.get(), new HashMap<>(baseManager.getModelCache()), ldc, true, getSystem().isPreemptiveAuth(), getSystem().isResolvingUncached(), @@ -155,12 +157,12 @@ public ContainerRequestContext getContainerRequestContext() /** * Retrieves LDT application from the request context. 
- * - * @return LDT application + * + * @return optional LDT application */ - public Application getApplication() + public Optional getApplication() { - return (Application)getContainerRequestContext().getProperty(LAPP.Application.getURI()); + return (Optional)getContainerRequestContext().getProperty(LAPP.Application.getURI()); } } \ No newline at end of file diff --git a/src/main/java/com/atomgraph/linkeddatahub/writer/factory/ModeFactory.java b/src/main/java/com/atomgraph/linkeddatahub/writer/factory/ModeFactory.java index 303020eb5..e7016b906 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/writer/factory/ModeFactory.java +++ b/src/main/java/com/atomgraph/linkeddatahub/writer/factory/ModeFactory.java @@ -18,11 +18,13 @@ import com.atomgraph.client.vocabulary.AC; import com.atomgraph.linkeddatahub.writer.Mode; +import java.util.Collections; import java.util.List; import jakarta.ws.rs.container.ContainerRequestContext; import jakarta.ws.rs.core.Context; import org.glassfish.hk2.api.Factory; import org.glassfish.hk2.api.ServiceLocator; +import jakarta.ws.rs.ext.Provider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -31,6 +33,7 @@ * * @author {@literal Martynas Jusevičius } */ +@Provider public class ModeFactory implements Factory> { From e5f4580a60dd99ec5adb6c3543e06bdea361e5cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martynas=20Jusevi=C4=8Dius?= Date: Wed, 22 Oct 2025 23:05:28 +0200 Subject: [PATCH 4/7] Set `-f` on curl calls in tests --- bin/get.sh | 4 ++-- bin/post.sh | 2 +- bin/put.sh | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/bin/get.sh b/bin/get.sh index 50f43f3c9..09ebb262f 100755 --- a/bin/get.sh +++ b/bin/get.sh @@ -84,7 +84,7 @@ fi # GET RDF document if [ -n "$head" ] ; then - curl -v -k -E "$cert_pem_file":"$cert_password" -H "Accept: ${accept}" "$target" --head + curl -f -v -k -E "$cert_pem_file":"$cert_password" -H "Accept: ${accept}" "$target" --head else - curl -v -k -E "$cert_pem_file":"$cert_password" -H "Accept: ${accept}" "$target" + curl -f -v -k -E "$cert_pem_file":"$cert_password" -H "Accept: ${accept}" "$target" fi \ No newline at end of file diff --git a/bin/post.sh b/bin/post.sh index 1e0c54c27..54e49eafe 100755 --- a/bin/post.sh +++ b/bin/post.sh @@ -80,7 +80,7 @@ else fi # resolve RDF document from stdin against base URL and POST to the server and print request URL -effective_url=$(cat - | turtle --base="$url" | curl -w '%{url_effective}' -f -v -k -E "$cert_pem_file":"$cert_password" -d @- -H "Content-Type: ${content_type}" -H "Accept: text/turtle" -o /dev/null "$final_url") +effective_url=$(cat - | turtle --base="$url" | curl -w '%{url_effective}' -f -v -k -E "$cert_pem_file":"$cert_password" -d @- -H "Content-Type: ${content_type}" -H "Accept: text/turtle" -o /dev/null "$final_url") || exit $? 
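# A minimal sketch of the pattern above, with a hypothetical URL: `-f` makes
# curl exit non-zero on HTTP 4xx/5xx responses, and `|| exit $?` propagates
# that exit code out of the command substitution, which by itself only yields
# the captured output and (without `set -e`) would let the script carry on
# with an empty effective URL.
effective_url=$(curl -s -f -w '%{url_effective}' -o /dev/null "https://example.org/doc") || exit $?
echo "Resolved to: $effective_url"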
# If using proxy, rewrite the effective URL back to original hostname if [ -n "$proxy" ]; then diff --git a/bin/put.sh b/bin/put.sh index b372cb851..799d81d2b 100755 --- a/bin/put.sh +++ b/bin/put.sh @@ -80,7 +80,7 @@ else fi # resolve RDF document from stdin against base URL and PUT to the server and print request URL -effective_url=$(cat - | turtle --base="$url" | curl -w '%{url_effective}' -f -v -k -E "$cert_pem_file":"$cert_password" -d @- -X PUT -H "Content-Type: ${content_type}" -H "Accept: text/turtle" -o /dev/null "$final_url") +effective_url=$(cat - | turtle --base="$url" | curl -w '%{url_effective}' -f -v -k -E "$cert_pem_file":"$cert_password" -d @- -X PUT -H "Content-Type: ${content_type}" -H "Accept: text/turtle" -o /dev/null "$final_url") || exit $? # If using proxy, rewrite the effective URL back to original hostname if [ -n "$proxy" ]; then From a8f3c6396ae4fc65a659c1693ec45b990884a4b1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martynas=20Jusevi=C4=8Dius?= Date: Wed, 22 Oct 2025 23:50:56 +0200 Subject: [PATCH 5/7] Refactor agent metadata and authorization handling Introduces separate templates for owner and secretary authorizations, updates entrypoint.sh to extract and manage agent metadata and authorizations per app, and adds a SPARQL query for agent metadata extraction. Updates Dockerfile and test configurations to support new dataset and metadata handling. Refactors Java `ValidatingModelProvider` to improve application presence checks and mbox processing. Also improves XSLT title rendering and test scripts for non-existent dataspaces. --- Dockerfile | 6 + bin/admin/acl/create-authorization.sh | 1 + http-tests/config/system.trig | 92 +++++++++++++++ .../dataspaces/non-existent-dataspace.sh | 6 +- http-tests/docker-compose.http-tests.yml | 6 +- http-tests/root-owner.trig.template | 88 ++++++++++++++ platform/entrypoint.sh | 109 ++++++++++++++++-- .../root-owner-authorization.trig.template | 33 ++++++ platform/root-owner.trig.template | 35 +----- ...root-secretary-authorization.trig.template | 34 ++++++ platform/root-secretary.trig.template | 36 +----- platform/select-agent-metadata.rq | 13 +++ .../server/io/ValidatingModelProvider.java | 17 +-- .../xsl/bootstrap/2.3.2/layout.xsl | 10 +- 14 files changed, 398 insertions(+), 88 deletions(-) create mode 100644 http-tests/config/system.trig create mode 100644 http-tests/root-owner.trig.template create mode 100644 platform/root-owner-authorization.trig.template create mode 100644 platform/root-secretary-authorization.trig.template create mode 100644 platform/select-agent-metadata.rq diff --git a/Dockerfile b/Dockerfile index f6aac80b8..e5df99152 100644 --- a/Dockerfile +++ b/Dockerfile @@ -145,12 +145,18 @@ COPY platform/import-letsencrypt-stg-roots.sh import-letsencrypt-stg-roots.sh COPY platform/select-root-services.rq select-root-services.rq +COPY platform/select-agent-metadata.rq select-agent-metadata.rq + # copy the metadata of built-in agents COPY platform/root-secretary.trig.template root-secretary.trig.template COPY platform/root-owner.trig.template root-owner.trig.template +COPY platform/root-secretary-authorization.trig.template root-secretary-authorization.trig.template + +COPY platform/root-owner-authorization.trig.template root-owner-authorization.trig.template + # copy the metadata of the namespace ontology COPY platform/namespace-ontology.trig.template namespace-ontology.trig.template diff --git a/bin/admin/acl/create-authorization.sh b/bin/admin/acl/create-authorization.sh index 82e5cd909..b2a1b6d68 100755 --- 
a/bin/admin/acl/create-authorization.sh +++ b/bin/admin/acl/create-authorization.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +set -eo pipefail print_usage() { diff --git a/http-tests/config/system.trig b/http-tests/config/system.trig new file mode 100644 index 000000000..47ed5c76a --- /dev/null +++ b/http-tests/config/system.trig @@ -0,0 +1,92 @@ +@prefix lapp: . +@prefix ldh: . +@prefix a: . +@prefix ac: . +@prefix rdf: . +@prefix rdfs: . +@prefix xsd: . +@prefix ldt: . +@prefix sd: . +@prefix dct: . +@prefix foaf: . + +### do not use blank nodes to identify resources! ### +### urn: URI scheme is used because applications/services are not accessible in their own dataspace (under $BASE_URI) ### + +# root admin + + a lapp:Application, lapp:AdminApplication ; + dct:title "LinkedDataHub admin" ; + # ldt:base ; + ldh:origin ; + ldt:ontology ; + ldt:service ; + ac:stylesheet ; + lapp:endUserApplication ; + lapp:frontendProxy . + + a sd:Service ; + dct:title "LinkedDataHub admin service" ; + sd:supportedLanguage sd:SPARQL11Query, sd:SPARQL11Update ; + sd:endpoint ; + a:graphStore ; + a:quadStore ; + lapp:backendProxy . + +# root end-user + + a lapp:Application, lapp:EndUserApplication ; + dct:title "LinkedDataHub" ; + # ldt:base ; + ldh:origin ; + ldt:ontology ; + ldt:service ; + lapp:adminApplication ; + lapp:frontendProxy ; + lapp:public true . + + a sd:Service ; + dct:title "LinkedDataHub service" ; + sd:supportedLanguage sd:SPARQL11Query, sd:SPARQL11Update ; + sd:endpoint ; + a:graphStore ; + a:quadStore ; + lapp:backendProxy . + +# test admin + + a lapp:Application, lapp:AdminApplication ; + dct:title "Test admin" ; + ldh:origin ; + ldt:ontology ; + ldt:service ; + ac:stylesheet ; + lapp:endUserApplication ; + lapp:frontendProxy . + + a sd:Service ; + dct:title "Test admin service" ; + sd:supportedLanguage sd:SPARQL11Query, sd:SPARQL11Update ; + sd:endpoint ; + a:graphStore ; + a:quadStore ; + lapp:backendProxy . + +# test end-user + + a lapp:Application, lapp:EndUserApplication ; + dct:title "Test" ; + ldh:origin ; + ldt:ontology ; + ldt:service ; + lapp:adminApplication ; + lapp:frontendProxy ; + lapp:public true . + + a sd:Service ; + dct:title "Test service" ; + sd:supportedLanguage sd:SPARQL11Query, sd:SPARQL11Update ; + sd:endpoint ; + a:graphStore ; + a:quadStore ; + lapp:backendProxy . 
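The test context dataset above pairs each end-user application with its admin application via lapp:adminApplication / lapp:endUserApplication. A quick way to sanity-check those pairs before the container mounts the file is to run a SPARQL query over it with the same Jena `sparql` CLI that entrypoint.sh already uses; a minimal sketch, assuming the standard LinkedDataHub lapp namespace URI and that /dev/stdin is usable as the query file:

sparql --data=http-tests/config/system.trig --results=TSV --query=/dev/stdin <<'EOF'
# List end-user/admin application pairs defined in the context dataset
PREFIX lapp: <https://w3id.org/atomgraph/linkeddatahub/apps#>
SELECT ?endUserApp ?adminApp
WHERE { ?endUserApp lapp:adminApplication ?adminApp }
EOF
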
diff --git a/http-tests/dataspaces/non-existent-dataspace.sh b/http-tests/dataspaces/non-existent-dataspace.sh index 86b6c2e5e..ae443f7d3 100755 --- a/http-tests/dataspaces/non-existent-dataspace.sh +++ b/http-tests/dataspaces/non-existent-dataspace.sh @@ -6,13 +6,11 @@ set -euo pipefail # Try to access admin on non-existent test.localhost dataspace curl -k -w "%{http_code}\n" -o /dev/null -s \ -H "Accept: application/n-triples" \ - "https://admin.test.localhost:4443/" \ + "https://admin.non-existing.localhost:4443/" \ | grep -q "$STATUS_NOT_FOUND" # Try to access end-user on non-existent test.localhost dataspace curl -k -w "%{http_code}\n" -o /dev/null -s \ -H "Accept: application/n-triples" \ - "https://test.localhost:4443/" \ + "https://non-existing.localhost:4443/" \ | grep -q "$STATUS_NOT_FOUND" - -echo "Non-existent dataspaces correctly return 404" diff --git a/http-tests/docker-compose.http-tests.yml b/http-tests/docker-compose.http-tests.yml index 0d8e28d3a..158c2e29c 100644 --- a/http-tests/docker-compose.http-tests.yml +++ b/http-tests/docker-compose.http-tests.yml @@ -11,8 +11,10 @@ services: environment: - JPDA_ADDRESS=*:8000 # debugger host - performance hit when enabled volumes: - - ./http-tests/datasets/owner:/var/linkeddatahub/datasets/owner - - ./http-tests/datasets/secretary:/var/linkeddatahub/datasets/secretary + - ./http-tests/config/system.trig:/var/linkeddatahub/datasets/system.trig:ro + - ./http-tests/root-owner.trig.template:/var/linkeddatahub/root-owner.trig.template:ro + - ./datasets/owner:/var/linkeddatahub/datasets/owner + - ./datasets/secretary:/var/linkeddatahub/datasets/secretary - ./http-tests/uploads:/var/www/linkeddatahub/uploads - ./http-tests/ssl/server:/var/linkeddatahub/ssl/server - ./http-tests/ssl/owner:/var/linkeddatahub/ssl/owner diff --git a/http-tests/root-owner.trig.template b/http-tests/root-owner.trig.template new file mode 100644 index 000000000..1b78aad03 --- /dev/null +++ b/http-tests/root-owner.trig.template @@ -0,0 +1,88 @@ +@prefix rdfs: . +@prefix xsd: . +@prefix acl: . +@prefix cert: . +@prefix dh: . +@prefix sioc: . +@prefix foaf: . +@prefix dct: . + +# AGENT + +<${OWNER_DOC_URI}> +{ + + <${OWNER_DOC_URI}> a dh:Item ; + foaf:primaryTopic <${OWNER_URI}> ; + sioc:has_container ; + dct:title "${OWNER_COMMON_NAME}" . + + <${OWNER_URI}> a foaf:Agent ; + foaf:name "${OWNER_COMMON_NAME}" ; + foaf:mbox ; + cert:key . + + # secretary delegates the owner agent + + <${SECRETARY_URI}> acl:delegates <${OWNER_URI}> . + +} + +# PUBLIC KEY + + +{ + + a dh:Item ; + foaf:primaryTopic ; + sioc:has_container ; + dct:title "${OWNER_COMMON_NAME}" . + + a cert:PublicKey ; + rdfs:label "${OWNER_COMMON_NAME}" ; + cert:modulus "${OWNER_PUBLIC_KEY_MODULUS}"^^xsd:hexBinary; + cert:exponent 65537 . + +} + +# AUTHORIZATIONS + +# root owner is a member of the owners group + + +{ + + foaf:member <${OWNER_URI}> . +} + + # TO-DO: use $OWNER_AUTH_UUID +{ + + a dh:Item ; + foaf:primaryTopic ; + sioc:has_container ; + dct:title "Public owner's WebID" . + + a acl:Authorization ; + acl:accessTo <${OWNER_DOC_URI}>, ; + acl:mode acl:Read ; + acl:agentClass foaf:Agent, acl:AuthenticatedAgent . + +} + +# test.localhost owner authorization (for HTTP tests) + + +{ + + a dh:Item ; + foaf:primaryTopic ; + dct:title "Test owner Control authorization" . + + a acl:Authorization ; + acl:accessTo ; + acl:accessToClass ; + acl:mode acl:Control ; + acl:agent <${OWNER_URI}> . 
+ +} diff --git a/platform/entrypoint.sh b/platform/entrypoint.sh index b651224d8..2e642429c 100755 --- a/platform/entrypoint.sh +++ b/platform/entrypoint.sh @@ -429,11 +429,13 @@ if [ ! -f "$OWNER_PUBLIC_KEY" ]; then OWNER_DOC_URI="${ADMIN_BASE_URI}acl/agents/${OWNER_UUID}/" OWNER_KEY_UUID=$(uuidgen | tr '[:upper:]' '[:lower:]') # lowercase + OWNER_KEY_DOC_URI="${ADMIN_BASE_URI}acl/public-keys/${OWNER_KEY_UUID}/" + OWNER_KEY_URI="${OWNER_KEY_DOC_URI}#this" OWNER_PUBLIC_KEY_MODULUS=$(get_modulus "$OWNER_PUBLIC_KEY") printf "\n### Root owner WebID public key modulus: %s\n" "$OWNER_PUBLIC_KEY_MODULUS" - export OWNER_COMMON_NAME OWNER_URI OWNER_DOC_URI OWNER_PUBLIC_KEY_MODULUS OWNER_KEY_UUID SECRETARY_URI + export OWNER_COMMON_NAME OWNER_URI OWNER_DOC_URI OWNER_KEY_DOC_URI OWNER_KEY_URI OWNER_PUBLIC_KEY_MODULUS SECRETARY_URI envsubst < root-owner.trig.template > "$OWNER_DATASET_PATH" fi @@ -466,14 +468,53 @@ if [ ! -f "$SECRETARY_PUBLIC_KEY" ]; then SECRETARY_DOC_URI="${ADMIN_BASE_URI}acl/agents/${SECRETARY_UUID}/" SECRETARY_KEY_UUID=$(uuidgen | tr '[:upper:]' '[:lower:]') # lowercase + SECRETARY_KEY_DOC_URI="${ADMIN_BASE_URI}acl/public-keys/${SECRETARY_KEY_UUID}/" + SECRETARY_KEY_URI="${SECRETARY_KEY_DOC_URI}#this" SECRETARY_PUBLIC_KEY_MODULUS=$(get_modulus "$SECRETARY_PUBLIC_KEY") printf "\n### Secretary WebID public key modulus: %s\n" "$SECRETARY_PUBLIC_KEY_MODULUS" - export SECRETARY_URI SECRETARY_DOC_URI SECRETARY_PUBLIC_KEY_MODULUS SECRETARY_KEY_UUID + export SECRETARY_URI SECRETARY_DOC_URI SECRETARY_KEY_DOC_URI SECRETARY_KEY_URI SECRETARY_PUBLIC_KEY_MODULUS envsubst < root-secretary.trig.template > "$SECRETARY_DATASET_PATH" fi +mkdir -p /var/linkeddatahub/based-datasets + +# If certs already exist, extract metadata from existing .trig files using SPARQL and create .nq files +printf "\n### Reading owner metadata from existing file: %s\n" /var/linkeddatahub/based-datasets/root-owner.nq + +trig --base="$ADMIN_BASE_URI" --output=nq "$OWNER_DATASET_PATH" > /var/linkeddatahub/based-datasets/root-owner.nq + +owner_metadata=$(sparql --data=/var/linkeddatahub/based-datasets/root-owner.nq --query=select-agent-metadata.rq --results=XML) + +OWNER_URI=$(echo "$owner_metadata" | xmlstarlet sel -N srx="http://www.w3.org/2005/sparql-results#" -T -t -v "/srx:sparql/srx:results/srx:result/srx:binding[@name='agent']/srx:uri") +OWNER_DOC_URI=$(echo "$owner_metadata" | xmlstarlet sel -N srx="http://www.w3.org/2005/sparql-results#" -T -t -v "/srx:sparql/srx:results/srx:result/srx:binding[@name='doc']/srx:uri") +OWNER_KEY_URI=$(echo "$owner_metadata" | xmlstarlet sel -N srx="http://www.w3.org/2005/sparql-results#" -T -t -v "/srx:sparql/srx:results/srx:result/srx:binding[@name='key']/srx:uri") +OWNER_KEY_DOC_URI=$(echo "$OWNER_KEY_URI" | sed 's|#this$||') +OWNER_KEY_URI="${OWNER_KEY_DOC_URI}#this" + +printf "\n### Extracted OWNER_URI: %s\n" "$OWNER_URI" +printf "\n### Extracted OWNER_DOC_URI: %s\n" "$OWNER_DOC_URI" +printf "\n### Extracted OWNER_KEY_URI: %s\n" "$OWNER_KEY_URI" +printf "\n### Extracted OWNER_KEY_DOC_URI: %s\n" "$OWNER_KEY_DOC_URI" + +printf "\n### Reading secretary metadata from existing file: %s\n" /var/linkeddatahub/based-datasets/root-secretary.nq + +trig --base="$ADMIN_BASE_URI" --output=nq "$SECRETARY_DATASET_PATH" > /var/linkeddatahub/based-datasets/root-secretary.nq + +secretary_metadata=$(sparql --data=/var/linkeddatahub/based-datasets/root-secretary.nq --query=select-agent-metadata.rq --results=XML) + +SECRETARY_URI=$(echo "$secretary_metadata" | xmlstarlet sel -N 
srx="http://www.w3.org/2005/sparql-results#" -T -t -v "/srx:sparql/srx:results/srx:result/srx:binding[@name='agent']/srx:uri") +SECRETARY_DOC_URI=$(echo "$secretary_metadata" | xmlstarlet sel -N srx="http://www.w3.org/2005/sparql-results#" -T -t -v "/srx:sparql/srx:results/srx:result/srx:binding[@name='doc']/srx:uri") +SECRETARY_KEY_URI=$(echo "$secretary_metadata" | xmlstarlet sel -N srx="http://www.w3.org/2005/sparql-results#" -T -t -v "/srx:sparql/srx:results/srx:result/srx:binding[@name='key']/srx:uri") +SECRETARY_KEY_DOC_URI=$(echo "$SECRETARY_KEY_URI" | sed 's|#this$||') +SECRETARY_KEY_URI="${SECRETARY_KEY_DOC_URI}#this" + +printf "\n### Extracted SECRETARY_URI: %s\n" "$SECRETARY_URI" +printf "\n### Extracted SECRETARY_DOC_URI: %s\n" "$SECRETARY_DOC_URI" +printf "\n### Extracted SECRETARY_KEY_URI: %s\n" "$SECRETARY_KEY_URI" +printf "\n### Extracted SECRETARY_KEY_DOC_URI: %s\n" "$SECRETARY_KEY_DOC_URI" + # Note: LOAD_DATASETS check is now done per-app inside the loop # base the $CONTEXT_DATASET @@ -604,8 +645,18 @@ for app in "${apps[@]}"; do # Create app-specific subfolder based on end-user origin app_folder=$(echo "$end_user_origin" | sed 's|https://||' | sed 's|http://||' | sed 's|[:/]|-|g') - # Check if this specific app's datasets have been loaded - if [ ! -d "/var/linkeddatahub/based-datasets/${app_folder}" ]; then + # Determine whether to load datasets for this app + load_datasets_for_app="$LOAD_DATASETS" + if [ -z "$load_datasets_for_app" ]; then + if [ ! -d "/var/linkeddatahub/based-datasets/${app_folder}" ]; then + load_datasets_for_app=true + else + load_datasets_for_app=false + fi + fi + + # Check if this specific app's datasets should be loaded + if [ "$load_datasets_for_app" = true ]; then printf "\n### Loading datasets for app: %s\n" "$app_folder" mkdir -p "/var/linkeddatahub/based-datasets/${app_folder}" @@ -663,15 +714,53 @@ for app in "${apps[@]}"; do printf "\n### Loading namespace ontology into the admin triplestore...\n" append_quads "$admin_quad_store_url" "$admin_service_auth_user" "$admin_service_auth_pwd" "/var/linkeddatahub/based-datasets/${app_folder}/namespace-ontology.nq" "application/n-quads" - trig --base="$ADMIN_BASE_URI" --output=nq "$OWNER_DATASET_PATH" > /var/linkeddatahub/based-datasets/root-owner.nq + # Load full owner/secretary metadata (agent + key) only for root app + if [ "$end_user_origin" = "$ORIGIN" ]; then + cat /var/linkeddatahub/based-datasets/root-owner.nq + cat /var/linkeddatahub/based-datasets/root-secretary.nq + + printf "\n### Uploading the metadata of the owner agent...\n\n" + append_quads "$admin_quad_store_url" "$admin_service_auth_user" "$admin_service_auth_pwd" /var/linkeddatahub/based-datasets/root-owner.nq "application/n-quads" + + printf "\n### Uploading the metadata of the secretary agent...\n\n" + append_quads "$admin_quad_store_url" "$admin_service_auth_user" "$admin_service_auth_pwd" /var/linkeddatahub/based-datasets/root-secretary.nq "application/n-quads" + fi + + # Load owner/secretary authorizations for this app (with app-specific UUIDs) + # Note: OWNER_URI and SECRETARY_URI reference the root admin URIs + owner_auth_dataset_path="/var/linkeddatahub/datasets/${app_folder}/owner-authorization.trig" + mkdir -p "$(dirname "$owner_auth_dataset_path")" + + OWNER_AUTH_UUID=$(uuidgen | tr '[:upper:]' '[:lower:]') + OWNER_AUTH_DOC_URI="${admin_origin}/acl/authorizations/${OWNER_AUTH_UUID}/" + OWNER_AUTH_URI="${OWNER_AUTH_DOC_URI}#auth" + + export OWNER_URI OWNER_DOC_URI OWNER_KEY_DOC_URI OWNER_AUTH_DOC_URI OWNER_AUTH_URI + 
envsubst < root-owner-authorization.trig.template > "$owner_auth_dataset_path" + + cat "$owner_auth_dataset_path" + + trig --base="${admin_origin}/" --output=nq "$owner_auth_dataset_path" > "/var/linkeddatahub/based-datasets/${app_folder}/owner-authorization.nq" + + printf "\n### Uploading owner authorizations for this app...\n\n" + append_quads "$admin_quad_store_url" "$admin_service_auth_user" "$admin_service_auth_pwd" "/var/linkeddatahub/based-datasets/${app_folder}/owner-authorization.nq" "application/n-quads" + + secretary_auth_dataset_path="/var/linkeddatahub/datasets/${app_folder}/secretary-authorization.trig" + mkdir -p "$(dirname "$secretary_auth_dataset_path")" + + SECRETARY_AUTH_UUID=$(uuidgen | tr '[:upper:]' '[:lower:]') + SECRETARY_AUTH_DOC_URI="${admin_origin}/acl/authorizations/${SECRETARY_AUTH_UUID}/" + SECRETARY_AUTH_URI="${SECRETARY_AUTH_DOC_URI}#auth" + + export SECRETARY_URI SECRETARY_DOC_URI SECRETARY_KEY_DOC_URI SECRETARY_AUTH_DOC_URI SECRETARY_AUTH_URI + envsubst < root-secretary-authorization.trig.template > "$secretary_auth_dataset_path" - printf "\n### Uploading the metadata of the owner agent...\n\n" - append_quads "$admin_quad_store_url" "$admin_service_auth_user" "$admin_service_auth_pwd" /var/linkeddatahub/based-datasets/root-owner.nq "application/n-quads" + cat "$secretary_auth_dataset_path" - trig --base="$ADMIN_BASE_URI" --output=nq "$SECRETARY_DATASET_PATH" > /var/linkeddatahub/based-datasets/root-secretary.nq + trig --base="${admin_origin}/" --output=nq "$secretary_auth_dataset_path" > "/var/linkeddatahub/based-datasets/${app_folder}/secretary-authorization.nq" - printf "\n### Uploading the metadata of the secretary agent...\n\n" - append_quads "$admin_quad_store_url" "$admin_service_auth_user" "$admin_service_auth_pwd" /var/linkeddatahub/based-datasets/root-secretary.nq "application/n-quads" + printf "\n### Uploading secretary authorizations for this app...\n\n" + append_quads "$admin_quad_store_url" "$admin_service_auth_user" "$admin_service_auth_pwd" "/var/linkeddatahub/based-datasets/${app_folder}/secretary-authorization.nq" "application/n-quads" fi done diff --git a/platform/root-owner-authorization.trig.template b/platform/root-owner-authorization.trig.template new file mode 100644 index 000000000..35357cd70 --- /dev/null +++ b/platform/root-owner-authorization.trig.template @@ -0,0 +1,33 @@ +@prefix rdfs: . +@prefix xsd: . +@prefix acl: . +@prefix cert: . +@prefix dh: . +@prefix sioc: . +@prefix foaf: . +@prefix dct: . + +# AUTHORIZATIONS + +# root owner is a member of the owners group + + +{ + + foaf:member <${OWNER_URI}> . +} + +<${OWNER_AUTH_DOC_URI}> +{ + + <${OWNER_AUTH_DOC_URI}> a dh:Item ; + foaf:primaryTopic <${OWNER_AUTH_URI}> ; + sioc:has_container ; + dct:title "Public owner's WebID" . + + <${OWNER_AUTH_URI}> a acl:Authorization ; + acl:accessTo <${OWNER_DOC_URI}>, <${OWNER_KEY_DOC_URI}> ; + acl:mode acl:Read ; + acl:agentClass foaf:Agent, acl:AuthenticatedAgent . + +} diff --git a/platform/root-owner.trig.template b/platform/root-owner.trig.template index 5a0196568..64567bd60 100644 --- a/platform/root-owner.trig.template +++ b/platform/root-owner.trig.template @@ -20,7 +20,7 @@ <${OWNER_URI}> a foaf:Agent ; foaf:name "${OWNER_COMMON_NAME}" ; foaf:mbox ; - cert:key . + cert:key <${OWNER_KEY_URI}> . 
# secretary delegates the owner agent @@ -30,42 +30,17 @@ # PUBLIC KEY - +<${OWNER_KEY_DOC_URI}> { - a dh:Item ; - foaf:primaryTopic ; + <${OWNER_KEY_DOC_URI}> a dh:Item ; + foaf:primaryTopic <${OWNER_KEY_URI}> ; sioc:has_container ; dct:title "${OWNER_COMMON_NAME}" . - a cert:PublicKey ; + <${OWNER_KEY_URI}> a cert:PublicKey ; rdfs:label "${OWNER_COMMON_NAME}" ; cert:modulus "${OWNER_PUBLIC_KEY_MODULUS}"^^xsd:hexBinary; cert:exponent 65537 . -} - -# AUTHORIZATIONS - -# root owner is a member of the owners group - - -{ - - foaf:member <${OWNER_URI}> . -} - - # TO-DO: use $OWNER_AUTH_UUID -{ - - a dh:Item ; - foaf:primaryTopic ; - sioc:has_container ; - dct:title "Public owner's WebID" . - - a acl:Authorization ; - acl:accessTo <${OWNER_DOC_URI}>, ; - acl:mode acl:Read ; - acl:agentClass foaf:Agent, acl:AuthenticatedAgent . - } \ No newline at end of file diff --git a/platform/root-secretary-authorization.trig.template b/platform/root-secretary-authorization.trig.template new file mode 100644 index 000000000..4bedeb5cb --- /dev/null +++ b/platform/root-secretary-authorization.trig.template @@ -0,0 +1,34 @@ +@prefix rdfs: . +@prefix xsd: . +@prefix acl: . +@prefix cert: . +@prefix dh: . +@prefix sioc: . +@prefix foaf: . +@prefix dct: . + +# AUTHORIZATION + +# secretary is a member of the writers group + + +{ + + foaf:member <${SECRETARY_URI}> . + +} + +<${SECRETARY_AUTH_DOC_URI}> +{ + + <${SECRETARY_AUTH_DOC_URI}> a dh:Item ; + foaf:primaryTopic <${SECRETARY_AUTH_URI}> ; + sioc:has_container ; + dct:title "Public secretary's WebID" . + + <${SECRETARY_AUTH_URI}> a acl:Authorization ; + acl:accessTo <${SECRETARY_DOC_URI}>, <${SECRETARY_KEY_DOC_URI}> ; + acl:mode acl:Read ; + acl:agentClass foaf:Agent, acl:AuthenticatedAgent . + +} diff --git a/platform/root-secretary.trig.template b/platform/root-secretary.trig.template index a6579251c..4aa9a333b 100644 --- a/platform/root-secretary.trig.template +++ b/platform/root-secretary.trig.template @@ -19,49 +19,23 @@ <${SECRETARY_URI}> a foaf:Agent ; foaf:name "LinkedDataHub" ; - cert:key . + cert:key <${SECRETARY_KEY_URI}> . } # PUBLIC KEY - +<${SECRETARY_KEY_DOC_URI}> { - a dh:Item ; - foaf:primaryTopic ; + <${SECRETARY_KEY_DOC_URI}> a dh:Item ; + foaf:primaryTopic <${SECRETARY_KEY_URI}> ; sioc:has_container ; dct:title "LinkedDataHub" . - a cert:PublicKey ; + <${SECRETARY_KEY_URI}> a cert:PublicKey ; rdfs:label "LinkedDataHub" ; cert:modulus "${SECRETARY_PUBLIC_KEY_MODULUS}"^^xsd:hexBinary; cert:exponent 65537 . -} - -# AUTHORIZATION - -# secretary is a member of the writers group - - -{ - - foaf:member <${SECRETARY_URI}> . - -} - - # TO-DO: use $SECRETARY_AUTH_UUID -{ - - a dh:Item ; - foaf:primaryTopic ; - sioc:has_container ; - dct:title "Public secretary's WebID" . - - a acl:Authorization ; - acl:accessTo <${SECRETARY_DOC_URI}>, ; - acl:mode acl:Read ; - acl:agentClass foaf:Agent, acl:AuthenticatedAgent . - } \ No newline at end of file diff --git a/platform/select-agent-metadata.rq b/platform/select-agent-metadata.rq new file mode 100644 index 000000000..bb01ebe55 --- /dev/null +++ b/platform/select-agent-metadata.rq @@ -0,0 +1,13 @@ +PREFIX foaf: +PREFIX cert: +SELECT ?agent ?doc ?key +WHERE { +GRAPH ?g1 { + ?agent a foaf:Agent . + ?agent cert:key ?key . +} +GRAPH ?g2 { + ?doc foaf:primaryTopic ?agent . 
+} +} +LIMIT 1 diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/io/ValidatingModelProvider.java b/src/main/java/com/atomgraph/linkeddatahub/server/io/ValidatingModelProvider.java index 15307e87f..db5ac8908 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/io/ValidatingModelProvider.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/io/ValidatingModelProvider.java @@ -236,31 +236,34 @@ public Resource processRead(Resource resource) // this logic really belongs in a throw new SPINConstraintViolationException(cvs, resource.getModel()); } } - - if (getApplication().get().canAs(AdminApplication.class) && resource.hasProperty(RDF.type, OWL.Ontology)) + + if (getApplication().isPresent() && getApplication().get().canAs(AdminApplication.class) && resource.hasProperty(RDF.type, OWL.Ontology)) { // clear cached OntModel if ontology is updated. TO-DO: send event instead getSystem().getOntModelSpec().getDocumentManager().getFileManager().removeCacheModel(resource.getURI()); } - - if (resource.hasProperty(RDF.type, ACL.Authorization)) + + if (getApplication().isPresent() && resource.hasProperty(RDF.type, ACL.Authorization)) { LinkedDataClient ldc = LinkedDataClient.create(getSystem().getClient(), getSystem().getMediaTypes()). delegation(getUriInfo().getBaseUri(), getAgentContextProvider().get().orElse(null)); getSystem().getEventBus().post(new com.atomgraph.linkeddatahub.server.event.AuthorizationCreated(getEndUserApplication(), ldc, resource)); } - + return resource; } @Override public Model processWrite(Model model) { + // If no application (e.g., error responses), skip mbox processing + if (!getApplication().isPresent()) return super.processWrite(model); + // show foaf:mbox in end-user apps - if (getApplication().get().canAs(EndUserApplication.class)) return model; + if (getApplication().get().canAs(EndUserApplication.class)) return super.processWrite(model); // show foaf:mbox for authenticated agents - if (getSecurityContext() != null && getSecurityContext().getUserPrincipal() instanceof Agent) return model; + if (getSecurityContext() != null && getSecurityContext().getUserPrincipal() instanceof Agent) return super.processWrite(model); // show foaf:mbox_sha1sum for all other agents (in admin apps) return super.processWrite(hashMboxes(getMessageDigest()).apply(model)); // apply processing from superclasses diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/layout.xsl b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/layout.xsl index 09a8dbc35..0692980c9 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/layout.xsl +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/layout.xsl @@ -268,10 +268,12 @@ LIMIT 100 - <xsl:value-of> - <xsl:apply-templates select="$lapp:Application//*[ldh:origin/@rdf:resource = $ldh:origin]" mode="ac:label"/> - </xsl:value-of> - <xsl:text> - </xsl:text> + <xsl:if test="$lapp:Application//*[ldh:origin/@rdf:resource = $ldh:origin]"> + <xsl:value-of> + <xsl:apply-templates select="$lapp:Application//*[ldh:origin/@rdf:resource = $ldh:origin]" mode="ac:label"/> + </xsl:value-of> + <xsl:text> - </xsl:text> + </xsl:if> <xsl:apply-templates mode="#current"/> From 9c3ba1b41cf4d6e3299d075af611ec275b3776dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martynas=20Jusevi=C4=8Dius?= Date: Thu, 23 Oct 2025 22:42:55 +0200 Subject: [PATCH 6/7] Test fix --- http-tests/admin/model/add-property-constraint.sh | 12 ++++++++---- 1 file changed, 8 
insertions(+), 4 deletions(-)

diff --git a/http-tests/admin/model/add-property-constraint.sh b/http-tests/admin/model/add-property-constraint.sh
index 7571a14fb..c5e179841 100755
--- a/http-tests/admin/model/add-property-constraint.sh
+++ b/http-tests/admin/model/add-property-constraint.sh
@@ -58,12 +58,16 @@ turtle+="_:item a <${namespace_doc}#ConstrainedClass> .\n"
 turtle+="_:item dct:title \"Failure\" .\n"
 turtle+="_:item sioc:has_container <${END_USER_BASE_URL}> .\n"
 
+# Using direct curl instead of put.sh because put.sh uses -f flag which exits on 4xx errors,
+# but this test expects to capture the 422 response
 response=$(echo -e "$turtle" \
 | turtle --base="$END_USER_BASE_URL" \
-| put.sh \
-  -f "$OWNER_CERT_FILE" \
-  -p "$OWNER_CERT_PWD" \
-  --content-type "text/turtle" \
+| curl -k -v \
+  -E "$OWNER_CERT_FILE":"$OWNER_CERT_PWD" \
+  -d @- \
+  -X PUT \
+  -H "Content-Type: text/turtle" \
+  -H "Accept: text/turtle" \
   "$END_USER_BASE_URL" \
 2>&1) # redirect output from stderr to stdout
 

From 8bcff34be5b3b851e89690bf5bde4e3686dfad31 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Martynas=20Jusevi=C4=8Dius?=
Date: Thu, 23 Oct 2025 23:17:08 +0200
Subject: [PATCH 7/7] Error flags in CLI scripts

---
 bin/add-generic-service.sh | 1 +
 bin/add-result-set-chart.sh | 1 +
 bin/add-select.sh | 1 +
 bin/add-view.sh | 1 +
 bin/admin/acl/add-agent-to-group.sh | 1 +
 bin/admin/acl/create-group.sh | 1 +
 bin/admin/add-ontology-import.sh | 1 +
 bin/admin/model/add-class.sh | 1 +
 bin/admin/model/add-construct.sh | 1 +
 bin/admin/model/add-property-constraint.sh | 1 +
 bin/admin/model/add-restriction.sh | 1 +
 bin/admin/model/add-select.sh | 1 +
 bin/admin/model/create-ontology.sh | 1 +
 bin/content/add-object-block.sh | 1 +
 bin/content/add-xhtml-block.sh | 1 +
 bin/create-container.sh | 1 +
 bin/create-item.sh | 1 +
 bin/imports/create-csv-import.sh | 1 +
 bin/imports/create-file.sh | 1 +
 bin/imports/create-query.sh | 1 +
 bin/imports/create-rdf-import.sh | 1 +
 21 files changed, 21 insertions(+)

diff --git a/bin/add-generic-service.sh b/bin/add-generic-service.sh
index 645c33998..0dd84cdb5 100755
--- a/bin/add-generic-service.sh
+++ b/bin/add-generic-service.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+set -eo pipefail
 
 print_usage()
 {
diff --git a/bin/add-result-set-chart.sh b/bin/add-result-set-chart.sh
index b4a0c7d7e..f93e8fe9b 100755
--- a/bin/add-result-set-chart.sh
+++ b/bin/add-result-set-chart.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+set -eo pipefail
 
 print_usage()
 {
diff --git a/bin/add-select.sh b/bin/add-select.sh
index 0d48ab075..fc54ffb6f 100755
--- a/bin/add-select.sh
+++ b/bin/add-select.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+set -eo pipefail
 
 print_usage()
 {
diff --git a/bin/add-view.sh b/bin/add-view.sh
index 24827c982..826ed6cda 100755
--- a/bin/add-view.sh
+++ b/bin/add-view.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+set -eo pipefail
 
 print_usage()
 {
diff --git a/bin/admin/acl/add-agent-to-group.sh b/bin/admin/acl/add-agent-to-group.sh
index bdd790113..b7e8abb3a 100755
--- a/bin/admin/acl/add-agent-to-group.sh
+++ b/bin/admin/acl/add-agent-to-group.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+set -eo pipefail
 
 print_usage()
 {
diff --git a/bin/admin/acl/create-group.sh b/bin/admin/acl/create-group.sh
index 12972ddf6..dc33cdb19 100755
--- a/bin/admin/acl/create-group.sh
+++ b/bin/admin/acl/create-group.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+set -eo pipefail
 
 print_usage()
 {
diff --git a/bin/admin/add-ontology-import.sh b/bin/admin/add-ontology-import.sh
index f349e6368..df5ce800d 100755
--- a/bin/admin/add-ontology-import.sh
+++ b/bin/admin/add-ontology-import.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+set -eo pipefail
 
 print_usage()
 {
diff --git a/bin/admin/model/add-class.sh b/bin/admin/model/add-class.sh
index 9d8abd639..f1784ad56 100755
--- a/bin/admin/model/add-class.sh
+++ b/bin/admin/model/add-class.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+set -eo pipefail
 
 print_usage()
 {
diff --git a/bin/admin/model/add-construct.sh b/bin/admin/model/add-construct.sh
index b3a00cd1c..675188a5b 100755
--- a/bin/admin/model/add-construct.sh
+++ b/bin/admin/model/add-construct.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+set -eo pipefail
 
 print_usage()
 {
diff --git a/bin/admin/model/add-property-constraint.sh b/bin/admin/model/add-property-constraint.sh
index 05787dc11..0fe5a00d0 100755
--- a/bin/admin/model/add-property-constraint.sh
+++ b/bin/admin/model/add-property-constraint.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+set -eo pipefail
 
 print_usage()
 {
diff --git a/bin/admin/model/add-restriction.sh b/bin/admin/model/add-restriction.sh
index 6284baae9..f5101a9b4 100755
--- a/bin/admin/model/add-restriction.sh
+++ b/bin/admin/model/add-restriction.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+set -eo pipefail
 
 print_usage()
 {
diff --git a/bin/admin/model/add-select.sh b/bin/admin/model/add-select.sh
index db82da765..02cc8d921 100755
--- a/bin/admin/model/add-select.sh
+++ b/bin/admin/model/add-select.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+set -eo pipefail
 
 print_usage()
 {
diff --git a/bin/admin/model/create-ontology.sh b/bin/admin/model/create-ontology.sh
index d691b286d..62cd31b53 100755
--- a/bin/admin/model/create-ontology.sh
+++ b/bin/admin/model/create-ontology.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+set -eo pipefail
 
 print_usage()
 {
diff --git a/bin/content/add-object-block.sh b/bin/content/add-object-block.sh
index 16b64532d..92fd89705 100755
--- a/bin/content/add-object-block.sh
+++ b/bin/content/add-object-block.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+set -eo pipefail
 
 print_usage()
 {
diff --git a/bin/content/add-xhtml-block.sh b/bin/content/add-xhtml-block.sh
index 3af9cf508..c72fac1a2 100755
--- a/bin/content/add-xhtml-block.sh
+++ b/bin/content/add-xhtml-block.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+set -eo pipefail
 
 print_usage()
 {
diff --git a/bin/create-container.sh b/bin/create-container.sh
index 29ef0bdde..fb9d4328d 100755
--- a/bin/create-container.sh
+++ b/bin/create-container.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+set -eo pipefail
 
 print_usage()
 {
diff --git a/bin/create-item.sh b/bin/create-item.sh
index e66398cb7..63d3d19ce 100755
--- a/bin/create-item.sh
+++ b/bin/create-item.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+set -eo pipefail
 
 print_usage()
 {
diff --git a/bin/imports/create-csv-import.sh b/bin/imports/create-csv-import.sh
index f77e41a3b..ffd745575 100755
--- a/bin/imports/create-csv-import.sh
+++ b/bin/imports/create-csv-import.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+set -eo pipefail
 
 print_usage()
 {
diff --git a/bin/imports/create-file.sh b/bin/imports/create-file.sh
index ae1067c33..36413d34c 100755
--- a/bin/imports/create-file.sh
+++ b/bin/imports/create-file.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+set -eo pipefail
 
 print_usage()
 {
diff --git a/bin/imports/create-query.sh b/bin/imports/create-query.sh
index 990edf959..ff9a8eab2 100755
--- a/bin/imports/create-query.sh
+++ b/bin/imports/create-query.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+set -eo pipefail
 
 print_usage()
 {
diff --git a/bin/imports/create-rdf-import.sh b/bin/imports/create-rdf-import.sh
index ccbb24c4a..b51113a96 100755
--- a/bin/imports/create-rdf-import.sh
+++ b/bin/imports/create-rdf-import.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+set -eo pipefail
 
 print_usage()
 {
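
Reviewer note (not part of the patch): PATCH 7/7 prepends set -eo pipefail to every CLI script under bin/, so a failing command, including one in the middle of a pipeline, now aborts the script instead of letting it run on with partial or empty data. The sketch below is a minimal, standalone illustration of that behaviour; the URL and variable names are hypothetical and do not refer to anything in this repository.

    #!/usr/bin/env bash
    # Minimal demo of the error flags added in PATCH 7/7 (not a script from bin/).
    # -e          : exit as soon as any command returns a non-zero status
    # -o pipefail : a pipeline fails if any stage fails, not only the last one
    set -eo pipefail

    # Hypothetical endpoint, used only to trigger a failure.
    url="https://localhost:4443/does-not-exist"

    # Without pipefail the pipeline's status would be that of `wc -l` (0),
    # hiding the curl failure; with pipefail the failure propagates and
    # `set -e` stops the script before it continues with an empty $count.
    count=$(curl -k -f -s "$url" | wc -l)

    echo "Retrieved $count lines"  # not reached when the request fails

The patch itself adds only -e and -o pipefail; stricter options such as -u (error on unset variables) are not part of this change.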