From cbbf229b252f5f96f7d0e0273f212fb637b5e9d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martynas=20Jusevi=C4=8Dius?= Date: Tue, 13 Jan 2026 14:23:59 +0100 Subject: [PATCH 1/7] Extending `DirectGraphStoreImpl` instead of `GraphStoreImpl` --- pom.xml | 4 +- .../linkeddatahub/resource/Generate.java | 124 +++++++++++++----- .../linkeddatahub/resource/Graph.java | 45 ++----- .../linkeddatahub/resource/Transform.java | 4 +- .../linkeddatahub/resource/admin/SignUp.java | 14 +- .../linkeddatahub/resource/upload/Item.java | 10 -- .../linkeddatahub/server/model/Patchable.java | 5 +- ...oreImpl.java => DirectGraphStoreImpl.java} | 20 +-- .../server/model/impl/Dispatcher.java | 5 +- ...oxyResourceBase.java => ProxiedGraph.java} | 8 +- .../model/impl/ProxyResourceBaseTest.java | 16 +-- 11 files changed, 129 insertions(+), 126 deletions(-) rename src/main/java/com/atomgraph/linkeddatahub/server/model/impl/{GraphStoreImpl.java => DirectGraphStoreImpl.java} (94%) rename src/main/java/com/atomgraph/linkeddatahub/server/model/impl/{ProxyResourceBase.java => ProxiedGraph.java} (98%) diff --git a/pom.xml b/pom.xml index d745a488e..4000207a9 100644 --- a/pom.xml +++ b/pom.xml @@ -139,13 +139,13 @@ ${project.groupId} client - 4.2.10 + 4.2.11-SNAPSHOT classes ${project.groupId} client - 4.2.10 + 4.2.11-SNAPSHOT war diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java b/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java index f4a1ccea9..c4e2eb83c 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java @@ -18,11 +18,10 @@ import com.atomgraph.client.util.DataManager; import com.atomgraph.core.MediaTypes; +import com.atomgraph.linkeddatahub.apps.model.Application; import com.atomgraph.linkeddatahub.client.GraphStoreClient; import com.atomgraph.linkeddatahub.imports.QueryLoader; import com.atomgraph.linkeddatahub.model.Service; -import 
com.atomgraph.linkeddatahub.server.filter.response.CacheInvalidationFilter; -import com.atomgraph.linkeddatahub.server.model.impl.GraphStoreImpl; import com.atomgraph.linkeddatahub.server.security.AgentContext; import com.atomgraph.linkeddatahub.server.util.Skolemizer; import com.atomgraph.linkeddatahub.vocabulary.LDH; @@ -41,9 +40,11 @@ import jakarta.ws.rs.InternalServerErrorException; import jakarta.ws.rs.POST; import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.container.ResourceContext; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.Request; import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.Response.Status; import jakarta.ws.rs.core.SecurityContext; import jakarta.ws.rs.core.UriBuilder; import jakarta.ws.rs.core.UriInfo; @@ -56,10 +57,8 @@ import org.apache.jena.rdf.model.ModelFactory; import org.apache.jena.rdf.model.ResIterator; import org.apache.jena.rdf.model.Resource; -import org.apache.jena.rdf.model.ResourceFactory; import org.apache.jena.vocabulary.DCTerms; import org.apache.jena.vocabulary.RDF; -import org.glassfish.jersey.uri.UriComponent; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -68,14 +67,21 @@ * * @author {@literal Martynas Jusevičius } */ -public class Generate extends GraphStoreImpl +public class Generate { private static final Logger log = LoggerFactory.getLogger(Generate.class); + + private final UriInfo uriInfo; + private final MediaTypes mediaTypes; + private final Application application; + private final Optional agentContext; + private final com.atomgraph.linkeddatahub.Application system; + private final ResourceContext resourceContext; /** * Constructs endpoint for container generation. 
- * + * * @param request current request * @param uriInfo current URI info * @param mediaTypes supported media types @@ -87,19 +93,35 @@ public class Generate extends GraphStoreImpl * @param securityContext JAX-RS security context * @param agentContext authenticated agent's context * @param dataManager RDF data manager + * @param resourceContext resource context for creating resources */ @Inject public Generate(@Context Request request, @Context UriInfo uriInfo, MediaTypes mediaTypes, com.atomgraph.linkeddatahub.apps.model.Application application, Optional ontology, Optional service, @Context SecurityContext securityContext, Optional agentContext, @Context Providers providers, com.atomgraph.linkeddatahub.Application system, - DataManager dataManager) + DataManager dataManager, @Context ResourceContext resourceContext) { - super(request, uriInfo, mediaTypes, application, ontology, service, securityContext, agentContext, providers, system); + this.uriInfo = uriInfo; + this.mediaTypes = mediaTypes; + this.application = application; + this.agentContext = agentContext; + this.system = system; + this.resourceContext = resourceContext; } - + + /** + * Generates containers for given classes. + * Expects a model containing a parent container (sioc:has_parent) and one or more class specifications + * with void:class and spin:query properties. Creates a new container for each class with a view based + * on the provided SPARQL SELECT query. 
+ * + * @param model the RDF model containing the generation parameters + * @param defaultGraph whether to use the default graph + * @param graphUri the target graph URI + * @return JAX-RS response indicating success or failure + */ @POST - @Override public Response post(Model model, @QueryParam("default") @DefaultValue("false") Boolean defaultGraph, @QueryParam("graph") URI graphUri) { ResIterator it = model.listSubjectsWithProperty(SIOC.HAS_PARENT); @@ -143,9 +165,10 @@ public Response post(Model model, @QueryParam("default") @DefaultValue("false") service))); new Skolemizer(containerGraphURI.toString()).apply(containerModel); - try (Response containerResponse = super.post(containerModel, false, containerGraphURI)) + // append triples directly to the graph store without doing an HTTP request (and thus no ACL check) + try (Response containerResponse = getResourceContext().getResource(Graph.class).post(containerModel, false, containerGraphURI)) { - if (containerResponse.getStatus() != Response.Status.CREATED.getStatusCode()) + if (!containerResponse.getStatusInfo().getFamily().equals(Status.Family.SUCCESSFUL)) { if (log.isErrorEnabled()) log.error("Cannot create container"); throw new InternalServerErrorException("Cannot create container"); @@ -159,10 +182,7 @@ public Response post(Model model, @QueryParam("default") @DefaultValue("false") } // ban the parent container URI from proxy cache to make sure the next query using it will be fresh (e.g. SELECT that loads children) - try (Response response = ban(getApplication().getService().getBackendProxy(), parent.getURI())) - { - // Response automatically closed by try-with-resources - } + getSystem().ban(getApplication().getService().getBackendProxy(), parent.getURI(), true); return Response.ok().build(); } @@ -211,7 +231,7 @@ public Resource createContainer(Model model, URI graphURI, Resource parent, Stri addLiteral(DCTerms.title, title). addLiteral(DH.slug, UUID.randomUUID().toString()). 
addLiteral(DCTerms.created, Calendar.getInstance()). - addProperty(ResourceFactory.createProperty(RDF.getURI(), "_1"), content); + addProperty(model.createProperty(RDF.getURI(), "_1"), content); // TO-DO: make sure we're creating sequence value larger than the existing ones? } /** @@ -228,20 +248,64 @@ public Resource createView(Model model, Resource query) addProperty(SPIN.query, query); } - /** - * Bans URL from the backend proxy cache. - * - * @param proxy proxy server URL - * @param url banned URL - * @return proxy server response + /** + * Returns the supported media types. + * + * @return media types */ - public Response ban(Resource proxy, String url) + public MediaTypes getMediaTypes() { - if (url == null) throw new IllegalArgumentException("Resource cannot be null"); - - return getSystem().getClient().target(proxy.getURI()).request(). - header(CacheInvalidationFilter.HEADER_NAME, UriComponent.encode(url, UriComponent.Type.UNRESERVED)). // the value has to be URL-encoded in order to match request URLs in Varnish - method("BAN", Response.class); + return mediaTypes; } - + + /** + * Returns the current application. + * + * @return the application + */ + public Application getApplication() + { + return application; + } + + /** + * Returns the current URI info. + * + * @return URI info + */ + public UriInfo getUriInfo() + { + return uriInfo; + } + + /** + * Returns the authenticated agent's context. + * + * @return optional agent context + */ + public Optional getAgentContext() + { + return agentContext; + } + + /** + * Returns the system application. + * + * @return system application + */ + public com.atomgraph.linkeddatahub.Application getSystem() + { + return system; + } + + /** + * Returns the resource context. 
+ * + * @return resource context + */ + public ResourceContext getResourceContext() + { + return resourceContext; + } + } \ No newline at end of file diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/Graph.java b/src/main/java/com/atomgraph/linkeddatahub/resource/Graph.java index d170d7f56..4f6316298 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/Graph.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/Graph.java @@ -27,7 +27,7 @@ import com.atomgraph.linkeddatahub.model.Service; import com.atomgraph.linkeddatahub.server.io.ValidatingModelProvider; import com.atomgraph.linkeddatahub.server.model.Patchable; -import com.atomgraph.linkeddatahub.server.model.impl.GraphStoreImpl; +import com.atomgraph.linkeddatahub.server.model.impl.DirectGraphStoreImpl; import com.atomgraph.linkeddatahub.server.security.AgentContext; import com.atomgraph.linkeddatahub.server.util.PatchUpdateVisitor; import com.atomgraph.linkeddatahub.server.util.Skolemizer; @@ -46,8 +46,6 @@ import jakarta.ws.rs.BadRequestException; import jakarta.ws.rs.Consumes; import jakarta.ws.rs.DELETE; -import jakarta.ws.rs.DefaultValue; -import jakarta.ws.rs.GET; import jakarta.ws.rs.HttpMethod; import jakarta.ws.rs.InternalServerErrorException; import jakarta.ws.rs.NotFoundException; @@ -55,7 +53,6 @@ import jakarta.ws.rs.PATCH; import jakarta.ws.rs.POST; import jakarta.ws.rs.PUT; -import jakarta.ws.rs.QueryParam; import jakarta.ws.rs.WebApplicationException; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.HttpHeaders; @@ -120,7 +117,7 @@ * * @author {@literal Martynas Jusevičius } */ -public class Graph extends GraphStoreImpl implements Patchable +public class Graph extends DirectGraphStoreImpl implements Patchable { private static final Logger log = LoggerFactory.getLogger(Graph.class); @@ -163,17 +160,10 @@ public Graph(@Context Request request, @Context UriInfo uriInfo, MediaTypes medi !secretaryDocURI.equals(uri)) allowedMethods.add(HttpMethod.DELETE); 
} - - @Override - @GET - public Response get(@QueryParam("default") @DefaultValue("false") Boolean defaultGraph, @QueryParam("graph") URI graphUriUnused) - { - return super.get(false, getURI()); - } @Override @POST - public Response post(Model model, @QueryParam("default") @DefaultValue("false") Boolean defaultGraph, @QueryParam("graph") URI graphUriUnused) + public Response post(Model model) { if (log.isTraceEnabled()) log.trace("POST Graph Store request with RDF payload: {} payload size(): {}", model, model.size()); @@ -206,7 +196,7 @@ public Response post(Model model, @QueryParam("default") @DefaultValue("false") @Override @PUT // the AuthorizationFilter only allows creating new child URIs for existing containers (i.e. there has to be a .. container already) - public Response put(Model model, @QueryParam("default") @DefaultValue("false") Boolean defaultGraph, @QueryParam("graph") URI graphUriUnused) + public Response put(Model model) { if (log.isTraceEnabled()) log.trace("PUT Graph Store request with RDF payload: {} payload size(): {}", model, model.size()); @@ -311,12 +301,11 @@ public Response put(Model model, @QueryParam("default") @DefaultValue("false") B * The GRAPH keyword is therefore not allowed in the update string. * * @param updateRequest SPARQL update - * @param graphUriUnused named graph URI (unused) * @return response response object */ @PATCH @Override - public Response patch(UpdateRequest updateRequest, @QueryParam("graph") URI graphUriUnused) + public Response patch(UpdateRequest updateRequest) { if (updateRequest == null) throw new BadRequestException("SPARQL update not specified"); if (log.isDebugEnabled()) log.debug("PATCH request on named graph with URI: {}", getURI()); @@ -420,13 +409,11 @@ public Response options() * Files are written to storage before the RDF data is passed to the default POST handler method. 
* * @param multiPart multipart form data - * @param defaultGraph true if default graph is requested - * @param graphUriUnused named graph URI (unused) * @return HTTP response */ @POST @Consumes(MediaType.MULTIPART_FORM_DATA) - public Response postMultipart(FormDataMultiPart multiPart, @QueryParam("default") @DefaultValue("false") Boolean defaultGraph, @QueryParam("graph") URI graphUriUnused) + public Response postMultipart(FormDataMultiPart multiPart) { if (log.isDebugEnabled()) log.debug("MultiPart fields: {} body parts: {}", multiPart.getFields(), multiPart.getBodyParts()); @@ -464,13 +451,11 @@ public Response postMultipart(FormDataMultiPart multiPart, @QueryParam("default" * Files are written to storage before the RDF data is passed to the default PUT handler method. * * @param multiPart multipart form data - * @param defaultGraph true if default graph is requested - * @param graphUriUnused named graph URI (unused) * @return HTTP response */ @PUT @Consumes(MediaType.MULTIPART_FORM_DATA) - public Response putMultipart(FormDataMultiPart multiPart, @QueryParam("default") @DefaultValue("false") Boolean defaultGraph, @QueryParam("graph") URI graphUriUnused) + public Response putMultipart(FormDataMultiPart multiPart) { if (log.isDebugEnabled()) log.debug("MultiPart fields: {} body parts: {}", multiPart.getFields(), multiPart.getBodyParts()); @@ -486,7 +471,7 @@ public Response putMultipart(FormDataMultiPart multiPart, @QueryParam("default") int fileCount = writeFiles(model, getFileNameBodyPartMap(multiPart)); if (log.isDebugEnabled()) log.debug("# of files uploaded: {} ", fileCount); - return put(model, defaultGraph, getURI()); // ignore the @QueryParam("graph") value + return put(model, false, getURI()); } catch (URISyntaxException ex) { @@ -503,13 +488,11 @@ public Response putMultipart(FormDataMultiPart multiPart, @QueryParam("default") /** * Implements DELETE method of SPARQL Graph Store Protocol. 
* - * @param defaultGraph true if default graph is requested - * @param graphUriUnused named graph URI (unused) * @return response */ @DELETE @Override - public Response delete(@QueryParam("default") @DefaultValue("false") Boolean defaultGraph, @QueryParam("graph") URI graphUriUnused) + public Response delete() { if (!getAllowedMethods().contains(HttpMethod.DELETE)) throw new WebApplicationException("Cannot delete document", Response.status(Response.Status.METHOD_NOT_ALLOWED).allow(getAllowedMethods()).build()); @@ -865,16 +848,6 @@ public ResponseBuilder evaluatePreconditions(Model model) return getInternalResponse(model, getURI()).evaluatePreconditions(); } - /** - * Returns the named graph URI. - * - * @return graph URI - */ - public URI getURI() - { - return getUriInfo().getAbsolutePath(); - } - /** * Returns SPARQL endpoint accessor. * diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/Transform.java b/src/main/java/com/atomgraph/linkeddatahub/resource/Transform.java index cc449de4f..659450e2d 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/Transform.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/Transform.java @@ -23,7 +23,7 @@ import com.atomgraph.linkeddatahub.imports.QueryLoader; import com.atomgraph.linkeddatahub.model.Service; import com.atomgraph.linkeddatahub.server.io.ValidatingModelProvider; -import com.atomgraph.linkeddatahub.server.model.impl.GraphStoreImpl; +import com.atomgraph.linkeddatahub.server.model.impl.DirectGraphStoreImpl; import com.atomgraph.linkeddatahub.server.security.AgentContext; import com.atomgraph.linkeddatahub.vocabulary.NFO; import com.atomgraph.spinrdf.vocabulary.SPIN; @@ -67,7 +67,7 @@ * * @author {@literal Martynas Jusevičius } */ -public class Transform extends GraphStoreImpl +public class Transform extends DirectGraphStoreImpl { private static final Logger log = LoggerFactory.getLogger(Transform.class); diff --git 
a/src/main/java/com/atomgraph/linkeddatahub/resource/admin/SignUp.java b/src/main/java/com/atomgraph/linkeddatahub/resource/admin/SignUp.java index c8f65f42a..1f53e05bf 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/admin/SignUp.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/admin/SignUp.java @@ -26,7 +26,7 @@ import com.atomgraph.linkeddatahub.model.Service; import com.atomgraph.linkeddatahub.listener.EMailListener; import com.atomgraph.linkeddatahub.server.filter.response.CacheInvalidationFilter; -import com.atomgraph.linkeddatahub.server.model.impl.GraphStoreImpl; +import com.atomgraph.linkeddatahub.server.model.impl.DirectGraphStoreImpl; import com.atomgraph.linkeddatahub.server.security.AgentContext; import com.atomgraph.linkeddatahub.server.util.MessageBuilder; import com.atomgraph.linkeddatahub.server.util.Skolemizer; @@ -101,7 +101,7 @@ * * @author Martynas Jusevičius {@literal } */ -public class SignUp extends GraphStoreImpl +public class SignUp extends DirectGraphStoreImpl { private static final Logger log = LoggerFactory.getLogger(SignUp.class); @@ -519,16 +519,6 @@ public Service getAgentService() { return getApplication().getService(); } - - /** - * Returns URI of this resource. - * - * @return resource URI - */ - public URI getURI() - { - return getUriInfo().getAbsolutePath(); - } /** * Returns the number of days until the WebID certificate expires. diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/upload/Item.java b/src/main/java/com/atomgraph/linkeddatahub/resource/upload/Item.java index 41876ac2d..9c037b768 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/upload/Item.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/upload/Item.java @@ -292,16 +292,6 @@ public Model describe() return getService().getSPARQLClient().loadModel(QueryFactory.create("DESCRIBE <" + getURI() + ">")); } - /** - * Returns URI of this file. 
- * - * @return file URI - */ - public URI getURI() - { - return getUriInfo().getAbsolutePath(); - } - /** * Returns RDF resource of this file. * diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/model/Patchable.java b/src/main/java/com/atomgraph/linkeddatahub/server/model/Patchable.java index 773074251..9c60b8c96 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/model/Patchable.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/model/Patchable.java @@ -16,9 +16,7 @@ */ package com.atomgraph.linkeddatahub.server.model; -import java.net.URI; import jakarta.ws.rs.PATCH; -import jakarta.ws.rs.QueryParam; import jakarta.ws.rs.core.Response; import org.apache.jena.update.UpdateRequest; @@ -34,10 +32,9 @@ public interface Patchable * Handles PATCH request.SPARQL update is used as the patch format. * * @param updateRequest SPARQL update - * @param graphUri named graph URI * @return response * @see HTTP PATCH */ - @PATCH Response patch(UpdateRequest updateRequest, @QueryParam("graph") URI graphUri); + @PATCH Response patch(UpdateRequest updateRequest); } diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/GraphStoreImpl.java b/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/DirectGraphStoreImpl.java similarity index 94% rename from src/main/java/com/atomgraph/linkeddatahub/server/model/impl/GraphStoreImpl.java rename to src/main/java/com/atomgraph/linkeddatahub/server/model/impl/DirectGraphStoreImpl.java index 1c25cd2a2..5d5a960e2 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/GraphStoreImpl.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/DirectGraphStoreImpl.java @@ -52,17 +52,16 @@ * * @author Martynas Jusevičius {@literal } */ -public abstract class GraphStoreImpl extends com.atomgraph.core.model.impl.GraphStoreImpl +public abstract class DirectGraphStoreImpl extends com.atomgraph.core.model.impl.DirectGraphStoreImpl { - private static final Logger log = 
LoggerFactory.getLogger(GraphStoreImpl.class); + private static final Logger log = LoggerFactory.getLogger(DirectGraphStoreImpl.class); /** * The relative path of the content-addressed file container. */ public static final String UPLOADS_PATH = "uploads"; - private final UriInfo uriInfo; private final com.atomgraph.linkeddatahub.apps.model.Application application; private final Ontology ontology; private final Service service; @@ -90,15 +89,14 @@ public abstract class GraphStoreImpl extends com.atomgraph.core.model.impl.Graph * @param system system application */ @Inject - public GraphStoreImpl(@Context Request request, @Context UriInfo uriInfo, MediaTypes mediaTypes, + public DirectGraphStoreImpl(@Context Request request, @Context UriInfo uriInfo, MediaTypes mediaTypes, com.atomgraph.linkeddatahub.apps.model.Application application, Optional ontology, Optional service, @Context SecurityContext securityContext, Optional agentContext, @Context Providers providers, com.atomgraph.linkeddatahub.Application system) { - super(request, service.get(), mediaTypes); + super(request, service.get(), mediaTypes, uriInfo); if (ontology.isEmpty()) throw new InternalServerErrorException("Ontology is not specified"); if (service.isEmpty()) throw new InternalServerErrorException("Service is not specified"); - this.uriInfo = uriInfo; this.application = application; this.ontology = ontology.get(); this.service = service.get(); @@ -218,16 +216,6 @@ public MessageDigest getMessageDigest() { return messageDigest; } - - /** - * Returns the request URI information. - * - * @return URI info - */ - public UriInfo getUriInfo() - { - return uriInfo; - } /** * Returns the current application. 
diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/Dispatcher.java b/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/Dispatcher.java index 451dc874f..76a0a0026 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/Dispatcher.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/Dispatcher.java @@ -82,12 +82,12 @@ public Optional getProxyClass() if (getUriInfo().getQueryParameters().containsKey(AC.uri.getLocalName())) { if (log.isDebugEnabled()) log.debug("No Application matched request URI <{}>, dispatching to ProxyResourceBase", getUriInfo().getQueryParameters().getFirst(AC.uri.getLocalName())); - return Optional.of(ProxyResourceBase.class); + return Optional.of(ProxiedGraph.class); } if (getDataset().isPresent()) { if (log.isDebugEnabled()) log.debug("Serving request URI <{}> from Dataset <{}>, dispatching to ProxyResourceBase", getUriInfo().getAbsolutePath(), getDataset().get()); - return Optional.of(ProxyResourceBase.class); + return Optional.of(ProxiedGraph.class); } return Optional.empty(); @@ -263,6 +263,7 @@ public Class getSettingsEndpoint() /** * Returns the default JAX-RS resource class. + * Only directly identified access to named graphs is allowed (the Graph Store Protocol endpoint is not exposed). 
* * @return resource class */ diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/ProxyResourceBase.java b/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/ProxiedGraph.java similarity index 98% rename from src/main/java/com/atomgraph/linkeddatahub/server/model/impl/ProxyResourceBase.java rename to src/main/java/com/atomgraph/linkeddatahub/server/model/impl/ProxiedGraph.java index 5665a8e72..02eb59f62 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/ProxyResourceBase.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/ProxiedGraph.java @@ -83,10 +83,10 @@ * * @author {@literal Martynas Jusevičius } */ -public class ProxyResourceBase extends com.atomgraph.client.model.impl.ProxyResourceBase +public class ProxiedGraph extends com.atomgraph.client.model.impl.ProxiedGraph { - private static final Logger log = LoggerFactory.getLogger(ProxyResourceBase.class); + private static final Logger log = LoggerFactory.getLogger(ProxiedGraph.class); private final UriInfo uriInfo; private final ContainerRequestContext crc; @@ -117,7 +117,7 @@ public class ProxyResourceBase extends com.atomgraph.client.model.impl.ProxyReso * @param dataset optional dataset */ @Inject - public ProxyResourceBase(@Context UriInfo uriInfo, @Context Request request, @Context HttpHeaders httpHeaders, MediaTypes mediaTypes, + public ProxiedGraph(@Context UriInfo uriInfo, @Context Request request, @Context HttpHeaders httpHeaders, MediaTypes mediaTypes, com.atomgraph.linkeddatahub.apps.model.Application application, Optional service, @Context SecurityContext securityContext, @Context ContainerRequestContext crc, com.atomgraph.linkeddatahub.Application system, @Context HttpServletRequest httpServletRequest, DataManager dataManager, Optional agentContext, @@ -157,7 +157,7 @@ public ProxyResourceBase(@Context UriInfo uriInfo, @Context Request request, @Co * @param agentContext authenticated agent's context * @param providers registry of 
JAX-RS providers */ - protected ProxyResourceBase(@Context UriInfo uriInfo, @Context Request request, @Context HttpHeaders httpHeaders, MediaTypes mediaTypes, + protected ProxiedGraph(@Context UriInfo uriInfo, @Context Request request, @Context HttpHeaders httpHeaders, MediaTypes mediaTypes, com.atomgraph.linkeddatahub.apps.model.Application application, Optional service, @Context SecurityContext securityContext, @Context ContainerRequestContext crc, @QueryParam("uri") URI uri, @QueryParam("endpoint") URI endpoint, @QueryParam("query") String query, @QueryParam("accept") MediaType accept, @QueryParam("mode") URI mode, diff --git a/src/test/java/com/atomgraph/linkeddatahub/server/model/impl/ProxyResourceBaseTest.java b/src/test/java/com/atomgraph/linkeddatahub/server/model/impl/ProxyResourceBaseTest.java index 7e9cc8c43..4e8687796 100644 --- a/src/test/java/com/atomgraph/linkeddatahub/server/model/impl/ProxyResourceBaseTest.java +++ b/src/test/java/com/atomgraph/linkeddatahub/server/model/impl/ProxyResourceBaseTest.java @@ -33,51 +33,51 @@ public class ProxyResourceBaseTest @Test(expected = IllegalArgumentException.class) public void testNullURI() { - ProxyResourceBase.validateNotInternalURL(null); + ProxiedGraph.validateNotInternalURL(null); } @Test(expected = ForbiddenException.class) public void testLinkLocalIPv4Blocked() { - ProxyResourceBase.validateNotInternalURL(URI.create("http://169.254.1.1:8080/test")); + ProxiedGraph.validateNotInternalURL(URI.create("http://169.254.1.1:8080/test")); } @Test(expected = ForbiddenException.class) public void testPrivateClass10Blocked() { - ProxyResourceBase.validateNotInternalURL(URI.create("http://10.0.0.1:8080/test")); + ProxiedGraph.validateNotInternalURL(URI.create("http://10.0.0.1:8080/test")); } @Test(expected = ForbiddenException.class) public void testPrivateClass172Blocked() { - ProxyResourceBase.validateNotInternalURL(URI.create("http://172.16.0.0:8080/test")); + 
ProxiedGraph.validateNotInternalURL(URI.create("http://172.16.0.0:8080/test")); } @Test(expected = ForbiddenException.class) public void testPrivateClass192Blocked() { - ProxyResourceBase.validateNotInternalURL(URI.create("http://192.168.1.1:8080/test")); + ProxiedGraph.validateNotInternalURL(URI.create("http://192.168.1.1:8080/test")); } @Test public void testExternalURLAllowed() { // Public IPs should be allowed (no exception thrown) - ProxyResourceBase.validateNotInternalURL(URI.create("http://8.8.8.8:80/test")); + ProxiedGraph.validateNotInternalURL(URI.create("http://8.8.8.8:80/test")); } @Test public void testPublicDomainAllowed() { // Public domains should be allowed (no exception thrown) - ProxyResourceBase.validateNotInternalURL(URI.create("http://example.org/test")); + ProxiedGraph.validateNotInternalURL(URI.create("http://example.org/test")); } @Test public void testHTTPSAllowed() { // HTTPS to public domain should be allowed (no exception thrown) - ProxyResourceBase.validateNotInternalURL(URI.create("https://www.w3.org/ns/ldp")); + ProxiedGraph.validateNotInternalURL(URI.create("https://www.w3.org/ns/ldp")); } } From a5be9f5eb8ec0084000a4ddd1d6dfc0bf5fa995b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martynas=20Jusevi=C4=8Dius?= Date: Tue, 13 Jan 2026 14:40:44 +0100 Subject: [PATCH 2/7] `SignUp` endpoint fix --- .../linkeddatahub/resource/admin/SignUp.java | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/admin/SignUp.java b/src/main/java/com/atomgraph/linkeddatahub/resource/admin/SignUp.java index 1f53e05bf..bb161a509 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/admin/SignUp.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/admin/SignUp.java @@ -64,10 +64,7 @@ import jakarta.inject.Inject; import jakarta.mail.MessagingException; import jakarta.servlet.ServletConfig; -import jakarta.ws.rs.DefaultValue; -import jakarta.ws.rs.GET; import 
jakarta.ws.rs.POST; -import jakarta.ws.rs.QueryParam; import jakarta.ws.rs.InternalServerErrorException; import jakarta.ws.rs.core.MediaType; import jakarta.ws.rs.core.Response; @@ -180,16 +177,9 @@ public SignUp(@Context Request request, @Context UriInfo uriInfo, MediaTypes med download = uriInfo.getQueryParameters().containsKey("download"); // debug param that allows downloading the certificate } - @GET - @Override - public Response get(@QueryParam("default") @DefaultValue("false") Boolean defaultGraph, @QueryParam("graph") URI graphUri) - { - return super.get(false, getURI()); - } - @POST @Override - public Response post(Model agentModel, @QueryParam("default") @DefaultValue("false") Boolean defaultGraph, @QueryParam("graph") URI graphUri) + public Response post(Model agentModel) { URI agentGraphUri = getUriInfo().getBaseUriBuilder().path(AGENT_PATH).path("{slug}/").build(UUID.randomUUID().toString()); new Skolemizer(agentGraphUri.toString()).apply(agentModel); From fa6a47882230f08b4a15701a2d3a3b10023c4512 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martynas=20Jusevi=C4=8Dius?= Date: Tue, 13 Jan 2026 15:53:55 +0100 Subject: [PATCH 3/7] Cleaned up REST method annotations --- .../atomgraph/linkeddatahub/resource/Add.java | 16 ++--------- .../linkeddatahub/resource/Generate.java | 28 ++++--------------- .../resource/acl/AccessRequest.java | 12 ++------ .../linkeddatahub/resource/upload/Item.java | 12 ++------ 4 files changed, 14 insertions(+), 54 deletions(-) diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/Add.java b/src/main/java/com/atomgraph/linkeddatahub/resource/Add.java index 0a12c036d..9bc6b93ad 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/Add.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/Add.java @@ -19,7 +19,6 @@ import com.atomgraph.core.MediaTypes; import com.atomgraph.core.vocabulary.SD; import com.atomgraph.linkeddatahub.client.GraphStoreClient; -import com.atomgraph.linkeddatahub.model.Service; import 
com.atomgraph.linkeddatahub.server.security.AgentContext; import java.io.InputStream; import java.io.OutputStream; @@ -27,18 +26,14 @@ import java.util.Optional; import jakarta.inject.Inject; import jakarta.ws.rs.BadRequestException; -import jakarta.ws.rs.DefaultValue; import jakarta.ws.rs.POST; -import jakarta.ws.rs.QueryParam; import jakarta.ws.rs.client.Entity; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.Request; import jakarta.ws.rs.core.Response; -import jakarta.ws.rs.core.SecurityContext; import jakarta.ws.rs.core.StreamingOutput; import jakarta.ws.rs.core.UriInfo; import jakarta.ws.rs.ext.Providers; -import org.apache.jena.ontology.Ontology; import org.apache.jena.rdf.model.Model; import org.apache.jena.rdf.model.ResIterator; import org.apache.jena.rdf.model.Resource; @@ -67,18 +62,13 @@ public class Add * @param request current request * @param uriInfo current URI info * @param mediaTypes supported media types - * @param application matched application - * @param ontology matched application's ontology - * @param service matched application's service * @param providers JAX-RS providers * @param system system application - * @param securityContext JAX-RS security context * @param agentContext authenticated agent's context */ @Inject public Add(@Context Request request, @Context UriInfo uriInfo, MediaTypes mediaTypes, - com.atomgraph.linkeddatahub.apps.model.Application application, Optional ontology, Optional service, - @Context SecurityContext securityContext, Optional agentContext, + Optional agentContext, @Context Providers providers, com.atomgraph.linkeddatahub.Application system) { this.uriInfo = uriInfo; @@ -92,12 +82,10 @@ public Add(@Context Request request, @Context UriInfo uriInfo, MediaTypes mediaT * Expects a model containing a resource with dct:source (source URI) and sd:name (target graph URI) properties. 
* * @param model the RDF model containing the import parameters - * @param defaultGraph whether to import into the default graph - * @param graphUri the target graph URI * @return JAX-RS response with the imported data */ @POST - public Response post(Model model, @QueryParam("default") @DefaultValue("false") Boolean defaultGraph, @QueryParam("graph") URI graphUri) + public Response post(Model model) { ResIterator it = model.listSubjectsWithProperty(DCTerms.source); try diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java b/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java index c4e2eb83c..e7d10a50c 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java @@ -16,12 +16,10 @@ */ package com.atomgraph.linkeddatahub.resource; -import com.atomgraph.client.util.DataManager; import com.atomgraph.core.MediaTypes; import com.atomgraph.linkeddatahub.apps.model.Application; import com.atomgraph.linkeddatahub.client.GraphStoreClient; import com.atomgraph.linkeddatahub.imports.QueryLoader; -import com.atomgraph.linkeddatahub.model.Service; import com.atomgraph.linkeddatahub.server.security.AgentContext; import com.atomgraph.linkeddatahub.server.util.Skolemizer; import com.atomgraph.linkeddatahub.vocabulary.LDH; @@ -36,20 +34,15 @@ import java.util.UUID; import jakarta.inject.Inject; import jakarta.ws.rs.BadRequestException; -import jakarta.ws.rs.DefaultValue; import jakarta.ws.rs.InternalServerErrorException; import jakarta.ws.rs.POST; -import jakarta.ws.rs.QueryParam; import jakarta.ws.rs.container.ResourceContext; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.Request; import jakarta.ws.rs.core.Response; import jakarta.ws.rs.core.Response.Status; -import jakarta.ws.rs.core.SecurityContext; import jakarta.ws.rs.core.UriBuilder; import jakarta.ws.rs.core.UriInfo; -import jakarta.ws.rs.ext.Providers; -import 
org.apache.jena.ontology.Ontology; import org.apache.jena.query.ParameterizedSparqlString; import org.apache.jena.query.Query; import org.apache.jena.query.Syntax; @@ -86,21 +79,14 @@ public class Generate * @param uriInfo current URI info * @param mediaTypes supported media types * @param application matched application - * @param ontology matched application's ontology - * @param service matched application's service - * @param providers JAX-RS providers * @param system system application - * @param securityContext JAX-RS security context * @param agentContext authenticated agent's context - * @param dataManager RDF data manager * @param resourceContext resource context for creating resources */ @Inject public Generate(@Context Request request, @Context UriInfo uriInfo, MediaTypes mediaTypes, - com.atomgraph.linkeddatahub.apps.model.Application application, Optional ontology, Optional service, - @Context SecurityContext securityContext, Optional agentContext, - @Context Providers providers, com.atomgraph.linkeddatahub.Application system, - DataManager dataManager, @Context ResourceContext resourceContext) + com.atomgraph.linkeddatahub.apps.model.Application application, Optional agentContext, + com.atomgraph.linkeddatahub.Application system, @Context ResourceContext resourceContext) { this.uriInfo = uriInfo; this.mediaTypes = mediaTypes; @@ -112,17 +98,15 @@ public Generate(@Context Request request, @Context UriInfo uriInfo, MediaTypes m /** * Generates containers for given classes. - * Expects a model containing a parent container (sioc:has_parent) and one or more class specifications - * with void:class and spin:query properties. Creates a new container for each class with a view based - * on the provided SPARQL SELECT query. + * Expects a model containing a parent container (sioc:has_parent) and one or more class specifications + * with void:class and spin:query properties. 
Creates a new container for each class with a view based + * on the provided SPARQL SELECT query. * * @param model the RDF model containing the generation parameters - * @param defaultGraph whether to use the default graph - * @param graphUri the target graph URI * @return JAX-RS response indicating success or failure */ @POST - public Response post(Model model, @QueryParam("default") @DefaultValue("false") Boolean defaultGraph, @QueryParam("graph") URI graphUri) + public Response post(Model model) { ResIterator it = model.listSubjectsWithProperty(SIOC.HAS_PARENT); try diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/acl/AccessRequest.java b/src/main/java/com/atomgraph/linkeddatahub/resource/acl/AccessRequest.java index 8e2a60699..859efa01f 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/acl/AccessRequest.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/acl/AccessRequest.java @@ -30,12 +30,10 @@ import com.atomgraph.linkeddatahub.vocabulary.SIOC; import jakarta.inject.Inject; import jakarta.servlet.ServletConfig; -import jakarta.ws.rs.DefaultValue; import jakarta.ws.rs.GET; import jakarta.ws.rs.InternalServerErrorException; import jakarta.ws.rs.NotAllowedException; import jakarta.ws.rs.POST; -import jakarta.ws.rs.QueryParam; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.Response; import jakarta.ws.rs.core.UriBuilder; @@ -100,12 +98,10 @@ public AccessRequest(com.atomgraph.linkeddatahub.apps.model.Application applicat /** * Implements the HTTP GET method. 
* - * @param defaultGraph default graph flag - * @param graphUri graph URI * @return response object */ @GET - public Response get(@QueryParam("default") @DefaultValue("false") Boolean defaultGraph, @QueryParam("graph") URI graphUri) + public Response get() { throw new NotAllowedException("GET is not allowed on this endpoint"); } @@ -114,12 +110,10 @@ public Response get(@QueryParam("default") @DefaultValue("false") Boolean defaul * Implements the HTTP POST method for submitting access requests. * * @param model RDF model with access request data - * @param defaultGraph default graph flag - * @param graphUri graph URI * @return response object */ @POST - public Response post(Model model, @QueryParam("default") @DefaultValue("false") Boolean defaultGraph, @QueryParam("graph") URI graphUri) + public Response post(Model model) { ResIterator it = model.listResourcesWithProperty(RDF.type, ACL.Authorization); try @@ -128,7 +122,7 @@ public Response post(Model model, @QueryParam("default") @DefaultValue("false") { Resource authorization = it.next(); - graphUri = getAuthRequestContainerUriBuilder().path(UUID.randomUUID().toString() + "/").build(); // URI of the new access request graph + URI graphUri = getAuthRequestContainerUriBuilder().path(UUID.randomUUID().toString() + "/").build(); // URI of the new access request graph Model requestModel = ModelFactory.createDefaultModel(); Resource agent = authorization.getPropertyResourceValue(ACL.agent); diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/upload/Item.java b/src/main/java/com/atomgraph/linkeddatahub/resource/upload/Item.java index 9c037b768..3e7ae8d44 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/upload/Item.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/upload/Item.java @@ -30,17 +30,14 @@ import com.atomgraph.linkeddatahub.model.Service; import com.atomgraph.linkeddatahub.server.io.FileRangeOutput; import com.atomgraph.linkeddatahub.server.security.AgentContext; 
-import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.Optional; import jakarta.annotation.PostConstruct; import jakarta.inject.Inject; -import jakarta.ws.rs.DefaultValue; import jakarta.ws.rs.GET; import jakarta.ws.rs.NotAcceptableException; import jakarta.ws.rs.NotFoundException; -import jakarta.ws.rs.QueryParam; import jakarta.ws.rs.WebApplicationException; import jakarta.ws.rs.core.EntityTag; import jakarta.ws.rs.core.HttpHeaders; @@ -113,9 +110,9 @@ public void init() @GET @Override - public Response get(@QueryParam("default") @DefaultValue("false") Boolean defaultGraph, @QueryParam("graph") URI graphUri) + public Response get() { - return getResponseBuilder(getResource().getModel(), graphUri).build(); + return getResponseBuilder(getResource().getModel(), getURI()).build(); } @Override @@ -275,10 +272,7 @@ public jakarta.ws.rs.core.MediaType getMediaType() @Override public List getWritableMediaTypes(Class clazz) { - List list = new ArrayList<>(); - list.add(getMediaType()); - - return list; + return List.of(getMediaType()); } /** From f5e8d2e0f3415837721a13f52148aac64f743814 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martynas=20Jusevi=C4=8Dius?= Date: Tue, 13 Jan 2026 15:55:57 +0100 Subject: [PATCH 4/7] More annotation cleanup --- .../atomgraph/linkeddatahub/resource/acl/Access.java | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/acl/Access.java b/src/main/java/com/atomgraph/linkeddatahub/resource/acl/Access.java index 021523308..f22b7378e 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/acl/Access.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/acl/Access.java @@ -18,9 +18,6 @@ import com.atomgraph.client.util.HTMLMediaTypePredicate; import com.atomgraph.core.MediaTypes; -import static com.atomgraph.core.model.SPARQLEndpoint.DEFAULT_GRAPH_URI; -import static 
com.atomgraph.core.model.SPARQLEndpoint.NAMED_GRAPH_URI; -import static com.atomgraph.core.model.SPARQLEndpoint.QUERY; import com.atomgraph.core.util.ModelUtils; import com.atomgraph.linkeddatahub.apps.model.AdminApplication; import com.atomgraph.linkeddatahub.apps.model.Application; @@ -36,7 +33,6 @@ import jakarta.inject.Inject; import jakarta.ws.rs.BadRequestException; import jakarta.ws.rs.GET; -import jakarta.ws.rs.QueryParam; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.EntityTag; import jakarta.ws.rs.core.Request; @@ -47,7 +43,6 @@ import java.util.List; import java.util.Optional; import org.apache.jena.query.ParameterizedSparqlString; -import org.apache.jena.query.Query; import org.apache.jena.query.QuerySolutionMap; import org.apache.jena.query.ResultSetRewindable; import org.apache.jena.rdf.model.Model; @@ -106,14 +101,10 @@ public Access(@Context Request request, @Context UriInfo uriInfo, MediaTypes med /** * Implements the HTTP GET method for retrieving access control information. 
* - * @param unused SPARQL query parameter (unused) - * @param defaultGraphUris default graph URIs - * @param namedGraphUris named graph URIs * @return response with access control data */ @GET - public Response get(@QueryParam(QUERY) Query unused, - @QueryParam(DEFAULT_GRAPH_URI) List defaultGraphUris, @QueryParam(NAMED_GRAPH_URI) List namedGraphUris) + public Response get() { final Agent agent = getAgentContext().map(AgentContext::getAgent).orElse(null); From f19a9c11c9424b732711161dd923d6aa4f85cf30 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martynas=20Jusevi=C4=8Dius?= Date: Tue, 13 Jan 2026 17:10:39 +0100 Subject: [PATCH 5/7] Moved `Graph` logic into `DirectGraphStoreImpl` --- .../linkeddatahub/resource/Generate.java | 3 +- .../linkeddatahub/resource/Graph.java | 861 ------------------ .../linkeddatahub/resource/upload/Item.java | 3 +- .../model/impl/DirectGraphStoreImpl.java | 801 +++++++++++++++- .../server/model/impl/Dispatcher.java | 3 +- 5 files changed, 804 insertions(+), 867 deletions(-) delete mode 100644 src/main/java/com/atomgraph/linkeddatahub/resource/Graph.java diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java b/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java index e7d10a50c..716289439 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java @@ -20,6 +20,7 @@ import com.atomgraph.linkeddatahub.apps.model.Application; import com.atomgraph.linkeddatahub.client.GraphStoreClient; import com.atomgraph.linkeddatahub.imports.QueryLoader; +import com.atomgraph.linkeddatahub.server.model.impl.DirectGraphStoreImpl; import com.atomgraph.linkeddatahub.server.security.AgentContext; import com.atomgraph.linkeddatahub.server.util.Skolemizer; import com.atomgraph.linkeddatahub.vocabulary.LDH; @@ -150,7 +151,7 @@ public Response post(Model model) new Skolemizer(containerGraphURI.toString()).apply(containerModel); // append triples 
directly to the graph store without doing an HTTP request (and thus no ACL check) - try (Response containerResponse = getResourceContext().getResource(Graph.class).post(containerModel, false, containerGraphURI)) + try (Response containerResponse = getResourceContext().getResource(DirectGraphStoreImpl.class).post(containerModel, false, containerGraphURI)) { if (!containerResponse.getStatusInfo().getFamily().equals(Status.Family.SUCCESSFUL)) { diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/Graph.java b/src/main/java/com/atomgraph/linkeddatahub/resource/Graph.java deleted file mode 100644 index 4f6316298..000000000 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/Graph.java +++ /dev/null @@ -1,861 +0,0 @@ -/** - * Copyright 2021 Martynas Jusevičius - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ -package com.atomgraph.linkeddatahub.resource; - -import com.atomgraph.client.util.HTMLMediaTypePredicate; -import com.atomgraph.client.vocabulary.AC; -import com.atomgraph.core.MediaTypes; -import com.atomgraph.core.model.EndpointAccessor; -import com.atomgraph.linkeddatahub.apps.model.EndUserApplication; -import com.atomgraph.linkeddatahub.client.GraphStoreClient; -import com.atomgraph.linkeddatahub.model.CSVImport; -import com.atomgraph.linkeddatahub.model.RDFImport; -import com.atomgraph.linkeddatahub.model.Service; -import com.atomgraph.linkeddatahub.server.io.ValidatingModelProvider; -import com.atomgraph.linkeddatahub.server.model.Patchable; -import com.atomgraph.linkeddatahub.server.model.impl.DirectGraphStoreImpl; -import com.atomgraph.linkeddatahub.server.security.AgentContext; -import com.atomgraph.linkeddatahub.server.util.PatchUpdateVisitor; -import com.atomgraph.linkeddatahub.server.util.Skolemizer; -import com.atomgraph.linkeddatahub.vocabulary.ACL; -import com.atomgraph.linkeddatahub.vocabulary.DH; -import com.atomgraph.linkeddatahub.vocabulary.LDH; -import com.atomgraph.linkeddatahub.vocabulary.NFO; -import com.atomgraph.linkeddatahub.vocabulary.SIOC; -import static com.atomgraph.server.status.UnprocessableEntityStatus.UNPROCESSABLE_ENTITY; -import java.net.URI; -import java.nio.charset.StandardCharsets; -import java.util.Arrays; -import java.util.List; -import java.util.Optional; -import jakarta.inject.Inject; -import jakarta.ws.rs.BadRequestException; -import jakarta.ws.rs.Consumes; -import jakarta.ws.rs.DELETE; -import jakarta.ws.rs.HttpMethod; -import jakarta.ws.rs.InternalServerErrorException; -import jakarta.ws.rs.NotFoundException; -import jakarta.ws.rs.OPTIONS; -import jakarta.ws.rs.PATCH; -import jakarta.ws.rs.POST; -import jakarta.ws.rs.PUT; -import jakarta.ws.rs.WebApplicationException; -import jakarta.ws.rs.core.Context; -import jakarta.ws.rs.core.HttpHeaders; -import jakarta.ws.rs.core.MediaType; -import 
jakarta.ws.rs.core.Request; -import jakarta.ws.rs.core.Response; -import jakarta.ws.rs.core.Response.ResponseBuilder; -import static jakarta.ws.rs.core.Response.Status.PERMANENT_REDIRECT; -import jakarta.ws.rs.core.SecurityContext; -import jakarta.ws.rs.core.UriInfo; -import jakarta.ws.rs.ext.MessageBodyReader; -import jakarta.ws.rs.ext.Providers; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.net.URISyntaxException; -import java.nio.channels.Channels; -import java.nio.channels.FileChannel; -import java.security.DigestInputStream; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.GregorianCalendar; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; -import org.apache.commons.codec.binary.Hex; -import org.apache.commons.lang3.StringUtils; -import org.apache.jena.atlas.RuntimeIOException; -import org.apache.jena.datatypes.xsd.XSDDateTime; -import org.apache.jena.ontology.Ontology; -import org.apache.jena.query.Dataset; -import org.apache.jena.query.DatasetFactory; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.ModelFactory; -import org.apache.jena.rdf.model.RDFNode; -import org.apache.jena.rdf.model.ResIterator; -import org.apache.jena.rdf.model.Resource; -import org.apache.jena.rdf.model.ResourceFactory; -import org.apache.jena.rdf.model.Statement; -import org.apache.jena.rdf.model.StmtIterator; -import org.apache.jena.sparql.modify.request.UpdateDeleteWhere; -import org.apache.jena.sparql.modify.request.UpdateModify; -import org.apache.jena.sparql.vocabulary.FOAF; -import org.apache.jena.update.Update; -import org.apache.jena.update.UpdateAction; -import org.apache.jena.update.UpdateRequest; -import org.apache.jena.util.ResourceUtils; -import org.apache.jena.util.iterator.ExtendedIterator; -import org.apache.jena.vocabulary.DCTerms; -import 
org.apache.jena.vocabulary.RDF; -import org.glassfish.jersey.media.multipart.FormDataBodyPart; -import org.glassfish.jersey.media.multipart.FormDataMultiPart; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * JAX-RS resource that handles requests to directly-identified named graphs. - * Direct identification is specified in the Graph Store Protocol. - * - * @author {@literal Martynas Jusevičius } - */ -public class Graph extends DirectGraphStoreImpl implements Patchable -{ - - private static final Logger log = LoggerFactory.getLogger(Graph.class); - - private final Set allowedMethods; - - /** - * Constructs resource. - * - * @param request current request - * @param uriInfo URI information of the current request - * @param mediaTypes a registry of readable/writable media types - * @param application current application - * @param ontology ontology of the current application - * @param service SPARQL service of the current application - * @param securityContext JAX-RS security context - * @param agentContext authenticated agent's context - * @param providers JAX-RS provider registry - * @param system system application - */ - @Inject - public Graph(@Context Request request, @Context UriInfo uriInfo, MediaTypes mediaTypes, - com.atomgraph.linkeddatahub.apps.model.Application application, Optional ontology, Optional service, - @Context SecurityContext securityContext, Optional agentContext, - @Context Providers providers, com.atomgraph.linkeddatahub.Application system) - { - super(request, uriInfo, mediaTypes, application, ontology, service, securityContext, agentContext, providers, system); - - URI uri = uriInfo.getAbsolutePath(); - allowedMethods = new HashSet<>(); - allowedMethods.add(HttpMethod.GET); - allowedMethods.add(HttpMethod.POST); - - if (!ownerDocURI.equals(uri) && - !secretaryDocURI.equals(uri)) - allowedMethods.add(HttpMethod.PUT); - - if (!application.getBaseURI().equals(uri) && - !ownerDocURI.equals(uri) && - 
!secretaryDocURI.equals(uri)) - allowedMethods.add(HttpMethod.DELETE); - } - - @Override - @POST - public Response post(Model model) - { - if (log.isTraceEnabled()) log.trace("POST Graph Store request with RDF payload: {} payload size(): {}", model, model.size()); - - final Model existingModel = getService().getGraphStoreClient().getModel(getURI().toString()); - - ResponseBuilder rb = evaluatePreconditions(existingModel); - if (rb != null) return rb.build(); // preconditions not met - - model.createResource(getURI().toString()). - removeAll(DCTerms.modified). - addLiteral(DCTerms.modified, ResourceFactory.createTypedLiteral(GregorianCalendar.getInstance())); - - // container/item (graph) resource is already skolemized, skolemize the rest of the model - new Skolemizer(getURI().toString()).apply(model); - - // is this implemented correctly? The specification is not very clear. - if (log.isDebugEnabled()) log.debug("POST Model to named graph with URI: {}", getURI()); - // First remove old dct:modified values from the triplestore, then add new data - existingModel.createResource(getURI().toString()).removeAll(DCTerms.modified); - getService().getGraphStoreClient().putModel(getURI().toString(), existingModel.add(model)); // replace entire graph to avoid accumulating dct:modified - Model updatedModel = existingModel.add(model); - - submitImports(model); - - return Response.noContent(). - tag(getInternalResponse(updatedModel, null).getVariantEntityTag()). // entity tag of the updated graph - build(); - } - - @Override - @PUT - // the AuthorizationFilter only allows creating new child URIs for existing containers (i.e. there has to be a .. 
container already) - public Response put(Model model) - { - if (log.isTraceEnabled()) log.trace("PUT Graph Store request with RDF payload: {} payload size(): {}", model, model.size()); - - if (!getAllowedMethods().contains(HttpMethod.PUT)) - { - if (log.isErrorEnabled()) log.error("Method '{}' is not allowed on document URI <{}>", HttpMethod.PUT, getURI()); - throw new WebApplicationException("Method '" + HttpMethod.PUT + "' is not allowed on document URI <" + getURI() + ">", Response.status(Response.Status.METHOD_NOT_ALLOWED).allow(getAllowedMethods()).build()); - } - - // enforce that request URI always end with a slash - by redirecting to it if doesn't not already - if (!getURI().toString().endsWith("/")) - { - String uriWithSlash = getURI().toString() + "/"; - - if (log.isDebugEnabled()) log.debug("Redirecting document URI <{}> to <{}> in order to enforce trailing a slash", getURI(), uriWithSlash); - - return Response.status(PERMANENT_REDIRECT). - location(URI.create(uriWithSlash)). - build(); - } - if (getURI().getPath().contains("//")) - { - if (log.isDebugEnabled()) log.debug("Rejected document URI <{}> - double slashes are not allowed", getURI()); - throw new BadRequestException("Double slashes not allowed in document URIs"); - } - - new Skolemizer(getURI().toString()).apply(model); - Model existingModel = null; - try - { - existingModel = getService().getGraphStoreClient().getModel(getURI().toString()); - - ResponseBuilder rb = evaluatePreconditions(existingModel); - if (rb != null) return rb.build(); // preconditions not met - } - catch (NotFoundException ex) - { - //if (existingModel == null) existingModel = null; - } - - Resource parent = model.createResource(getURI().resolve("..").toString()); - Resource resource = model.createResource(getURI().toString()). - removeAll(SIOC.HAS_PARENT). 
- removeAll(SIOC.HAS_CONTAINER); - - if (!getApplication().getBaseURI().equals(getURI())) // don't update Root document's metadata - { - if (resource.hasProperty(RDF.type, DH.Container)) - resource.addProperty(SIOC.HAS_PARENT, parent); - else - resource.addProperty(SIOC.HAS_CONTAINER, parent). - addProperty(RDF.type, DH.Item); // TO-DO: replace with foaf:Document? - } - - if (existingModel == null) // creating new graph and attaching it to the document hierarchy - { - resource.removeAll(DCTerms.created). // remove any client-supplied dct:created values - addLiteral(DCTerms.created, ResourceFactory.createTypedLiteral(GregorianCalendar.getInstance())); - - if (getAgentContext().isPresent()) resource.addProperty(DCTerms.creator, getAgentContext().get().getAgent()). - addProperty(ACL.owner, getAgentContext().get().getAgent()); - - if (log.isDebugEnabled()) log.debug("PUT Model into new named graph with URI: {}", getURI()); - getService().getGraphStoreClient().putModel(getURI().toString(), model); // TO-DO: catch exceptions - - submitImports(model); - - return Response.created(getURI()). - build(); - } - else // updating existing graph - { - // retain metadata from existing document resource - ExtendedIterator it = existingModel.createResource(getURI().toString()).listProperties(DCTerms.created). - andThen(existingModel.createResource(getURI().toString()).listProperties(DCTerms.creator)). - andThen(existingModel.createResource(getURI().toString()).listProperties(ACL.owner)); - try - { - it.forEach(stmt -> model.add(stmt)); - } - finally - { - it.close(); - } - - resource.removeAll(DCTerms.modified). 
- addLiteral(DCTerms.modified, ResourceFactory.createTypedLiteral(GregorianCalendar.getInstance())); - - if (log.isDebugEnabled()) log.debug("PUT Model into existing named graph with URI: {}", getURI()); - getService().getGraphStoreClient().putModel(getURI().toString(), model); // TO-DO: catch exceptions - - submitImports(model); - - return getInternalResponse(existingModel, null).getResponseBuilder(). - build(); - } - } - - /** - * Implements PATCH method of SPARQL Graph Store Protocol. - * Accepts SPARQL update as the request body which is executed in the context of the specified graph. - * The GRAPH keyword is therefore not allowed in the update string. - * - * @param updateRequest SPARQL update - * @return response response object - */ - @PATCH - @Override - public Response patch(UpdateRequest updateRequest) - { - if (updateRequest == null) throw new BadRequestException("SPARQL update not specified"); - if (log.isDebugEnabled()) log.debug("PATCH request on named graph with URI: {}", getURI()); - if (log.isDebugEnabled()) log.debug("PATCH update string: {}", updateRequest.toString()); - - if (updateRequest.getOperations().size() != 1) - throw new WebApplicationException("Only a single SPARQL Update is supported by PATCH", UNPROCESSABLE_ENTITY.getStatusCode()); // 422 Unprocessable Entity - - Update update = updateRequest.getOperations().get(0); - if (!(update instanceof UpdateModify || update instanceof UpdateDeleteWhere)) - throw new WebApplicationException("Only INSERT/WHERE and DELETE WHERE forms of SPARQL Update are supported by PATCH", UNPROCESSABLE_ENTITY.getStatusCode()); // 422 Unprocessable Entity - - // check for GRAPH keyword which is disallowed - PatchUpdateVisitor visitor = new PatchUpdateVisitor(); - update.visit(visitor); - if (visitor.containsNamedGraph()) - { - if (log.isWarnEnabled()) log.debug("SPARQL update used with PATCH method cannot contain the GRAPH keyword"); - throw new WebApplicationException("SPARQL update used with PATCH method 
cannot contain the GRAPH keyword", UNPROCESSABLE_ENTITY.getStatusCode()); // 422 Unprocessable Entity - } - // no need to set WITH since we'll be updating model in memory before persisting it - - final Dataset dataset; - final Model existingModel = getService().getGraphStoreClient().getModel(getURI().toString()); - if (existingModel == null) throw new NotFoundException("Named graph with URI <" + getURI() + "> not found"); - - ResponseBuilder rb = evaluatePreconditions(existingModel); - if (rb != null) return rb.build(); // preconditions not met - - Model beforeUpdateModel = ModelFactory.createDefaultModel().add(existingModel); - dataset = DatasetFactory.wrap(existingModel); - UpdateAction.execute(updateRequest, dataset); // update model in memory - - Set changedResources = getChangedResources(beforeUpdateModel, existingModel); - Model changedModel = ModelFactory.createDefaultModel(); - - // collect triples of changed resources into a new model which will be validated - no point validating resources that haven't changed - for (Resource resource : changedResources) - changedModel.add(existingModel.listStatements(resource, null, (RDFNode) null)); - - // if PATCH results in an empty model, treat it as a DELETE request - if (changedModel.isEmpty()) return delete(Boolean.FALSE, getURI()); - - validate(changedModel); // this would normally be done transparently by the ValidatingModelProvider - put(dataset.getDefaultModel(), Boolean.FALSE, getURI()); - - return getInternalResponse(dataset.getDefaultModel(), null).getResponseBuilder(). // entity tag of the updated graph - status(Response.Status.NO_CONTENT). - entity(null). // 'Content-Type' header has to be explicitly unset in ResponseHeadersFilter - header(HttpHeaders.CONTENT_LOCATION, getURI()). - tag(getInternalResponse(dataset.getDefaultModel(), null).getVariantEntityTag()). // TO-DO: optimize! - build(); - } - - /** - * Gets a diff of triples between two models and returns a set of their subject resources. 
- * - * @param beforeUpdateModel model before the update - * @param afterUpdateModel model after the update - * @return set of changed resources - */ - public Set getChangedResources(Model beforeUpdateModel, Model afterUpdateModel) - { - if (beforeUpdateModel == null) throw new IllegalArgumentException("Model before update cannot be null"); - if (afterUpdateModel == null) throw new IllegalArgumentException("Model after update cannot be null"); - - Model addedTriples = afterUpdateModel.difference(beforeUpdateModel); - Model removedTriples = beforeUpdateModel.difference(afterUpdateModel); - - Set changedResources = new HashSet<>(); - addedTriples.listStatements().forEachRemaining(statement -> { - changedResources.add(statement.getSubject()); - }); - removedTriples.listStatements().forEachRemaining(statement -> { - changedResources.add(statement.getSubject()); - }); - - return changedResources; - } - - /** - * Overrides OPTIONS HTTP header values.Specifies allowed methods. - * - * @return HTTP response - */ - @OPTIONS - public Response options() - { - Response.ResponseBuilder rb = Response.ok(); - - rb.allow(getAllowedMethods()); - - String acceptWritable = StringUtils.join(getWritableMediaTypes(Model.class), ","); - rb.header("Accept-Post", acceptWritable); - - return rb.build(); - } - - /** - * Handles multipart POST - * Files are written to storage before the RDF data is passed to the default POST handler method. 
- * - * @param multiPart multipart form data - * @return HTTP response - */ - @POST - @Consumes(MediaType.MULTIPART_FORM_DATA) - public Response postMultipart(FormDataMultiPart multiPart) - { - if (log.isDebugEnabled()) log.debug("MultiPart fields: {} body parts: {}", multiPart.getFields(), multiPart.getBodyParts()); - - try - { - Model model = parseModel(multiPart); - validate(model); - if (log.isTraceEnabled()) log.trace("POST Graph Store request with RDF payload: {} payload size(): {}", model, model.size()); - - final boolean existingGraph = getService().getGraphStoreClient().containsModel(getURI().toString()); - if (!existingGraph) throw new NotFoundException("Named graph with URI <" + getURI() + "> not found"); - - new Skolemizer(getURI().toString()).apply(model); // skolemize before writing files (they require absolute URIs) - - int fileCount = writeFiles(model, getFileNameBodyPartMap(multiPart)); - if (log.isDebugEnabled()) log.debug("# of files uploaded: {} ", fileCount); - - if (log.isDebugEnabled()) log.debug("POSTed Model size: {}", model.size()); - return post(model, false, getURI()); // ignore the @QueryParam("graph") value - } - catch (URISyntaxException ex) - { - if (log.isErrorEnabled()) log.error("URI '{}' has syntax error in request with media type: {}", ex.getInput(), multiPart.getMediaType()); - throw new BadRequestException(ex); - } - catch (RuntimeIOException ex) - { - if (log.isErrorEnabled()) log.error("Could not read uploaded file as media type: {}", multiPart.getMediaType()); - throw new BadRequestException(ex); - } - } - - /** - * Handles multipart PUT - * Files are written to storage before the RDF data is passed to the default PUT handler method. 
- * - * @param multiPart multipart form data - * @return HTTP response - */ - @PUT - @Consumes(MediaType.MULTIPART_FORM_DATA) - public Response putMultipart(FormDataMultiPart multiPart) - { - if (log.isDebugEnabled()) log.debug("MultiPart fields: {} body parts: {}", multiPart.getFields(), multiPart.getBodyParts()); - - try - { - Model model = parseModel(multiPart); - MessageBodyReader reader = getProviders().getMessageBodyReader(Model.class, null, null, com.atomgraph.core.MediaType.APPLICATION_NTRIPLES_TYPE); - if (reader instanceof ValidatingModelProvider validatingModelProvider) model = validatingModelProvider.processRead(model); - if (log.isDebugEnabled()) log.debug("POSTed Model size: {}", model.size()); - - new Skolemizer(getURI().toString()).apply(model); // skolemize before writing files (they require absolute URIs) - - int fileCount = writeFiles(model, getFileNameBodyPartMap(multiPart)); - if (log.isDebugEnabled()) log.debug("# of files uploaded: {} ", fileCount); - - return put(model, false, getURI()); - } - catch (URISyntaxException ex) - { - if (log.isErrorEnabled()) log.error("URI '{}' has syntax error in request with media type: {}", ex.getInput(), multiPart.getMediaType()); - throw new BadRequestException(ex); - } - catch (RuntimeIOException ex) - { - if (log.isErrorEnabled()) log.error("Could not read uploaded file as media type: {}", multiPart.getMediaType()); - throw new BadRequestException(ex); - } - } - - /** - * Implements DELETE method of SPARQL Graph Store Protocol. 
- * - * @return response - */ - @DELETE - @Override - public Response delete() - { - if (!getAllowedMethods().contains(HttpMethod.DELETE)) - throw new WebApplicationException("Cannot delete document", Response.status(Response.Status.METHOD_NOT_ALLOWED).allow(getAllowedMethods()).build()); - - try - { - Model existingModel = getService().getGraphStoreClient().getModel(getURI().toString()); - - ResponseBuilder rb = evaluatePreconditions(existingModel); - if (rb != null) return rb.build(); // preconditions not met - } - catch (NotFoundException ex) - { - //if (existingModel == null) existingModel = null; - } - - return super.delete(false, getURI()); - } - - /** - * Get internal response object. - * - * @param model RDF model - * @param graphUri graph URI - * @return response - */ - public com.atomgraph.core.model.impl.Response getInternalResponse(Model model, URI graphUri) - { - return new com.atomgraph.core.model.impl.Response(getRequest(), - model, - getLastModified(model, graphUri), - getEntityTag(model), - getWritableMediaTypes(Model.class), - getLanguages(), - getEncodings(), - new HTMLMediaTypePredicate()); - } - - /** - * Get response builder. - * - * @param model RDF model - * @param graphUri graph URI - * @return response builder - */ - @Override - public ResponseBuilder getResponseBuilder(Model model, URI graphUri) - { - return getInternalResponse(model, graphUri).getResponseBuilder(); - } - - /** - * List allowed HTTP methods for the current graph URI. - * Exceptions apply to the application's Root document, owner's WebID document, and secretary's WebID document. - * - * @return list of HTTP methods - */ - public Set getAllowedMethods() - { - return allowedMethods; - } - - /** - * Writes all files from the multipart RDF/POST request body. 
- * - * @param model model with RDF resources - * @param fileNameBodyPartMap a mapping of request part names and objects - * @return number of written files - */ - public int writeFiles(Model model, Map fileNameBodyPartMap) - { - if (model == null) throw new IllegalArgumentException("Model cannot be null"); - if (fileNameBodyPartMap == null) throw new IllegalArgumentException("Map cannot be null"); - - int count = 0; - ResIterator resIt = model.listResourcesWithProperty(NFO.fileName); - try - { - while (resIt.hasNext()) - { - Resource file = resIt.next(); - String fileName = file.getProperty(NFO.fileName).getString(); - FormDataBodyPart bodyPart = fileNameBodyPartMap.get(fileName); - - if (bodyPart != null) // bodyPart is null if nfo:fileName is a simple input and not a file input - { - // writing files has to go before post() as it can change model (e.g. add body part media type as dct:format) - if (log.isDebugEnabled()) log.debug("Writing FormDataBodyPart with fileName {} to file with URI {}", fileName, file.getURI()); - writeFile(file, bodyPart); - - count++; - } - } - } - finally - { - resIt.close(); - } - - return count; - } - - /** - * Writes a data stream to the upload folder. - * - * @param uri file URI - * @param base application's base URI - * @param is file input stream - * @return file - */ - public File writeFile(URI uri, URI base, InputStream is) - { - return writeFile(uri, base, getSystem().getUploadRoot(), is); - } - - /** - * Writes a data stream to a folder. 
- * - * @param uri file URI - * @param base application's base URI - * @param uploadRoot destination folder URI - * @param is file input stream - * @return file - */ - public File writeFile(URI uri, URI base, URI uploadRoot, InputStream is) - { - if (uri == null) throw new IllegalArgumentException("File URI cannot be null"); - if (!uri.isAbsolute()) throw new IllegalArgumentException("File URI must be absolute"); - if (base == null) throw new IllegalArgumentException("Base URI cannot be null"); - if (uploadRoot == null) throw new IllegalArgumentException("Upload root URI cannot be null"); - - URI relative = base.relativize(uri); - if (log.isDebugEnabled()) log.debug("Upload folder root URI: {}", uploadRoot); - File file = new File(uploadRoot.resolve(relative)); - - return writeFile(file, is); - } - - /** - * Writes data stream to a file destination. - * - * @param file destination - * @param is input stream - * @return file - */ - public File writeFile(File file, InputStream is) - { - if (file == null) throw new IllegalArgumentException("File cannot be null"); - if (is == null) throw new IllegalArgumentException("File InputStream cannot be null"); - - try (FileOutputStream fos = new FileOutputStream(file)) - { - if (log.isDebugEnabled()) log.debug("Writing input stream: {} to file: {}", is, file); - FileChannel destination = fos.getChannel(); - destination.transferFrom(Channels.newChannel(is), 0, 104857600); - return file; - } - catch (IOException ex) - { - if (log.isErrorEnabled()) log.error("Error writing file: {}", file); - throw new InternalServerErrorException(ex); - } - } - - /** - * Writes the specified part of the multipart request body as file and returns the file. - * File's RDF resource is used to attached metadata about the file, such as format and SHA1 hash sum. 
- * - * @param resource file's RDF resource - * @param bodyPart file's body part - * @return written file - */ - public File writeFile(Resource resource, FormDataBodyPart bodyPart) - { - if (resource == null) throw new IllegalArgumentException("File Resource cannot be null"); - if (!resource.isURIResource()) throw new IllegalArgumentException("File Resource must have a URI"); - if (bodyPart == null) throw new IllegalArgumentException("FormDataBodyPart cannot be null"); - - try (InputStream is = bodyPart.getEntityAs(InputStream.class); - DigestInputStream dis = new DigestInputStream(is, getMessageDigest())) - { - dis.getMessageDigest().reset(); - File tempFile = File.createTempFile("tmp", null); - try (FileOutputStream fos = new FileOutputStream(tempFile); - FileChannel destination = fos.getChannel()) - { - destination.transferFrom(Channels.newChannel(dis), 0, 104857600); - } - String sha1Hash = Hex.encodeHexString(dis.getMessageDigest().digest()); // BigInteger seems to have an issue when the leading hex digit is 0 - if (log.isDebugEnabled()) log.debug("Wrote file: {} with SHA1 hash: {}", tempFile, sha1Hash); - - resource.addLiteral(FOAF.sha1, sha1Hash); - // user could have specified an explicit media type; otherwise - use the media type that the browser has sent - if (!resource.hasProperty(DCTerms.format)) resource.addProperty(DCTerms.format, com.atomgraph.linkeddatahub.MediaType.toResource(bodyPart.getMediaType())); - - URI sha1Uri = getUploadsUriBuilder().path("{sha1}").build(sha1Hash); - if (log.isDebugEnabled()) log.debug("Renaming resource: {} to SHA1 based URI: {}", resource, sha1Uri); - ResourceUtils.renameResource(resource, sha1Uri.toString()); - - try (FileInputStream fis = new FileInputStream(tempFile)) - { - return writeFile(sha1Uri, getUriInfo().getBaseUri(), fis); - } - } - catch (IOException ex) - { - if (log.isErrorEnabled()) log.error("File I/O error", ex); - throw new InternalServerErrorException(ex); - } - } - - /** - * Submits imports for the 
given model. - * - * @param model the RDF model - */ - public void submitImports(Model model) - { - if (model == null) throw new IllegalArgumentException("Model cannot be null"); - - ExtendedIterator it = model.listSubjectsWithProperty(RDF.type, LDH.CSVImport). - andThen(model.listSubjectsWithProperty(RDF.type, LDH.RDFImport)). - filterKeep(_import -> { return _import.canAs(CSVImport.class) || _import.canAs(RDFImport.class); }); // canAs(Import.class) would require InfModel - try - { - Service adminService = getApplication().canAs(EndUserApplication.class) ? getApplication().as(EndUserApplication.class).getAdminApplication().getService() : null; - GraphStoreClient gsc = GraphStoreClient.create(getSystem().getImportClient(), getSystem().getMediaTypes()). - delegation(getUriInfo().getBaseUri(), getAgentContext().orElse(null)); - - while (it.hasNext()) - { - Resource _import = it.next(); - - // start the import asynchroniously - if (_import.canAs(CSVImport.class)) - getSystem().submitImport(_import.as(CSVImport.class), getApplication(), getApplication().getService(), adminService, getUriInfo().getBaseUri().toString(), gsc); - if (_import.canAs(RDFImport.class)) - getSystem().submitImport(_import.as(RDFImport.class), getApplication(), getApplication().getService(), adminService, getUriInfo().getBaseUri().toString(), gsc); - } - } - finally - { - it.close(); - } - } - - /** - * Returns the date of last modification of the specified URI resource. - * - * @param model resource model - * @param graphUri resource URI - * @return modification date - */ - @Override - public Date getLastModified(Model model, URI graphUri) - { - if (graphUri == null) return null; - - return getLastModified(model.createResource(graphUri.toString())); - } - - /** - * Returns the date of last modification of the specified resource. 
- * - * @param resource resource - * @return modification date - */ - public Date getLastModified(Resource resource) - { - if (resource == null) throw new IllegalArgumentException("Resource cannot be null"); - - List dates = new ArrayList<>(); - - StmtIterator createdIt = resource.listProperties(DCTerms.created); - try - { - while (createdIt.hasNext()) - { - Statement stmt = createdIt.next(); - if (stmt.getObject().isLiteral() && stmt.getObject().asLiteral().getValue() instanceof XSDDateTime) - dates.add(((XSDDateTime)stmt.getObject().asLiteral().getValue()).asCalendar().getTime()); - } - } - finally - { - createdIt.close(); - } - - StmtIterator modifiedIt = resource.listProperties(DCTerms.modified); - try - { - while (modifiedIt.hasNext()) - { - Statement stmt = modifiedIt.next(); - if (stmt.getObject().isLiteral() && stmt.getObject().asLiteral().getValue() instanceof XSDDateTime) - dates.add(((XSDDateTime)stmt.getObject().asLiteral().getValue()).asCalendar().getTime()); - } - } - finally - { - modifiedIt.close(); - } - - if (!dates.isEmpty()) return Collections.max(dates); - - return null; - } - - /** - * Gets a list of media types that a writable for a message body class. - * - * @param clazz message body class, normally Dataset.class or Model.class - * @return list of media types - */ - @Override - public List getWritableMediaTypes(Class clazz) - { - // restrict writable MediaTypes to the requested one (usually by RDF export feature) - if (getUriInfo().getQueryParameters().containsKey(AC.accept.getLocalName())) // TO-DO: move to ResourceFilter? - { - String accept = getUriInfo().getQueryParameters().getFirst(AC.accept.getLocalName()); - - MediaType mediaType = MediaType.valueOf(accept).withCharset(StandardCharsets.UTF_8.name()); // set charset=UTF-8 - return Arrays.asList(mediaType); - } - - return super.getWritableMediaTypes(clazz); - } - - /** - * Validates model against SPIN and SHACL constraints. 
- * - * @param model RDF model - * @return validated model - */ - public Model validate(Model model) - { - MessageBodyReader reader = getProviders().getMessageBodyReader(Model.class, null, null, com.atomgraph.core.MediaType.APPLICATION_NTRIPLES_TYPE); - if (reader instanceof ValidatingModelProvider validatingModelProvider) return validatingModelProvider.processRead(model); - - throw new InternalServerErrorException("Could not obtain ValidatingModelProvider instance"); - } - - /** - * Evaluates the state of the given graph against the request preconditions. - * Checks the last modified data (if any) and calculates an ETag value. - * - * @param model RDF model - * @return {@code jakarta.ws.rs.core.Response.ResponseBuilder} instance. null if preconditions are not met. - */ - public ResponseBuilder evaluatePreconditions(Model model) - { - return getInternalResponse(model, getURI()).evaluatePreconditions(); - } - - /** - * Returns SPARQL endpoint accessor. - * - * @return endpoint accessor - */ - public EndpointAccessor getEndpointAccessor() - { - return getService().getEndpointAccessor(); - } - -} diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/upload/Item.java b/src/main/java/com/atomgraph/linkeddatahub/resource/upload/Item.java index 3e7ae8d44..130a03609 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/upload/Item.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/upload/Item.java @@ -29,6 +29,7 @@ import com.atomgraph.core.MediaTypes; import com.atomgraph.linkeddatahub.model.Service; import com.atomgraph.linkeddatahub.server.io.FileRangeOutput; +import com.atomgraph.linkeddatahub.server.model.impl.DirectGraphStoreImpl; import com.atomgraph.linkeddatahub.server.security.AgentContext; import java.util.Collections; import java.util.Date; @@ -58,7 +59,7 @@ * * @author Martynas Jusevičius {@literal } */ -public class Item extends com.atomgraph.linkeddatahub.resource.Graph +public class Item extends DirectGraphStoreImpl { private 
static final Logger log = LoggerFactory.getLogger(Item.class); diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/DirectGraphStoreImpl.java b/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/DirectGraphStoreImpl.java index 5d5a960e2..2693295d5 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/DirectGraphStoreImpl.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/DirectGraphStoreImpl.java @@ -16,10 +16,27 @@ */ package com.atomgraph.linkeddatahub.server.model.impl; +import com.atomgraph.client.util.HTMLMediaTypePredicate; +import com.atomgraph.client.vocabulary.AC; import com.atomgraph.core.MediaTypes; +import com.atomgraph.core.model.EndpointAccessor; import com.atomgraph.core.riot.lang.RDFPostReader; +import com.atomgraph.linkeddatahub.apps.model.EndUserApplication; +import com.atomgraph.linkeddatahub.client.GraphStoreClient; +import com.atomgraph.linkeddatahub.model.CSVImport; +import com.atomgraph.linkeddatahub.model.RDFImport; import com.atomgraph.linkeddatahub.model.Service; +import com.atomgraph.linkeddatahub.server.io.ValidatingModelProvider; +import com.atomgraph.linkeddatahub.server.model.Patchable; import com.atomgraph.linkeddatahub.server.security.AgentContext; +import com.atomgraph.linkeddatahub.server.util.PatchUpdateVisitor; +import com.atomgraph.linkeddatahub.server.util.Skolemizer; +import com.atomgraph.linkeddatahub.vocabulary.ACL; +import com.atomgraph.linkeddatahub.vocabulary.DH; +import com.atomgraph.linkeddatahub.vocabulary.LDH; +import com.atomgraph.linkeddatahub.vocabulary.NFO; +import com.atomgraph.linkeddatahub.vocabulary.SIOC; +import static com.atomgraph.server.status.UnprocessableEntityStatus.UNPROCESSABLE_ENTITY; import java.net.URI; import java.net.URISyntaxException; import java.security.MessageDigest; @@ -27,19 +44,72 @@ import java.util.Locale; import java.util.Optional; import jakarta.inject.Inject; +import jakarta.ws.rs.BadRequestException; +import 
jakarta.ws.rs.Consumes; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.HttpMethod; import jakarta.ws.rs.InternalServerErrorException; +import jakarta.ws.rs.NotFoundException; +import jakarta.ws.rs.OPTIONS; +import jakarta.ws.rs.PATCH; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.WebApplicationException; import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.HttpHeaders; +import jakarta.ws.rs.core.MediaType; import jakarta.ws.rs.core.Request; +import jakarta.ws.rs.core.Response; +import static jakarta.ws.rs.core.Response.Status.PERMANENT_REDIRECT; import jakarta.ws.rs.core.SecurityContext; import jakarta.ws.rs.core.UriBuilder; import jakarta.ws.rs.core.UriInfo; +import jakarta.ws.rs.ext.MessageBodyReader; import jakarta.ws.rs.ext.Providers; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.channels.Channels; +import java.nio.channels.FileChannel; +import java.nio.charset.StandardCharsets; +import java.security.DigestInputStream; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Date; +import java.util.GregorianCalendar; import java.util.HashMap; +import java.util.HashSet; import java.util.Iterator; import java.util.Map; +import java.util.Set; +import org.apache.commons.codec.binary.Hex; +import org.apache.commons.lang3.StringUtils; +import org.apache.jena.atlas.RuntimeIOException; +import org.apache.jena.datatypes.xsd.XSDDateTime; import org.apache.jena.ontology.Ontology; +import org.apache.jena.query.Dataset; +import org.apache.jena.query.DatasetFactory; import org.apache.jena.rdf.model.Model; +import org.apache.jena.rdf.model.ModelFactory; +import org.apache.jena.rdf.model.RDFNode; +import org.apache.jena.rdf.model.ResIterator; +import org.apache.jena.rdf.model.Resource; +import org.apache.jena.rdf.model.ResourceFactory; +import 
org.apache.jena.rdf.model.Statement; +import org.apache.jena.rdf.model.StmtIterator; +import org.apache.jena.sparql.modify.request.UpdateDeleteWhere; +import org.apache.jena.sparql.modify.request.UpdateModify; +import org.apache.jena.sparql.vocabulary.FOAF; +import org.apache.jena.update.Update; +import org.apache.jena.update.UpdateAction; +import org.apache.jena.update.UpdateRequest; +import org.apache.jena.util.ResourceUtils; +import org.apache.jena.util.iterator.ExtendedIterator; +import org.apache.jena.vocabulary.DCTerms; +import org.apache.jena.vocabulary.RDF; import org.glassfish.jersey.media.multipart.BodyPart; import org.glassfish.jersey.media.multipart.FormDataBodyPart; import org.glassfish.jersey.media.multipart.FormDataMultiPart; @@ -52,7 +122,7 @@ * * @author Martynas Jusevičius {@literal } */ -public abstract class DirectGraphStoreImpl extends com.atomgraph.core.model.impl.DirectGraphStoreImpl +public class DirectGraphStoreImpl extends com.atomgraph.core.model.impl.DirectGraphStoreImpl implements Patchable { private static final Logger log = LoggerFactory.getLogger(DirectGraphStoreImpl.class); @@ -73,7 +143,8 @@ public abstract class DirectGraphStoreImpl extends com.atomgraph.core.model.impl protected final URI ownerDocURI, secretaryDocURI; private final SecurityContext securityContext; private final Optional agentContext; - + private final Set allowedMethods; + /** * Constructs Graph Store. 
* @@ -116,6 +187,711 @@ public DirectGraphStoreImpl(@Context Request request, @Context UriInfo uriInfo, { throw new InternalServerErrorException(ex); } + + URI uri = uriInfo.getAbsolutePath(); + allowedMethods = new HashSet<>(); + allowedMethods.add(HttpMethod.GET); + allowedMethods.add(HttpMethod.POST); + + if (!ownerDocURI.equals(uri) && + !secretaryDocURI.equals(uri)) + allowedMethods.add(HttpMethod.PUT); + + if (!application.getBaseURI().equals(uri) && + !ownerDocURI.equals(uri) && + !secretaryDocURI.equals(uri)) + allowedMethods.add(HttpMethod.DELETE); + } + + /** + * Implements POST method of SPARQL Graph Store Protocol. + * Adds triples to the existing graph, skolemizes blank nodes, updates modification timestamp, and submits any imports. + * + * @param model RDF model to add to the graph + * @return HTTP response with updated entity tag + */ + @Override + @POST + public Response post(Model model) + { + if (log.isTraceEnabled()) log.trace("POST Graph Store request with RDF payload: {} payload size(): {}", model, model.size()); + + final Model existingModel = getService().getGraphStoreClient().getModel(getURI().toString()); + + Response.ResponseBuilder rb = evaluatePreconditions(existingModel); + if (rb != null) return rb.build(); // preconditions not met + + model.createResource(getURI().toString()). + removeAll(DCTerms.modified). + addLiteral(DCTerms.modified, ResourceFactory.createTypedLiteral(GregorianCalendar.getInstance())); + + // container/item (graph) resource is already skolemized, skolemize the rest of the model + new Skolemizer(getURI().toString()).apply(model); + + // is this implemented correctly? The specification is not very clear. 
+ if (log.isDebugEnabled()) log.debug("POST Model to named graph with URI: {}", getURI()); + // First remove old dct:modified values from the triplestore, then add new data + existingModel.createResource(getURI().toString()).removeAll(DCTerms.modified); + getService().getGraphStoreClient().putModel(getURI().toString(), existingModel.add(model)); // replace entire graph to avoid accumulating dct:modified + Model updatedModel = existingModel.add(model); + + submitImports(model); + + return Response.noContent(). + tag(getInternalResponse(updatedModel, null).getVariantEntityTag()). // entity tag of the updated graph + build(); + } + + /** + * Implements PUT method of SPARQL Graph Store Protocol. + * Creates a new graph or updates an existing one. Enforces trailing slash in URIs, skolemizes blank nodes, + * establishes parent/container relationships, and manages metadata (created, modified, creator, owner timestamps). + * + * @param model RDF model to create or update + * @return HTTP response with 201 Created for new graphs or 200 OK for updates + */ + @Override + @PUT + // the AuthorizationFilter only allows creating new child URIs for existing containers (i.e. there has to be a .. 
container already) + public Response put(Model model) + { + if (log.isTraceEnabled()) log.trace("PUT Graph Store request with RDF payload: {} payload size(): {}", model, model.size()); + + if (!getAllowedMethods().contains(HttpMethod.PUT)) + { + if (log.isErrorEnabled()) log.error("Method '{}' is not allowed on document URI <{}>", HttpMethod.PUT, getURI()); + throw new WebApplicationException("Method '" + HttpMethod.PUT + "' is not allowed on document URI <" + getURI() + ">", Response.status(Response.Status.METHOD_NOT_ALLOWED).allow(getAllowedMethods()).build()); + } + + // enforce that the request URI always ends with a slash - by redirecting to it if it doesn't already + if (!getURI().toString().endsWith("/")) + { + String uriWithSlash = getURI().toString() + "/"; + + if (log.isDebugEnabled()) log.debug("Redirecting document URI <{}> to <{}> in order to enforce trailing a slash", getURI(), uriWithSlash); + + return Response.status(PERMANENT_REDIRECT). + location(URI.create(uriWithSlash)). + build(); + } + if (getURI().getPath().contains("//")) + { + if (log.isDebugEnabled()) log.debug("Rejected document URI <{}> - double slashes are not allowed", getURI()); + throw new BadRequestException("Double slashes not allowed in document URIs"); + } + + new Skolemizer(getURI().toString()).apply(model); + Model existingModel = null; + try + { + existingModel = getService().getGraphStoreClient().getModel(getURI().toString()); + + Response.ResponseBuilder rb = evaluatePreconditions(existingModel); + if (rb != null) return rb.build(); // preconditions not met + } + catch (NotFoundException ex) + { + //if (existingModel == null) existingModel = null; + } + + Resource parent = model.createResource(getURI().resolve("..").toString()); + Resource resource = model.createResource(getURI().toString()). + removeAll(SIOC.HAS_PARENT). 
+ removeAll(SIOC.HAS_CONTAINER); + + if (!getApplication().getBaseURI().equals(getURI())) // don't update Root document's metadata + { + if (resource.hasProperty(RDF.type, DH.Container)) + resource.addProperty(SIOC.HAS_PARENT, parent); + else + resource.addProperty(SIOC.HAS_CONTAINER, parent). + addProperty(RDF.type, DH.Item); // TO-DO: replace with foaf:Document? + } + + if (existingModel == null) // creating new graph and attaching it to the document hierarchy + { + resource.removeAll(DCTerms.created). // remove any client-supplied dct:created values + addLiteral(DCTerms.created, ResourceFactory.createTypedLiteral(GregorianCalendar.getInstance())); + + if (getAgentContext().isPresent()) resource.addProperty(DCTerms.creator, getAgentContext().get().getAgent()). + addProperty(ACL.owner, getAgentContext().get().getAgent()); + + if (log.isDebugEnabled()) log.debug("PUT Model into new named graph with URI: {}", getURI()); + getService().getGraphStoreClient().putModel(getURI().toString(), model); // TO-DO: catch exceptions + + submitImports(model); + + return Response.created(getURI()). + build(); + } + else // updating existing graph + { + // retain metadata from existing document resource + ExtendedIterator it = existingModel.createResource(getURI().toString()).listProperties(DCTerms.created). + andThen(existingModel.createResource(getURI().toString()).listProperties(DCTerms.creator)). + andThen(existingModel.createResource(getURI().toString()).listProperties(ACL.owner)); + try + { + it.forEach(stmt -> model.add(stmt)); + } + finally + { + it.close(); + } + + resource.removeAll(DCTerms.modified). 
+ addLiteral(DCTerms.modified, ResourceFactory.createTypedLiteral(GregorianCalendar.getInstance())); + + if (log.isDebugEnabled()) log.debug("PUT Model into existing named graph with URI: {}", getURI()); + getService().getGraphStoreClient().putModel(getURI().toString(), model); // TO-DO: catch exceptions + + submitImports(model); + + return getInternalResponse(existingModel, null).getResponseBuilder(). + build(); + } + } + + /** + * Implements PATCH method of SPARQL Graph Store Protocol. + * Accepts SPARQL update as the request body which is executed in the context of the specified graph. + * The GRAPH keyword is therefore not allowed in the update string. + * + * @param updateRequest SPARQL update + * @return response object + */ + @PATCH + @Override + public Response patch(UpdateRequest updateRequest) + { + if (updateRequest == null) throw new BadRequestException("SPARQL update not specified"); + if (log.isDebugEnabled()) log.debug("PATCH request on named graph with URI: {}", getURI()); + if (log.isDebugEnabled()) log.debug("PATCH update string: {}", updateRequest.toString()); + + if (updateRequest.getOperations().size() != 1) + throw new WebApplicationException("Only a single SPARQL Update is supported by PATCH", UNPROCESSABLE_ENTITY.getStatusCode()); // 422 Unprocessable Entity + + Update update = updateRequest.getOperations().get(0); + if (!(update instanceof UpdateModify || update instanceof UpdateDeleteWhere)) + throw new WebApplicationException("Only INSERT/WHERE and DELETE WHERE forms of SPARQL Update are supported by PATCH", UNPROCESSABLE_ENTITY.getStatusCode()); // 422 Unprocessable Entity + + // check for GRAPH keyword which is disallowed + PatchUpdateVisitor visitor = new PatchUpdateVisitor(); + update.visit(visitor); + if (visitor.containsNamedGraph()) + { + if (log.isWarnEnabled()) log.debug("SPARQL update used with PATCH method cannot contain the GRAPH keyword"); + throw new WebApplicationException("SPARQL update used with PATCH method 
cannot contain the GRAPH keyword", UNPROCESSABLE_ENTITY.getStatusCode()); // 422 Unprocessable Entity + } + // no need to set WITH since we'll be updating model in memory before persisting it + + final Dataset dataset; + final Model existingModel = getService().getGraphStoreClient().getModel(getURI().toString()); + if (existingModel == null) throw new NotFoundException("Named graph with URI <" + getURI() + "> not found"); + + Response.ResponseBuilder rb = evaluatePreconditions(existingModel); + if (rb != null) return rb.build(); // preconditions not met + + Model beforeUpdateModel = ModelFactory.createDefaultModel().add(existingModel); + dataset = DatasetFactory.wrap(existingModel); + UpdateAction.execute(updateRequest, dataset); // update model in memory + + Set changedResources = getChangedResources(beforeUpdateModel, existingModel); + Model changedModel = ModelFactory.createDefaultModel(); + + // collect triples of changed resources into a new model which will be validated - no point validating resources that haven't changed + for (Resource resource : changedResources) + changedModel.add(existingModel.listStatements(resource, null, (RDFNode) null)); + + // if PATCH results in an empty model, treat it as a DELETE request + if (changedModel.isEmpty()) return delete(Boolean.FALSE, getURI()); + + validate(changedModel); // this would normally be done transparently by the ValidatingModelProvider + put(dataset.getDefaultModel(), Boolean.FALSE, getURI()); + + return getInternalResponse(dataset.getDefaultModel(), null).getResponseBuilder(). // entity tag of the updated graph + status(Response.Status.NO_CONTENT). + entity(null). // 'Content-Type' header has to be explicitly unset in ResponseHeadersFilter + header(HttpHeaders.CONTENT_LOCATION, getURI()). + tag(getInternalResponse(dataset.getDefaultModel(), null).getVariantEntityTag()). // TO-DO: optimize! + build(); + } + + /** + * Overrides OPTIONS HTTP header values. Specifies allowed methods. 
+ * + * @return HTTP response + */ + @OPTIONS + public Response options() + { + Response.ResponseBuilder rb = Response.ok(); + + rb.allow(getAllowedMethods()); + + String acceptWritable = StringUtils.join(getWritableMediaTypes(Model.class), ","); + rb.header("Accept-Post", acceptWritable); + + return rb.build(); + } + + /** + * Handles multipart POST + * Files are written to storage before the RDF data is passed to the default POST handler method. + * + * @param multiPart multipart form data + * @return HTTP response + */ + @POST + @Consumes(MediaType.MULTIPART_FORM_DATA) + public Response postMultipart(FormDataMultiPart multiPart) + { + if (log.isDebugEnabled()) log.debug("MultiPart fields: {} body parts: {}", multiPart.getFields(), multiPart.getBodyParts()); + + try + { + Model model = parseModel(multiPart); + validate(model); + if (log.isTraceEnabled()) log.trace("POST Graph Store request with RDF payload: {} payload size(): {}", model, model.size()); + + final boolean existingGraph = getService().getGraphStoreClient().containsModel(getURI().toString()); + if (!existingGraph) throw new NotFoundException("Named graph with URI <" + getURI() + "> not found"); + + new Skolemizer(getURI().toString()).apply(model); // skolemize before writing files (they require absolute URIs) + + int fileCount = writeFiles(model, getFileNameBodyPartMap(multiPart)); + if (log.isDebugEnabled()) log.debug("# of files uploaded: {} ", fileCount); + + if (log.isDebugEnabled()) log.debug("POSTed Model size: {}", model.size()); + return post(model, false, getURI()); // ignore the @QueryParam("graph") value + } + catch (URISyntaxException ex) + { + if (log.isErrorEnabled()) log.error("URI '{}' has syntax error in request with media type: {}", ex.getInput(), multiPart.getMediaType()); + throw new BadRequestException(ex); + } + catch (RuntimeIOException ex) + { + if (log.isErrorEnabled()) log.error("Could not read uploaded file as media type: {}", multiPart.getMediaType()); + throw new 
BadRequestException(ex); + } + } + + /** + * Handles multipart PUT + * Files are written to storage before the RDF data is passed to the default PUT handler method. + * + * @param multiPart multipart form data + * @return HTTP response + */ + @PUT + @Consumes(MediaType.MULTIPART_FORM_DATA) + public Response putMultipart(FormDataMultiPart multiPart) + { + if (log.isDebugEnabled()) log.debug("MultiPart fields: {} body parts: {}", multiPart.getFields(), multiPart.getBodyParts()); + + try + { + Model model = parseModel(multiPart); + MessageBodyReader reader = getProviders().getMessageBodyReader(Model.class, null, null, com.atomgraph.core.MediaType.APPLICATION_NTRIPLES_TYPE); + if (reader instanceof ValidatingModelProvider validatingModelProvider) model = validatingModelProvider.processRead(model); + if (log.isDebugEnabled()) log.debug("POSTed Model size: {}", model.size()); + + new Skolemizer(getURI().toString()).apply(model); // skolemize before writing files (they require absolute URIs) + + int fileCount = writeFiles(model, getFileNameBodyPartMap(multiPart)); + if (log.isDebugEnabled()) log.debug("# of files uploaded: {} ", fileCount); + + return put(model, false, getURI()); + } + catch (URISyntaxException ex) + { + if (log.isErrorEnabled()) log.error("URI '{}' has syntax error in request with media type: {}", ex.getInput(), multiPart.getMediaType()); + throw new BadRequestException(ex); + } + catch (RuntimeIOException ex) + { + if (log.isErrorEnabled()) log.error("Could not read uploaded file as media type: {}", multiPart.getMediaType()); + throw new BadRequestException(ex); + } + } + + /** + * Implements DELETE method of SPARQL Graph Store Protocol. 
+ * + * @return response + */ + @DELETE + @Override + public Response delete() + { + if (!getAllowedMethods().contains(HttpMethod.DELETE)) + throw new WebApplicationException("Cannot delete document", Response.status(Response.Status.METHOD_NOT_ALLOWED).allow(getAllowedMethods()).build()); + + try + { + Model existingModel = getService().getGraphStoreClient().getModel(getURI().toString()); + + Response.ResponseBuilder rb = evaluatePreconditions(existingModel); + if (rb != null) return rb.build(); // preconditions not met + } + catch (NotFoundException ex) + { + //if (existingModel == null) existingModel = null; + } + + return super.delete(false, getURI()); + } + + /** + * Gets a diff of triples between two models and returns a set of their subject resources. + * + * @param beforeUpdateModel model before the update + * @param afterUpdateModel model after the update + * @return set of changed resources + */ + public Set getChangedResources(Model beforeUpdateModel, Model afterUpdateModel) + { + if (beforeUpdateModel == null) throw new IllegalArgumentException("Model before update cannot be null"); + if (afterUpdateModel == null) throw new IllegalArgumentException("Model after update cannot be null"); + + Model addedTriples = afterUpdateModel.difference(beforeUpdateModel); + Model removedTriples = beforeUpdateModel.difference(afterUpdateModel); + + Set changedResources = new HashSet<>(); + addedTriples.listStatements().forEachRemaining(statement -> { + changedResources.add(statement.getSubject()); + }); + removedTriples.listStatements().forEachRemaining(statement -> { + changedResources.add(statement.getSubject()); + }); + + return changedResources; + } + + /** + * Get internal response object. 
+ * + * @param model RDF model + * @param graphUri graph URI + * @return response + */ + public com.atomgraph.core.model.impl.Response getInternalResponse(Model model, URI graphUri) + { + return new com.atomgraph.core.model.impl.Response(getRequest(), + model, + getLastModified(model, graphUri), + getEntityTag(model), + getWritableMediaTypes(Model.class), + getLanguages(), + getEncodings(), + new HTMLMediaTypePredicate()); + } + + /** + * Get response builder. + * + * @param model RDF model + * @param graphUri graph URI + * @return response builder + */ + @Override + public Response.ResponseBuilder getResponseBuilder(Model model, URI graphUri) + { + return getInternalResponse(model, graphUri).getResponseBuilder(); + } + + /** + * Writes all files from the multipart RDF/POST request body. + * + * @param model model with RDF resources + * @param fileNameBodyPartMap a mapping of request part names and objects + * @return number of written files + */ + public int writeFiles(Model model, Map fileNameBodyPartMap) + { + if (model == null) throw new IllegalArgumentException("Model cannot be null"); + if (fileNameBodyPartMap == null) throw new IllegalArgumentException("Map cannot be null"); + + int count = 0; + ResIterator resIt = model.listResourcesWithProperty(NFO.fileName); + try + { + while (resIt.hasNext()) + { + Resource file = resIt.next(); + String fileName = file.getProperty(NFO.fileName).getString(); + FormDataBodyPart bodyPart = fileNameBodyPartMap.get(fileName); + + if (bodyPart != null) // bodyPart is null if nfo:fileName is a simple input and not a file input + { + // writing files has to go before post() as it can change model (e.g. 
add body part media type as dct:format) + if (log.isDebugEnabled()) log.debug("Writing FormDataBodyPart with fileName {} to file with URI {}", fileName, file.getURI()); + writeFile(file, bodyPart); + + count++; + } + } + } + finally + { + resIt.close(); + } + + return count; + } + + /** + * Writes a data stream to the upload folder. + * + * @param uri file URI + * @param base application's base URI + * @param is file input stream + * @return file + */ + public File writeFile(URI uri, URI base, InputStream is) + { + return writeFile(uri, base, getSystem().getUploadRoot(), is); + } + + /** + * Writes a data stream to a folder. + * + * @param uri file URI + * @param base application's base URI + * @param uploadRoot destination folder URI + * @param is file input stream + * @return file + */ + public File writeFile(URI uri, URI base, URI uploadRoot, InputStream is) + { + if (uri == null) throw new IllegalArgumentException("File URI cannot be null"); + if (!uri.isAbsolute()) throw new IllegalArgumentException("File URI must be absolute"); + if (base == null) throw new IllegalArgumentException("Base URI cannot be null"); + if (uploadRoot == null) throw new IllegalArgumentException("Upload root URI cannot be null"); + + URI relative = base.relativize(uri); + if (log.isDebugEnabled()) log.debug("Upload folder root URI: {}", uploadRoot); + File file = new File(uploadRoot.resolve(relative)); + + return writeFile(file, is); + } + + /** + * Writes data stream to a file destination. 
+ * + * @param file destination + * @param is input stream + * @return file + */ + public File writeFile(File file, InputStream is) + { + if (file == null) throw new IllegalArgumentException("File cannot be null"); + if (is == null) throw new IllegalArgumentException("File InputStream cannot be null"); + + try (FileOutputStream fos = new FileOutputStream(file)) + { + if (log.isDebugEnabled()) log.debug("Writing input stream: {} to file: {}", is, file); + FileChannel destination = fos.getChannel(); + destination.transferFrom(Channels.newChannel(is), 0, 104857600); + return file; + } + catch (IOException ex) + { + if (log.isErrorEnabled()) log.error("Error writing file: {}", file); + throw new InternalServerErrorException(ex); + } + } + + /** + * Writes the specified part of the multipart request body as file and returns the file. + * File's RDF resource is used to attach metadata about the file, such as format and SHA1 hash sum. + * + * @param resource file's RDF resource + * @param bodyPart file's body part + * @return written file + */ + public File writeFile(Resource resource, FormDataBodyPart bodyPart) + { + if (resource == null) throw new IllegalArgumentException("File Resource cannot be null"); + if (!resource.isURIResource()) throw new IllegalArgumentException("File Resource must have a URI"); + if (bodyPart == null) throw new IllegalArgumentException("FormDataBodyPart cannot be null"); + + try (InputStream is = bodyPart.getEntityAs(InputStream.class); + DigestInputStream dis = new DigestInputStream(is, getMessageDigest())) + { + dis.getMessageDigest().reset(); + File tempFile = File.createTempFile("tmp", null); + try (FileOutputStream fos = new FileOutputStream(tempFile); + FileChannel destination = fos.getChannel()) + { + destination.transferFrom(Channels.newChannel(dis), 0, 104857600); + } + String sha1Hash = Hex.encodeHexString(dis.getMessageDigest().digest()); // BigInteger seems to have an issue when the leading hex digit is 0 + if
(log.isDebugEnabled()) log.debug("Wrote file: {} with SHA1 hash: {}", tempFile, sha1Hash); + + resource.addLiteral(FOAF.sha1, sha1Hash); + // user could have specified an explicit media type; otherwise - use the media type that the browser has sent + if (!resource.hasProperty(DCTerms.format)) resource.addProperty(DCTerms.format, com.atomgraph.linkeddatahub.MediaType.toResource(bodyPart.getMediaType())); + + URI sha1Uri = getUploadsUriBuilder().path("{sha1}").build(sha1Hash); + if (log.isDebugEnabled()) log.debug("Renaming resource: {} to SHA1 based URI: {}", resource, sha1Uri); + ResourceUtils.renameResource(resource, sha1Uri.toString()); + + try (FileInputStream fis = new FileInputStream(tempFile)) + { + return writeFile(sha1Uri, getUriInfo().getBaseUri(), fis); + } + } + catch (IOException ex) + { + if (log.isErrorEnabled()) log.error("File I/O error", ex); + throw new InternalServerErrorException(ex); + } + } + + /** + * Submits imports for the given model. + * + * @param model the RDF model + */ + public void submitImports(Model model) + { + if (model == null) throw new IllegalArgumentException("Model cannot be null"); + + ExtendedIterator it = model.listSubjectsWithProperty(RDF.type, LDH.CSVImport). + andThen(model.listSubjectsWithProperty(RDF.type, LDH.RDFImport)). + filterKeep(_import -> { return _import.canAs(CSVImport.class) || _import.canAs(RDFImport.class); }); // canAs(Import.class) would require InfModel + try + { + Service adminService = getApplication().canAs(EndUserApplication.class) ? getApplication().as(EndUserApplication.class).getAdminApplication().getService() : null; + GraphStoreClient gsc = GraphStoreClient.create(getSystem().getImportClient(), getSystem().getMediaTypes()). 
+ delegation(getUriInfo().getBaseUri(), getAgentContext().orElse(null)); + + while (it.hasNext()) + { + Resource _import = it.next(); + + // start the import asynchronously + if (_import.canAs(CSVImport.class)) + getSystem().submitImport(_import.as(CSVImport.class), getApplication(), getApplication().getService(), adminService, getUriInfo().getBaseUri().toString(), gsc); + if (_import.canAs(RDFImport.class)) + getSystem().submitImport(_import.as(RDFImport.class), getApplication(), getApplication().getService(), adminService, getUriInfo().getBaseUri().toString(), gsc); + } + } + finally + { + it.close(); + } + } + + /** + * Returns the date of last modification of the specified URI resource. + * + * @param model resource model + * @param graphUri resource URI + * @return modification date + */ + @Override + public Date getLastModified(Model model, URI graphUri) + { + if (graphUri == null) return null; + + return getLastModified(model.createResource(graphUri.toString())); + } + + /** + * Returns the date of last modification of the specified resource.
+ * + * @param resource resource + * @return modification date + */ + public Date getLastModified(Resource resource) + { + if (resource == null) throw new IllegalArgumentException("Resource cannot be null"); + + List dates = new ArrayList<>(); + + StmtIterator createdIt = resource.listProperties(DCTerms.created); + try + { + while (createdIt.hasNext()) + { + Statement stmt = createdIt.next(); + if (stmt.getObject().isLiteral() && stmt.getObject().asLiteral().getValue() instanceof XSDDateTime) + dates.add(((XSDDateTime)stmt.getObject().asLiteral().getValue()).asCalendar().getTime()); + } + } + finally + { + createdIt.close(); + } + + StmtIterator modifiedIt = resource.listProperties(DCTerms.modified); + try + { + while (modifiedIt.hasNext()) + { + Statement stmt = modifiedIt.next(); + if (stmt.getObject().isLiteral() && stmt.getObject().asLiteral().getValue() instanceof XSDDateTime) + dates.add(((XSDDateTime)stmt.getObject().asLiteral().getValue()).asCalendar().getTime()); + } + } + finally + { + modifiedIt.close(); + } + + if (!dates.isEmpty()) return Collections.max(dates); + + return null; + } + + /** + * Gets a list of media types that are writable for a message body class. + * + * @param clazz message body class, normally Dataset.class or Model.class + * @return list of media types + */ + @Override + public List getWritableMediaTypes(Class clazz) + { + // restrict writable MediaTypes to the requested one (usually by RDF export feature) + if (getUriInfo().getQueryParameters().containsKey(AC.accept.getLocalName())) // TO-DO: move to ResourceFilter? + { + String accept = getUriInfo().getQueryParameters().getFirst(AC.accept.getLocalName()); + + MediaType mediaType = MediaType.valueOf(accept).withCharset(StandardCharsets.UTF_8.name()); // set charset=UTF-8 + return Arrays.asList(mediaType); + } + + return super.getWritableMediaTypes(clazz); + } + + /** + * Validates model against SPIN and SHACL constraints.
+ * + * @param model RDF model + * @return validated model + */ + public Model validate(Model model) + { + MessageBodyReader reader = getProviders().getMessageBodyReader(Model.class, null, null, com.atomgraph.core.MediaType.APPLICATION_NTRIPLES_TYPE); + if (reader instanceof ValidatingModelProvider validatingModelProvider) return validatingModelProvider.processRead(model); + + throw new InternalServerErrorException("Could not obtain ValidatingModelProvider instance"); + } + + /** + * Evaluates the state of the given graph against the request preconditions. + * Checks the last modified date (if any) and calculates an ETag value. + * + * @param model RDF model + * @return {@code jakarta.ws.rs.core.Response.ResponseBuilder} instance if preconditions are not met; null if they are met. + */ + public Response.ResponseBuilder evaluatePreconditions(Model model) + { + return getInternalResponse(model, getURI()).evaluatePreconditions(); } /** @@ -186,6 +962,27 @@ public Map getFileNameBodyPartMap(FormDataMultiPart mu return fileNameBodyPartMap; } + /** + * List allowed HTTP methods for the current graph URI. + * Exceptions apply to the application's Root document, owner's WebID document, and secretary's WebID document. + * + * @return list of HTTP methods + */ + public Set getAllowedMethods() + { + return allowedMethods; + } + + /** + * Returns SPARQL endpoint accessor. + * + * @return endpoint accessor + */ + public EndpointAccessor getEndpointAccessor() + { + return getService().getEndpointAccessor(); + } + + /** * Returns a list of supported languages.
* diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/Dispatcher.java b/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/Dispatcher.java index 76a0a0026..c9e1eb5de 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/Dispatcher.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/Dispatcher.java @@ -27,7 +27,6 @@ import com.atomgraph.linkeddatahub.resource.admin.pkg.UninstallPackage; import com.atomgraph.linkeddatahub.resource.Settings; import com.atomgraph.linkeddatahub.resource.admin.SignUp; -import com.atomgraph.linkeddatahub.resource.Graph; import com.atomgraph.linkeddatahub.resource.acl.Access; import com.atomgraph.linkeddatahub.resource.acl.AccessRequest; import java.util.Optional; @@ -269,7 +268,7 @@ public Class getSettingsEndpoint() */ public Class getDocumentClass() { - return Graph.class; + return DirectGraphStoreImpl.class; } /** From af3dc6ae724d661bdf5fb107b7e5710f043112b9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martynas=20Jusevi=C4=8Dius?= Date: Tue, 13 Jan 2026 19:57:02 +0100 Subject: [PATCH 6/7] `Item` endpoint fix --- .../linkeddatahub/resource/Transform.java | 143 ++++++++++-- .../linkeddatahub/resource/upload/Item.java | 208 ++++++++++++------ .../resource/upload/sha1/Item.java | 98 --------- .../server/model/impl/Dispatcher.java | 2 +- 4 files changed, 266 insertions(+), 185 deletions(-) delete mode 100644 src/main/java/com/atomgraph/linkeddatahub/resource/upload/sha1/Item.java diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/Transform.java b/src/main/java/com/atomgraph/linkeddatahub/resource/Transform.java index 659450e2d..19366b18c 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/Transform.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/Transform.java @@ -16,12 +16,10 @@ */ package com.atomgraph.linkeddatahub.resource; -import com.atomgraph.client.util.DataManager; import com.atomgraph.core.MediaTypes; import 
com.atomgraph.core.vocabulary.SD; import com.atomgraph.linkeddatahub.client.GraphStoreClient; import com.atomgraph.linkeddatahub.imports.QueryLoader; -import com.atomgraph.linkeddatahub.model.Service; import com.atomgraph.linkeddatahub.server.io.ValidatingModelProvider; import com.atomgraph.linkeddatahub.server.model.impl.DirectGraphStoreImpl; import com.atomgraph.linkeddatahub.server.security.AgentContext; @@ -36,20 +34,19 @@ import jakarta.inject.Inject; import jakarta.ws.rs.BadRequestException; import jakarta.ws.rs.Consumes; -import jakarta.ws.rs.DefaultValue; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.NotAllowedException; import jakarta.ws.rs.POST; -import jakarta.ws.rs.QueryParam; import jakarta.ws.rs.client.Entity; +import jakarta.ws.rs.container.ResourceContext; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.MediaType; import jakarta.ws.rs.core.Request; import jakarta.ws.rs.core.Response; -import jakarta.ws.rs.core.SecurityContext; import jakarta.ws.rs.core.UriInfo; import jakarta.ws.rs.ext.MessageBodyReader; import jakarta.ws.rs.ext.Providers; import org.apache.jena.atlas.RuntimeIOException; -import org.apache.jena.ontology.Ontology; import org.apache.jena.query.Query; import org.apache.jena.query.QueryExecution; import org.apache.jena.query.Syntax; @@ -67,39 +64,69 @@ * * @author {@literal Martynas Jusevičius } */ -public class Transform extends DirectGraphStoreImpl +public class Transform { private static final Logger log = LoggerFactory.getLogger(Transform.class); + private final UriInfo uriInfo; + private final MediaTypes mediaTypes; + private final com.atomgraph.linkeddatahub.apps.model.Application application; + private final Optional agentContext; + private final Providers providers; + private final com.atomgraph.linkeddatahub.Application system; + private final ResourceContext resourceContext; + /** * Constructs endpoint for synchronous RDF data imports. 
* * @param request current request * @param uriInfo current URI info * @param mediaTypes supported media types - * @param application matched application - * @param ontology matched application's ontology - * @param service matched application's service + * @param application current application * @param providers JAX-RS providers * @param system system application - * @param securityContext JAX-RS security context * @param agentContext authenticated agent's context - * @param dataManager RDF data manager + * @param resourceContext resource context */ @Inject public Transform(@Context Request request, @Context UriInfo uriInfo, MediaTypes mediaTypes, - com.atomgraph.linkeddatahub.apps.model.Application application, Optional ontology, Optional service, - @Context SecurityContext securityContext, Optional agentContext, + com.atomgraph.linkeddatahub.apps.model.Application application, + Optional agentContext, @Context Providers providers, com.atomgraph.linkeddatahub.Application system, - DataManager dataManager) + @Context ResourceContext resourceContext) + { + this.uriInfo = uriInfo; + this.mediaTypes = mediaTypes; + this.application = application; + this.agentContext = agentContext; + this.providers = providers; + this.system = system; + this.resourceContext = resourceContext; + } + + /** + * Rejects GET requests on this endpoint. + * + * @return never returns normally + * @throws NotAllowedException always thrown to indicate GET is not supported + */ + @GET + public Response get() { - super(request, uriInfo, mediaTypes, application, ontology, service, securityContext, agentContext, providers, system); + throw new NotAllowedException("GET is not allowed on this endpoint"); } + /** + * Transforms RDF data from a remote source using a SPARQL CONSTRUCT query and adds it to a target graph. + * Validates URIs to prevent SSRF attacks before processing. 
+ * + * @param model RDF model containing transformation parameters (dct:source, sd:name, spin:query) + * @return HTTP response from forwarding the transformed data to the target graph + * @throws BadRequestException if required parameters are missing or invalid + */ @POST - @Override - public Response post(Model model, @QueryParam("default") @DefaultValue("false") Boolean defaultGraph, @QueryParam("graph") URI graphUri) + public Response post(Model model) { ResIterator it = model.listSubjectsWithProperty(DCTerms.source); try @@ -145,24 +172,24 @@ public Response post(Model model, @QueryParam("default") @DefaultValue("false") * Handles multipart requests with RDF files. * * @param multiPart multipart request object - * @param defaultGraph true if default graph was specified - * @param graphUri graph name * @return response */ @POST @Consumes(MediaType.MULTIPART_FORM_DATA) - public Response postMultipart(FormDataMultiPart multiPart, @QueryParam("default") @DefaultValue("false") Boolean defaultGraph, @QueryParam("graph") URI graphUri) + public Response postMultipart(FormDataMultiPart multiPart) { if (log.isDebugEnabled()) log.debug("MultiPart fields: {} body parts: {}", multiPart.getFields(), multiPart.getBodyParts()); try { - Model model = parseModel(multiPart); // do not skolemize because we don't know the graphUri yet + DirectGraphStoreImpl graphStore = getResourceContext().getResource(DirectGraphStoreImpl.class); + + Model model = graphStore.parseModel(multiPart); // do not skolemize because we don't know the graphUri yet MessageBodyReader reader = getProviders().getMessageBodyReader(Model.class, null, null, com.atomgraph.core.MediaType.APPLICATION_NTRIPLES_TYPE); if (reader instanceof ValidatingModelProvider validatingModelProvider) model = validatingModelProvider.processRead(model); if (log.isDebugEnabled()) log.debug("POSTed Model size: {}", model.size()); - return postFileBodyPart(model, getFileNameBodyPartMap(multiPart)); // do not write the uploaded file -- 
instead append its triples/quads + return postFileBodyPart(model, graphStore.getFileNameBodyPartMap(multiPart)); // do not write the uploaded file -- instead append its triples/quads } catch (URISyntaxException ex) { @@ -287,4 +314,74 @@ protected static void validateNotInternalURL(URI uri) } } + /** + * Returns the supported media types. + * + * @return media types + */ + public MediaTypes getMediaTypes() + { + return mediaTypes; + } + + /** + * Returns the current application. + * + * @return application resource + */ + public com.atomgraph.linkeddatahub.apps.model.Application getApplication() + { + return application; + } + + /** + * Returns the current URI info. + * + * @return URI info + */ + public UriInfo getUriInfo() + { + return uriInfo; + } + + /** + * Returns the authenticated agent's context. + * + * @return optional agent context + */ + public Optional getAgentContext() + { + return agentContext; + } + + /** + * Returns the registry of JAX-RS providers. + * + * @return JAX-RS providers registry + */ + public Providers getProviders() + { + return providers; + } + + /** + * Returns the system application. + * + * @return system application + */ + public com.atomgraph.linkeddatahub.Application getSystem() + { + return system; + } + + /** + * Returns the JAX-RS resource context. 
+ * + * @return resource context + */ + public ResourceContext getResourceContext() + { + return resourceContext; + } + } diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/upload/Item.java b/src/main/java/com/atomgraph/linkeddatahub/resource/upload/Item.java index 130a03609..90fae8a00 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/upload/Item.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/upload/Item.java @@ -29,9 +29,6 @@ import com.atomgraph.core.MediaTypes; import com.atomgraph.linkeddatahub.model.Service; import com.atomgraph.linkeddatahub.server.io.FileRangeOutput; -import com.atomgraph.linkeddatahub.server.model.impl.DirectGraphStoreImpl; -import com.atomgraph.linkeddatahub.server.security.AgentContext; -import java.util.Collections; import java.util.Date; import java.util.Optional; import jakarta.annotation.PostConstruct; @@ -43,13 +40,13 @@ import jakarta.ws.rs.core.EntityTag; import jakarta.ws.rs.core.HttpHeaders; import jakarta.ws.rs.core.Response.Status; -import jakarta.ws.rs.core.SecurityContext; import jakarta.ws.rs.core.UriInfo; -import org.apache.jena.ontology.Ontology; +import jakarta.ws.rs.core.Variant.VariantListBuilder; import org.apache.jena.query.QueryFactory; import org.apache.jena.rdf.model.Model; import org.apache.jena.rdf.model.ModelFactory; import org.apache.jena.rdf.model.Resource; +import org.apache.jena.sparql.vocabulary.FOAF; import org.apache.jena.vocabulary.DCTerms; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -59,8 +56,9 @@ * * @author Martynas Jusevičius {@literal } */ -public class Item extends DirectGraphStoreImpl +public class Item { + private static final Logger log = LoggerFactory.getLogger(Item.class); private static final String ACCEPT_RANGES = "Accept-Ranges"; @@ -69,7 +67,11 @@ public class Item extends DirectGraphStoreImpl private static final String CONTENT_RANGE = "Content-Range"; private static final int CHUNK_SIZE = 1024 * 1024; // 1MB chunks + private final 
Request request; + private final UriInfo uriInfo; + private final Service service; private final Resource resource; + private final com.atomgraph.linkeddatahub.Application system; private final HttpHeaders httpHeaders; /** @@ -78,26 +80,24 @@ public class Item extends DirectGraphStoreImpl * @param request current request * @param uriInfo URI information of the current request * @param mediaTypes a registry of readable/writable media types - * @param application current application - * @param ontology ontology of the current application * @param service SPARQL service of the current application - * @param securityContext JAX-RS security context - * @param agentContext authenticated agent's context * @param providers JAX-RS provider registry * @param system system application * @param httpHeaders request headers */ @Inject public Item(@Context Request request, @Context UriInfo uriInfo, MediaTypes mediaTypes, - com.atomgraph.linkeddatahub.apps.model.Application application, Optional ontology, Optional service, - @Context SecurityContext securityContext, Optional agentContext, + Optional service, @Context Providers providers, com.atomgraph.linkeddatahub.Application system, @Context HttpHeaders httpHeaders) { - super(request, uriInfo, mediaTypes, application, ontology, service, securityContext, agentContext, providers, system); + this.request = request; + this.uriInfo = uriInfo; + this.service = service.get(); this.resource = ModelFactory.createDefaultModel().createResource(uriInfo.getAbsolutePath().toString()); - if (log.isDebugEnabled()) log.debug("Constructing {}", getClass()); + this.system = system; this.httpHeaders = httpHeaders; + if (log.isDebugEnabled()) log.debug("Constructing {}", getClass()); } /** @@ -108,38 +108,52 @@ public void init() { getResource().getModel().add(describe()); } - + + /** + * Handles GET requests for uploaded files. + * Evaluates HTTP preconditions and serves file content with appropriate Content-Security-Policy headers. 
+ * + * @return HTTP response with file content or 304 Not Modified + */ @GET - @Override public Response get() { return getResponseBuilder(getResource().getModel(), getURI()).build(); } - - @Override + + /** + * Builds HTTP response for file requests. + * Handles content negotiation, HTTP precondition evaluation (ETag-based caching), + * byte-range requests, and applies Content-Security-Policy headers. + * + * @param model RDF model describing the file + * @param graphUri the graph URI (not used for binary file responses) + * @return response builder configured for file serving + */ public ResponseBuilder getResponseBuilder(Model model, URI graphUri) { // do not pass language list as languages do not apply to binary files - List variants = com.atomgraph.core.model.impl.Response.getVariants(getWritableMediaTypes(Model.class), Collections.emptyList(), getEncodings()); + List variants = VariantListBuilder.newInstance().mediaTypes(getMediaType()).build(); Variant variant = getRequest().selectVariant(variants); - if (variant == null) + if (variant == null || !getMediaType().isCompatible(variant.getMediaType())) { if (log.isTraceEnabled()) log.trace("Requested Variant {} is not on the list of acceptable Response Variants: {}", variant, variants); throw new NotAcceptableException(); } - // respond with file content if Variant is compatible with the File's MediaType. 
otherwise, send RDF - if (getMediaType().isCompatible(variant.getMediaType())) - { - URI fileURI = getSystem().getUploadRoot().resolve(getUriInfo().getPath()); - File file = new File(fileURI); + EntityTag entityTag = getEntityTag(); + ResponseBuilder rb = getRequest().evaluatePreconditions(entityTag); + if (rb != null) return rb; // file not modified + + URI fileURI = getSystem().getUploadRoot().resolve(getUriInfo().getPath()); + File file = new File(fileURI); - if (!file.exists()) throw new NotFoundException(new FileNotFoundException("File '" + getUriInfo().getPath() + "' not found")); + if (!file.exists()) throw new NotFoundException(new FileNotFoundException("File '" + getUriInfo().getPath() + "' not found")); + + if (getHttpHeaders().getRequestHeaders().containsKey(RANGE)) + { + String range = getHttpHeaders().getHeaderString(RANGE); - if (getHttpHeaders().getRequestHeaders().containsKey(RANGE)) - { - String range = getHttpHeaders().getHeaderString(RANGE); - // if (getHttpHeaders().getRequestHeaders().containsKey(IF_RANGE)) { // String ifRangeHeader = getHttpHeaders().getHeaderString(IF_RANGE); // @@ -155,34 +169,32 @@ public ResponseBuilder getResponseBuilder(Model model, URI graphUri) //// } // } // else - { - FileRangeOutput rangeOutput = getFileRangeOutput(file, range); - final long to = rangeOutput.getLength() + rangeOutput.getFrom(); - String contentRangeValue = String.format("bytes %d-%d/%d", rangeOutput.getFrom(), to - 1, rangeOutput.getFile().length()); - - return super.getResponseBuilder(model, graphUri). - status(Status.PARTIAL_CONTENT). - entity(rangeOutput). - type(variant.getMediaType()). - lastModified(getLastModified(file)). - header(HttpHeaders.CONTENT_LENGTH, rangeOutput.getLength()). // should override Transfer-Encoding: chunked - header(ACCEPT_RANGES, BYTES_RANGE). - header(CONTENT_RANGE, contentRangeValue). 
- header("Content-Security-Policy", "default-src 'none'; sandbox"); // LNK-011 fix: prevent XSS in uploaded HTML files - } - } + { + FileRangeOutput rangeOutput = getFileRangeOutput(file, range); + final long to = rangeOutput.getLength() + rangeOutput.getFrom(); + String contentRangeValue = String.format("bytes %d-%d/%d", rangeOutput.getFrom(), to - 1, rangeOutput.getFile().length()); - return super.getResponseBuilder(model, graphUri). - entity(file). - type(variant.getMediaType()). - lastModified(getLastModified(file)). - header(HttpHeaders.CONTENT_LENGTH, file.length()). // should override Transfer-Encoding: chunked - header(ACCEPT_RANGES, BYTES_RANGE). - header("Content-Security-Policy", "default-src 'none'; sandbox"); // LNK-011 fix: prevent XSS in uploaded HTML files - //header("Content-Disposition", "attachment; filename=\"" + getRequiredProperty(NFO.fileName).getString() + "\""). + return Response.status(Status.PARTIAL_CONTENT). + entity(rangeOutput). + type(variant.getMediaType()). + tag(entityTag). + lastModified(getLastModified(file)). + header(HttpHeaders.CONTENT_LENGTH, rangeOutput.getLength()). // should override Transfer-Encoding: chunked + header(ACCEPT_RANGES, BYTES_RANGE). + header(CONTENT_RANGE, contentRangeValue). + header("Content-Security-Policy", "default-src 'none'; sandbox"); // LNK-011 fix: prevent XSS in uploaded HTML files + } } - - return super.getResponseBuilder(model, graphUri); + + return Response.ok(). + entity(file). + type(variant.getMediaType()). + tag(entityTag). + lastModified(getLastModified(file)). + header(HttpHeaders.CONTENT_LENGTH, file.length()). // should override Transfer-Encoding: chunked + header(ACCEPT_RANGES, BYTES_RANGE). + header("Content-Security-Policy", "default-src 'none'; sandbox"); // LNK-011 fix: prevent XSS in uploaded HTML files + //header("Content-Disposition", "attachment; filename=\"" + getRequiredProperty(NFO.fileName).getString() + "\""). 
} /** @@ -235,11 +247,15 @@ public FileRangeOutput getFileRangeOutput(File file, String range) final long length = to - from; return new FileRangeOutput(file, from, length); } - - @Override - public EntityTag getEntityTag(Model model) + + /** + * Returns the ETag for HTTP caching based on the file's SHA1 hash. + * + * @return entity tag for cache validation + */ + public EntityTag getEntityTag() { - return null; // disable ETag based on Model hash + return new EntityTag(getSHA1Hash(getResource())); } /** @@ -269,8 +285,13 @@ public jakarta.ws.rs.core.MediaType getMediaType() return com.atomgraph.linkeddatahub.MediaType.valueOf(format); } - - @Override + + /** + * Returns the list of media types that can be used to write this file's content. + * + * @param clazz the class type (not used, file has single media type) + * @return list containing the file's media type + */ public List getWritableMediaTypes(Class clazz) { return List.of(getMediaType()); @@ -287,6 +308,57 @@ public Model describe() return getService().getSPARQLClient().loadModel(QueryFactory.create("DESCRIBE <" + getURI() + ">")); } + /** + * Returns SHA1 property value of the specified resource. + * + * @param resource RDF resource + * @return SHA1 hash string + */ + public String getSHA1Hash(Resource resource) + { + return resource.getRequiredProperty(FOAF.sha1).getString(); + } + + /** + * Returns the absolute URI of this file resource. + * + * @return the file's URI + */ + public URI getURI() + { + return getUriInfo().getAbsolutePath(); + } + + /** + * Returns the current JAX-RS request. + * + * @return request object + */ + public Request getRequest() + { + return request; + } + + /** + * Returns the URI information of the current request. + * + * @return URI info + */ + public UriInfo getUriInfo() + { + return uriInfo; + } + + /** + * Returns the SPARQL service of the current application. 
+ * + * @return SPARQL service + */ + public Service getService() + { + return service; + } + /** * Returns RDF resource of this file. * @@ -296,7 +368,17 @@ public Resource getResource() { return resource; } - + + /** + * Returns the system application instance. + * + * @return system application + */ + public com.atomgraph.linkeddatahub.Application getSystem() + { + return system; + } + /** * Returns HTTP headers of the current request. * diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/upload/sha1/Item.java b/src/main/java/com/atomgraph/linkeddatahub/resource/upload/sha1/Item.java deleted file mode 100644 index a0120e520..000000000 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/upload/sha1/Item.java +++ /dev/null @@ -1,98 +0,0 @@ -/** - * Copyright 2019 Martynas Jusevičius - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ -package com.atomgraph.linkeddatahub.resource.upload.sha1; - -import jakarta.ws.rs.core.Context; -import jakarta.ws.rs.core.Request; -import jakarta.ws.rs.ext.Providers; -import com.atomgraph.core.MediaTypes; -import com.atomgraph.linkeddatahub.model.Service; -import com.atomgraph.linkeddatahub.server.security.AgentContext; -import java.io.File; -import java.util.Date; -import java.util.Optional; -import jakarta.inject.Inject; -import jakarta.ws.rs.core.EntityTag; -import jakarta.ws.rs.core.HttpHeaders; -import jakarta.ws.rs.core.SecurityContext; -import jakarta.ws.rs.core.UriInfo; -import org.apache.jena.ontology.Ontology; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.Resource; -import org.apache.jena.sparql.vocabulary.FOAF; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * JAX-RS resource that serves content-addressed (using SHA1 hash) file data. - * - * @author Martynas Jusevičius {@literal } - */ -public class Item extends com.atomgraph.linkeddatahub.resource.upload.Item -{ - private static final Logger log = LoggerFactory.getLogger(Item.class); - - /** - * Constructs resource. 
- * - * @param request current request - * @param uriInfo URI information of the current request - * @param mediaTypes a registry of readable/writable media types - * @param application current application - * @param ontology ontology of the current application - * @param service SPARQL service of the current application - * @param securityContext JAX-RS security context - * @param agentContext authenticated agent's context - * @param providers JAX-RS provider registry - * @param system system application - * @param httpHeaders request headers - */ - @Inject - public Item(@Context Request request, @Context UriInfo uriInfo, MediaTypes mediaTypes, - com.atomgraph.linkeddatahub.apps.model.Application application, Optional<Ontology> ontology, Optional<Service> service, - @Context SecurityContext securityContext, Optional<AgentContext> agentContext, - @Context Providers providers, com.atomgraph.linkeddatahub.Application system, - @Context HttpHeaders httpHeaders) - { - super(request, uriInfo, mediaTypes, application, ontology, service, securityContext, agentContext, providers, system, httpHeaders); - if (log.isDebugEnabled()) log.debug("Constructing {}", getClass()); - } - - @Override - protected Date getLastModified(File file) - { - return null; // disable Last-Modified because we're using ETag here - } - - @Override - public EntityTag getEntityTag(Model model) - { - return new EntityTag(getSHA1Hash(getResource())); - } - - /** - * Returns SHA1 property value of the specified resource. 
- * - * @param resource RDF resource - * @return SHA1 hash string - */ - public String getSHA1Hash(Resource resource) - { - return resource.getRequiredProperty(FOAF.sha1).getString(); - } - -} diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/Dispatcher.java b/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/Dispatcher.java index c9e1eb5de..1ad8f8002 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/Dispatcher.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/Dispatcher.java @@ -180,7 +180,7 @@ public Class getAccessRequest() @Path("uploads/{sha1sum}") public Class getFileItem() { - return getProxyClass().orElse(com.atomgraph.linkeddatahub.resource.upload.sha1.Item.class); + return getProxyClass().orElse(com.atomgraph.linkeddatahub.resource.upload.Item.class); } /** From b66575827a662a3f59e1bcabc8a9c9ff4cb24516 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martynas=20Jusevi=C4=8Dius?= Date: Tue, 13 Jan 2026 21:53:31 +0100 Subject: [PATCH 7/7] `PATCH` fix for empty models --- .../linkeddatahub/server/model/impl/DirectGraphStoreImpl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/DirectGraphStoreImpl.java b/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/DirectGraphStoreImpl.java index 2693295d5..e5a73fcd7 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/DirectGraphStoreImpl.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/DirectGraphStoreImpl.java @@ -404,7 +404,7 @@ public Response patch(UpdateRequest updateRequest) changedModel.add(existingModel.listStatements(resource, null, (RDFNode) null)); // if PATCH results in an empty model, treat it as a DELETE request - if (changedModel.isEmpty()) return delete(Boolean.FALSE, getURI()); + if (changedModel.isEmpty()) return delete(); validate(changedModel); // this would normally be done transparently by the 
ValidatingModelProvider put(dataset.getDefaultModel(), Boolean.FALSE, getURI());