├── .dockerignore
├── .github
│   └── workflows
│       └── http-tests.yml
├── .gitignore
├── Dockerfile
├── LICENSE
├── README.md
├── context.xsl
├── entrypoint.sh
├── examples
│   ├── fuseki
│   │   ├── dataset.ttl
│   │   ├── docker-compose.yml
│   │   └── nginx.conf.template
│   └── wikidata
│       ├── docker-compose.yml
│       ├── location-mapping.n3
│       └── wikidata.ttl
├── hooks
│   └── post_push
├── http-tests
│   ├── config
│   │   └── ds.ttl
│   ├── dataset.trig
│   ├── dev.log4j.properties
│   ├── docker-compose.yml
│   ├── graph-store-protocol
│   │   ├── DELETE-default.sh
│   │   ├── DELETE-named-404.sh
│   │   ├── DELETE-named.sh
│   │   ├── GET-default-304.sh
│   │   ├── GET-default-ntriples.sh
│   │   ├── GET-default.sh
│   │   ├── GET-named-304.sh
│   │   ├── GET-named-404.sh
│   │   ├── GET-named-ntriples.sh
│   │   ├── GET-named.sh
│   │   ├── POST-default.sh
│   │   ├── POST-named-existing.sh
│   │   ├── POST-named-new.sh
│   │   ├── PUT-default.sh
│   │   ├── PUT-named-existing.sh
│   │   ├── PUT-named-new.sh
│   │   └── direct
│   │       ├── DELETE-404.sh
│   │       ├── DELETE.sh
│   │       ├── GET-304.sh
│   │       ├── GET-404.sh
│   │       ├── GET-ntriples.sh
│   │       ├── GET.sh
│   │       ├── POST-404.sh
│   │       ├── POST-ntriples.sh
│   │       ├── PUT-new.sh
│   │       └── PUT-ntriples.sh
│   ├── linked-data-templates
│   │   ├── ct
│   │   │   ├── DELETE-404.sh
│   │   │   ├── DELETE-with-param.sh
│   │   │   ├── DELETE.sh
│   │   │   ├── GET-304.sh
│   │   │   ├── GET-404.sh
│   │   │   ├── GET-406.sh
│   │   │   ├── GET-base-404.sh
│   │   │   ├── GET-etag-header.sh
│   │   │   ├── GET-link-headers.sh
│   │   │   ├── GET-ntriples.sh
│   │   │   ├── GET-with-param.sh
│   │   │   ├── GET.sh
│   │   │   ├── POST-400.sh
│   │   │   ├── POST-415.sh
│   │   │   ├── POST-ntriples.sh
│   │   │   ├── POST-rdfxml.sh
│   │   │   ├── POST-to-non-existing.sh
│   │   │   ├── PUT-415.sh
│   │   │   ├── PUT-base.sh
│   │   │   ├── PUT-ntriples.sh
│   │   │   ├── PUT-rdfxml.sh
│   │   │   └── PUT-to-non-existing.sh
│   │   ├── custom
│   │   │   ├── DELETE-invalid-update-500.sh
│   │   │   ├── DELETE-missing-update-501.sh
│   │   │   ├── DELETE-non-match-404.sh
│   │   │   ├── DELETE-optional-param-set-404.sh
│   │   │   ├── DELETE-optional-param-set.sh
│   │   │   ├── GET-304.sh
│   │   │   ├── GET-base-404.sh
│   │   │   ├── GET-inherited-param-not-set.sh
│   │   │   ├── GET-inherited-param-set-404.sh
│   │   │   ├── GET-inherited-param-set.sh
│   │   │   ├── GET-invalid-query-500.sh
│   │   │   ├── GET-mandatory-param-not-set-400.sh
│   │   │   ├── GET-mandatory-param-set-404.sh
│   │   │   ├── GET-mandatory-param-set.sh
│   │   │   ├── GET-match-extended.sh
│   │   │   ├── GET-match-super.sh
│   │   │   ├── GET-missing-query-500.sh
│   │   │   ├── GET-non-match-404-error.sh
│   │   │   ├── GET-non-match-404.sh
│   │   │   ├── GET-optional-default-param.sh
│   │   │   ├── GET-optional-param-not-set.sh
│   │   │   ├── GET-optional-param-set-404.sh
│   │   │   ├── GET-optional-param-set.sh
│   │   │   ├── GET-this-unbound.sh
│   │   │   ├── GET-value-type-param-set.sh
│   │   │   ├── GET.sh
│   │   │   ├── POST-non-match.sh
│   │   │   ├── POST-shacl-constraint-422.sh
│   │   │   ├── POST-shacl-constraint.sh
│   │   │   ├── POST-spin-constraint-422.sh
│   │   │   ├── POST-spin-constraint.sh
│   │   │   ├── PUT-base.sh
│   │   │   ├── PUT-non-match.sh
│   │   │   ├── PUT-shacl-constraint-422.sh
│   │   │   ├── PUT-shacl-constraint.sh
│   │   │   ├── PUT-spin-constraint-422.sh
│   │   │   ├── PUT-spin-constraint.sh
│   │   │   ├── location-mapping.n3
│   │   │   └── ontology.ttl
│   │   └── ngt
│   │       ├── DELETE-404.sh
│   │       ├── DELETE-with-param.sh
│   │       ├── DELETE.sh
│   │       ├── GET-304.sh
│   │       ├── GET-404.sh
│   │       ├── GET-406.sh
│   │       ├── GET-base-404.sh
│   │       ├── GET-etag-header.sh
│   │       ├── GET-link-headers.sh
│   │       ├── GET-ntriples.sh
│   │       ├── GET-with-param.sh
│   │       ├── GET.sh
│   │       ├── POST-400.sh
│   │       ├── POST-415.sh
│   │       ├── POST-ntriples.sh
│   │       ├── POST-rdfxml.sh
│   │       ├── POST-to-non-existing.sh
│   │       ├── PUT-415.sh
│   │       ├── PUT-base.sh
│   │       ├── PUT-ntriples.sh
│   │       ├── PUT-rdfxml.sh
│   │       └── PUT-to-non-existing.sh
│   ├── run.sh
│   └── sparql-protocol
│       ├── query
│       │   ├── GET-304.sh
│       │   ├── GET-csv-results.sh
│       │   ├── GET-default-graph-uri.sh
│       │   ├── GET-json-results.sh
│       │   ├── GET-named-graph-uri.sh
│       │   ├── GET-ntriples.sh
│       │   ├── GET-query-invalid-400.sh
│       │   ├── GET-query-not-set-400.sh
│       │   ├── GET-tsv-results.sh
│       │   ├── GET-xml-results.sh
│       │   ├── GET.sh
│       │   ├── POST-default-graph-uri.sh
│       │   ├── POST-directly-default-graph-uri.sh
│       │   ├── POST-directly-named-graph-uri.sh
│       │   ├── POST-directly-ntriples.sh
│       │   ├── POST-directly-query-invalid-400.sh
│       │   ├── POST-directly-xml-results.sh
│       │   ├── POST-directly.sh
│       │   ├── POST-json-results.sh
│       │   ├── POST-named-graph-uri.sh
│       │   ├── POST-ntriples.sh
│       │   ├── POST-query-invalid-400.sh
│       │   ├── POST-query-not-set-400.sh
│       │   ├── POST-xml-results.sh
│       │   └── POST.sh
│       └── update
│           ├── POST-directly.sh
│           ├── POST-update-invalid-400.sh
│           ├── POST-update-not-set-400.sh
│           └── POST.sh
├── pom.xml
├── release.sh
├── src
│   ├── main
│   │   ├── java
│   │   │   └── com
│   │   │       └── atomgraph
│   │   │           └── processor
│   │   │               ├── exception
│   │   │               │   └── ParameterException.java
│   │   │               ├── factory
│   │   │               │   ├── OntologyFactory.java
│   │   │               │   └── TemplateCallFactory.java
│   │   │               ├── model
│   │   │               │   ├── Application.java
│   │   │               │   ├── Parameter.java
│   │   │               │   ├── Template.java
│   │   │               │   ├── TemplateCall.java
│   │   │               │   └── impl
│   │   │               │       ├── ApplicationImpl.java
│   │   │               │       ├── ParameterImpl.java
│   │   │               │       ├── TemplateCallImpl.java
│   │   │               │       └── TemplateImpl.java
│   │   │               ├── server
│   │   │               │   ├── Application.java
│   │   │               │   ├── filter
│   │   │               │   │   └── response
│   │   │               │   │       └── ResponseHeaderFilter.java
│   │   │               │   ├── io
│   │   │               │   │   ├── SkolemizingDatasetProvider.java
│   │   │               │   │   └── SkolemizingModelProvider.java
│   │   │               │   ├── mapper
│   │   │               │   │   └── ParameterExceptionMapper.java
│   │   │               │   ├── model
│   │   │               │   │   ├── QueriedResource.java
│   │   │               │   │   ├── Resource.java
│   │   │               │   │   └── impl
│   │   │               │   │       └── ResourceBase.java
│   │   │               │   └── resource
│   │   │               │       └── graph
│   │   │               │           └── Item.java
│   │   │               ├── util
│   │   │               │   ├── InsertDataBuilder.java
│   │   │               │   ├── OntModelReadOnly.java
│   │   │               │   ├── OntologyLoader.java
│   │   │               │   ├── RDFNodeFactory.java
│   │   │               │   ├── RulePrinter.java
│   │   │               │   ├── Skolemizer.java
│   │   │               │   ├── StateBuilder.java
│   │   │               │   └── TemplateMatcher.java
│   │   │               └── vocabulary
│   │   │                   ├── AP.java
│   │   │                   ├── LDT.java
│   │   │                   └── SIOC.java
│   │   ├── resources
│   │   │   ├── com
│   │   │   │   └── atomgraph
│   │   │   │       └── processor
│   │   │   │           ├── c.ttl
│   │   │   │           ├── ct.ttl
│   │   │   │           ├── dh.ttl
│   │   │   │           ├── foaf.owl
│   │   │   │           ├── http-statusCodes.rdf
│   │   │   │           ├── http.owl
│   │   │   │           ├── ldt.ttl
│   │   │   │           ├── ngt.ttl
│   │   │   │           ├── sioc.owl
│   │   │   │           ├── sparql-service.owl
│   │   │   │           ├── thgt.ttl
│   │   │   │           ├── tht.ttl
│   │   │   │           └── void.owl
│   │   │   ├── location-mapping.n3
│   │   │   └── log4j.properties
│   │   └── webapp
│   │       ├── META-INF
│   │       │   └── context.xml
│   │       └── WEB-INF
│   │           └── web.xml
│   └── test
│       ├── java
│       │   └── com
│       │       └── atomgraph
│       │           └── processor
│       │               ├── model
│       │               │   └── impl
│       │               │       ├── TemplateCallTest.java
│       │               │       └── TemplateTest.java
│       │               └── util
│       │                   ├── OntModelReadOnlyTest.java
│       │                   ├── SkolemizerTest.java
│       │                   └── TemplateMatcherTest.java
│       └── resources
│           ├── location-mapping.ttl
│           └── log4j.properties
└── stress-test.jmx
/.dockerignore:
--------------------------------------------------------------------------------
1 | examples
2 | http-tests
3 | nbproject
4 | target
--------------------------------------------------------------------------------
/.github/workflows/http-tests.yml:
--------------------------------------------------------------------------------
1 | name: HTTP-tests
2 |
3 | on: push
4 |
5 | jobs:
6 | http-tests:
7 | name: Build Docker image and run HTTP test suite against it
8 | runs-on: ubuntu-latest
9 | steps:
10 | - name: Install Linux packages
11 | run: sudo apt-get update && sudo apt-get install -qq raptor2-utils
12 | - name: Checkout code
13 | uses: actions/checkout@v2
14 | - name: Build Docker image
15 | run: docker build -t atomgraph/processor .
16 | - name: Run Docker containers
17 | run: docker-compose up -d # run fuseki and processor containers in the background
18 | working-directory: http-tests
19 | - name: Wait for the servers to start...
20 | run: |
21 | while ! curl -w "%{http_code}\n" -s -o /dev/null http://localhost:8080 | grep "404" ; do sleep 1 ; done # wait for processor-ct to start
22 | while ! curl -w "%{http_code}\n" -s -o /dev/null http://localhost:8081 | grep "404" ; do sleep 1 ; done # wait for processor-ct-write to start
23 | while ! curl -w "%{http_code}\n" -s -o /dev/null http://localhost:8082 | grep "404" ; do sleep 1 ; done # wait for processor-ngt to start
24 | while ! curl -w "%{http_code}\n" -s -o /dev/null http://localhost:8083 | grep "404" ; do sleep 1 ; done # wait for processor-ngt-write to start
25 | while ! curl -w "%{http_code}\n" -s -o /dev/null http://localhost:8085 | grep "404" ; do sleep 1 ; done # wait for processor-custom to start
26 | while ! curl -w "%{http_code}\n" -s -o /dev/null http://localhost:8086 | grep "404" ; do sleep 1 ; done # wait for processor-custom-write to start
27 | - name: Run HTTP test scripts
28 | run: ./run.sh
29 | shell: bash
30 | working-directory: http-tests
31 | - name: Stop Docker containers
32 | run: docker-compose down # shut down fuseki and processor
33 | working-directory: http-tests
34 | - name: Remove Docker containers
35 | run: docker-compose rm -f
36 | working-directory: http-tests
--------------------------------------------------------------------------------
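
The six wait loops above run the same probe against each published port. A compact equivalent, shown only as a sketch (the wait_for helper is hypothetical, not part of the repository):

    # hypothetical helper: poll a URL until it responds with 404,
    # which the workflow above treats as "Processor is up"
    wait_for() {
        local url="$1"
        while ! curl -w "%{http_code}\n" -s -o /dev/null "$url" | grep -q "404"; do
            sleep 1
        done
    }

    for port in 8080 8081 8082 8083 8085 8086; do
        wait_for "http://localhost:$port"
    done
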
/.gitignore:
--------------------------------------------------------------------------------
1 | target
2 | /nb-configuration.xml
3 | /.idea
4 | /*.iml
5 | /nbproject/
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM maven:3.8.4-openjdk-17 as maven
2 |
3 | ### Build AtomGraph Processor
4 |
5 | WORKDIR /usr/src/Processor
6 |
7 | COPY . /usr/src/Processor
8 |
9 | RUN mvn -Pstandalone clean install
10 |
11 | ### Deploy Processor webapp on Tomcat
12 |
13 | FROM tomcat:10.1.4-jdk17
14 |
15 | WORKDIR $CATALINA_HOME/webapps
16 |
17 | RUN rm -rf * # remove Tomcat's default webapps
18 |
19 | # copy exploded WAR folder from the maven stage
20 | COPY --from=maven /usr/src/Processor/target/ROOT/ ROOT/
21 |
22 | WORKDIR $CATALINA_HOME
23 |
24 | COPY src/main/webapp/META-INF/context.xml conf/Catalina/localhost/ROOT.xml
25 |
26 | ### Install XSLT processor and ps
27 |
28 | RUN apt-get update && \
29 | apt-get -y install xsltproc && \
30 | apt-get -y install procps
31 |
32 | ### Copy entrypoint
33 |
34 | COPY entrypoint.sh entrypoint.sh
35 |
36 | RUN chmod +x entrypoint.sh
37 |
38 | COPY context.xsl conf/Catalina/localhost/context.xsl
39 |
40 | ENTRYPOINT ["/usr/local/tomcat/entrypoint.sh"]
41 |
42 | EXPOSE 8080
43 |
44 | # system location mapping
45 | ENV LOCATION_MAPPING="/usr/local/tomcat/webapps/ROOT/WEB-INF/classes/location-mapping.n3"
46 |
47 | # user-defined location mapping
48 | ENV CUSTOM_LOCATION_MAPPING="/usr/local/tomcat/webapps/ROOT/WEB-INF/classes/custom-mapping.n3"
--------------------------------------------------------------------------------
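
A minimal sketch of building and running this image on its own, assuming a SPARQL service is reachable at the given URLs (values borrowed from examples/fuseki/docker-compose.yml; ENDPOINT, GRAPH_STORE and ONTOLOGY are required by entrypoint.sh):

    docker build -t atomgraph/processor .

    docker run --rm -p 8080:8080 \
        -e ENDPOINT="http://fuseki:3030/ds/" \
        -e GRAPH_STORE="http://fuseki:3030/ds/" \
        -e ONTOLOGY="https://www.w3.org/ns/ldt/core/templates#" \
        atomgraph/processor
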
/context.xsl:
--------------------------------------------------------------------------------
(XSLT stylesheet; its XML markup was not preserved in this export. As driven by entrypoint.sh, it rewrites conf/Catalina/localhost/ROOT.xml, injecting the sd:endpoint, a:graphStore, ldt:ontology, a:authUser, a:authPwd and a:preemptiveAuth context parameters.)
--------------------------------------------------------------------------------
/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | ### Signal handlers ###
4 |
5 | function handle_signal {
6 | case "$1" in
7 | TERM|INT|EXIT)
8 | if [ -n "$CMD_PID" ]; then
9 | kill "$CMD_PID" &>/dev/null
10 | sleep 1
11 | fi
12 |
13 | echo "Exiting ..." >&2
14 | exit 0
15 | ;;
16 | *)
17 | echo "Terminating abnormally" >&2
18 | exit 1
19 | ;;
20 | esac
21 | }
22 |
23 | function ignore_signal {
24 | echo "Caught signal $1 - ignored" >&2
25 | }
26 |
27 | trap "handle_signal TERM" "TERM"
28 | trap "handle_signal INT" "INT"
29 | trap "ignore_signal HUP" "HUP"
30 |
31 | ### Sleeper function ###
32 |
33 | # $1 process PID
34 | function wait_to_finish {
35 | while true; do
36 | sleep 1 &
37 | PID=$!
38 |
39 | if ! wait $PID ; then
40 | kill $PID &>/dev/null
41 | fi
42 |
43 | if ! ps -p "$1" > /dev/null ; then # process not running anymore
44 | break; # exit while loop
45 | fi
46 | done
47 | }
48 |
49 | ### Arguments ###
50 |
51 | # context variables are used in $CATALINA_HOME/conf/Catalina/localhost/ROOT.xml
52 |
53 | if [ -z "$ENDPOINT" ] ; then
54 | echo '$ENDPOINT not set'
55 | exit 1
56 | fi
57 | if [ -z "$GRAPH_STORE" ] ; then
58 | echo '$GRAPH_STORE not set'
59 | exit 1
60 | fi
61 | if [ -z "$ONTOLOGY" ] ; then
62 | echo '$ONTOLOGY not set'
63 | exit 1
64 | fi
65 |
66 | # if user-defined location mapping exists, append it to system location mapping
67 |
68 | if [ -f "$CUSTOM_LOCATION_MAPPING" ] ; then
69 | cat "$CUSTOM_LOCATION_MAPPING" >> "$LOCATION_MAPPING"
70 | cat "$LOCATION_MAPPING"
71 | fi
72 |
73 | # set Context variables (which are used in $CATALINA_HOME/conf/Catalina/localhost/ROOT.xml)
74 |
75 | if [ -n "$ENDPOINT" ] ; then
76 | ENDPOINT_PARAM="--stringparam sd:endpoint $ENDPOINT "
77 | fi
78 | if [ -n "$GRAPH_STORE" ] ; then
79 | GRAPH_STORE_PARAM="--stringparam a:graphStore $GRAPH_STORE "
80 | fi
81 | if [ -n "$ONTOLOGY" ] ; then
82 | ONTOLOGY_PARAM="--stringparam ldt:ontology $ONTOLOGY "
83 | fi
84 | if [ -n "$AUTH_USER" ] ; then
85 | AUTH_USER_PARAM="--stringparam a:authUser $AUTH_USER "
86 | fi
87 | if [ -n "$AUTH_PWD" ] ; then
88 | AUTH_PWD_PARAM="--stringparam a:authPwd $AUTH_PWD "
89 | fi
90 | if [ -n "$PREEMPTIVE_AUTH" ] ; then
91 | PREEMPTIVE_AUTH_PARAM="--stringparam a:preemptiveAuth $PREEMPTIVE_AUTH "
92 | fi
93 |
94 | ### Execution ###
95 |
96 | # $CATALINA_HOME must be the WORKDIR at this point
97 |
98 | transform="xsltproc \
99 | --output conf/Catalina/localhost/ROOT.xml \
100 | $ENDPOINT_PARAM \
101 | $GRAPH_STORE_PARAM \
102 | $ONTOLOGY_PARAM \
103 | $AUTH_USER_PARAM \
104 | $AUTH_PWD_PARAM \
105 | $PREEMPTIVE_AUTH_PARAM \
106 | conf/Catalina/localhost/context.xsl \
107 | conf/Catalina/localhost/ROOT.xml"
108 |
109 | eval "$transform"
110 |
111 | # run Tomcat process in the background
112 |
113 | if [ -z "$JPDA_ADDRESS" ] ; then
114 | catalina.sh run &
115 | else
116 | catalina.sh jpda run &
117 | fi
118 |
119 | CMD_PID=$!
120 | wait_to_finish $CMD_PID
--------------------------------------------------------------------------------
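
With the environment from examples/fuseki/docker-compose.yml, the $transform command assembled above expands roughly to the following (the auth parameters are omitted because AUTH_USER, AUTH_PWD and PREEMPTIVE_AUTH are unset); this is an illustration, not output captured from the container:

    xsltproc \
        --output conf/Catalina/localhost/ROOT.xml \
        --stringparam sd:endpoint http://fuseki:3030/ds/ \
        --stringparam a:graphStore http://fuseki:3030/ds/ \
        --stringparam ldt:ontology "https://www.w3.org/ns/ldt/core/templates#" \
        conf/Catalina/localhost/context.xsl \
        conf/Catalina/localhost/ROOT.xml
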
/examples/fuseki/dataset.ttl:
--------------------------------------------------------------------------------
1 | @prefix foaf: <http://xmlns.com/foaf/0.1/> .
2 | @prefix dct: <http://purl.org/dc/terms/> .
3 | @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
4 |
5 | @base <http://localhost:8080/> .
6 |
7 | <> a foaf:Document ;
8 | dct:title "localhost:8080" ;
9 | dct:description "This is an RDF document served by AtomGraph Processor" ;
10 | rdfs:seeAlso .
11 |
12 | @base <http://example.org/> .
13 |
14 | <> a foaf:Document ;
15 | dct:title "example.org" ;
16 | dct:description "This is an RDF document served by AtomGraph Processor" ;
17 | rdfs:seeAlso .
--------------------------------------------------------------------------------
/examples/fuseki/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "2"
2 | services:
3 | fuseki:
4 | image: atomgraph/fuseki:9985c4edd850c9277d241f6edc0d883013713ad3
5 | user: root # otherwise the fuseki user does not have permissions on the mounted folder, which is owned by root
6 | ports:
7 | - 3030:3030
8 | volumes:
9 | - ./dataset.ttl:/tmp/data/dataset.ttl:ro
10 | command: [ "--file", "/tmp/data/dataset.ttl", "/ds" ]
11 | processor:
12 | build: ../..
13 | depends_on:
14 | - fuseki
15 | ports:
16 | - 8080:8080
17 | - 8010:8000 # debugger
18 | environment:
19 | - JPDA_ADDRESS=*:8000 # debugger port
20 | - ENDPOINT="http://fuseki:3030/ds/" # hostname equals service name
21 | - GRAPH_STORE="http://fuseki:3030/ds/" # hostname equals service name
22 | - ONTOLOGY="https://www.w3.org/ns/ldt/core/templates#"
23 | nginx:
24 | image: nginx
25 | depends_on:
26 | - processor
27 | ports:
28 | - 80:80
29 | environment:
30 | - PROXY_PASS=http://processor:8080 # internal Processor URL (hostname equals docker-compose service name)
31 | - PROXY_SET_HOST=example.org # the hostname set on the request URI before it's passed to Processor
32 | volumes:
33 | - ./nginx.conf.template:/etc/nginx/nginx.conf.template:ro
34 | command: /bin/bash -c "envsubst '$$PROXY_PASS $$PROXY_SET_HOST' < /etc/nginx/nginx.conf.template > /etc/nginx/nginx.conf && nginx -g 'daemon off;'"
--------------------------------------------------------------------------------
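
Once this stack is up, the documents declared in dataset.ttl can be requested either directly from Processor on port 8080 or through nginx on port 80, which rewrites the Host header to example.org. Illustrative requests:

    # directly against Processor: resolves the <http://localhost:8080/> document
    curl -H "Accept: text/turtle" http://localhost:8080/

    # through nginx, which proxies to Processor with Host: example.org
    curl -H "Accept: text/turtle" http://localhost/
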
/examples/fuseki/nginx.conf.template:
--------------------------------------------------------------------------------
1 | events {
2 | worker_connections 1024;
3 | }
4 |
5 | http {
6 | server {
7 | listen 80;
8 | server_name localhost;
9 | location / {
10 | proxy_pass $PROXY_PASS;
11 | proxy_set_header Host "$PROXY_SET_HOST";
12 | }
13 | }
14 | }
--------------------------------------------------------------------------------
/examples/wikidata/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "2"
2 | services:
3 | processor:
4 | build: ../..
5 | ports:
6 | - 8080:8080
7 | - 8010:8000 # debugger
8 | environment:
9 | - JPDA_ADDRESS=*:8000 # debugger port
10 | - ENDPOINT="https://query.wikidata.org/bigdata/namespace/wdq/sparql"
11 | - GRAPH_STORE="https://query.wikidata.org/bigdata/namespace/wdq/service" # fake value, unused
12 | - ONTOLOGY="https://github.com/AtomGraph/Processor/blob/develop/examples/wikidata#"
13 | volumes:
14 | - ./wikidata.ttl:/usr/local/tomcat/webapps/ROOT/WEB-INF/classes/org/wikidata/ldt.ttl
15 | - ./location-mapping.n3:/usr/local/tomcat/webapps/ROOT/WEB-INF/classes/custom-mapping.n3
--------------------------------------------------------------------------------
/examples/wikidata/location-mapping.n3:
--------------------------------------------------------------------------------
1 | @prefix lm: <http://jena.hpl.hp.com/2004/08/location-mapping#> .
2 |
3 | [] lm:mapping
4 |
5 | [ lm:name "https://github.com/AtomGraph/Processor/blob/develop/examples/wikidata#" ; lm:altName "org/wikidata/ldt.ttl" ]
6 | .
--------------------------------------------------------------------------------
/examples/wikidata/wikidata.ttl:
--------------------------------------------------------------------------------
1 | @base <https://github.com/AtomGraph/Processor/blob/develop/examples/wikidata> .
2 |
3 | @prefix : <#> .
4 | @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
5 | @prefix owl: <http://www.w3.org/2002/07/owl#> .
6 | @prefix ldt: <https://www.w3.org/ns/ldt#> .
7 | @prefix sp: <http://spinrdf.org/sp#> .
8 | @prefix spin: <http://spinrdf.org/spin#> .
9 | @prefix spl: <http://spinrdf.org/spl#> .
10 |
11 | : a ldt:Ontology ;
12 | owl:imports ldt:, sp: ;
13 | rdfs:label "Wikidata's LDT ontology" .
14 |
15 | # root
16 |
17 | :RootTemplate a ldt:Template ;
18 | rdfs:label "Root resource template" ;
19 | ldt:match "/" ;
20 | ldt:query :RootQuery ;
21 | rdfs:isDefinedBy : .
22 |
23 | :RootQuery a ldt:Query, sp:Construct ;
24 | rdfs:label "Root query" ;
25 | sp:text """PREFIX foaf: <http://xmlns.com/foaf/0.1/>
26 | PREFIX dct: <http://purl.org/dc/terms/>
27 | PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
28 |
29 | CONSTRUCT
30 | {
31 | ?this a foaf:Document ;
32 | dct:title "This is an RDF document served by AtomGraph Processor" ;
33 | rdfs:seeAlso , .
34 | }
35 | WHERE
36 | {
37 | }""" ;
38 | rdfs:isDefinedBy : .
39 |
40 | # birthdays
41 |
42 | :BirthdaysTemplate a ldt:Template ;
43 | rdfs:label "People born today" ;
44 | ldt:match "/birthdays" ;
45 | ldt:param :SexParam ;
46 | ldt:query [ a :BirthdaysQueryTemplate ] ;
47 | rdfs:isDefinedBy : .
48 |
49 | :SexParam a ldt:Parameter ;
50 | rdfs:label "Sex parameter" ;
51 | spl:predicate :sex ;
52 | spl:valueType rdfs:Resource ;
53 | spl:optional true ;
54 | rdfs:isDefinedBy : .
55 |
56 | :BirthdaysQueryTemplate a spin:Template ;
57 | rdfs:label "Birthdays query template" ;
58 | spin:constraint :SexParam ;
59 | spin:body :BirthdaysQuery ;
60 | rdfs:isDefinedBy : .
61 |
62 | :BirthdaysQuery a ldt:Query, sp:Construct ;
63 | rdfs:label "Birthdays query" ;
64 | sp:text """PREFIX bd: <http://www.bigdata.com/rdf#>
65 | PREFIX wdt: <http://www.wikidata.org/prop/direct/>
66 | PREFIX wikibase: <http://wikiba.se/ontology#>
67 |
68 | CONSTRUCT
69 | {
70 | ?entity ?year ;
71 | wdt:P21 ?sex
72 | }
73 | WHERE
74 | { SELECT ?entity ?year
75 | WHERE
76 | { BIND(month(now()) AS ?nowMonth)
77 | BIND(day(now()) AS ?nowDay)
78 | ?entity wdt:P569 ?date ;
79 | wdt:P21 ?sex .
80 | FILTER ( ( month(?date) = ?nowMonth ) && ( day(?date) = ?nowDay ) )
81 | BIND(year(?date) AS ?year)
82 | }
83 | LIMIT 100
84 | }""" ;
85 | rdfs:isDefinedBy : .
--------------------------------------------------------------------------------
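
With the wikidata example running, the two templates above could be exercised as follows. This is a sketch: the sex query parameter name is assumed to follow the :sex predicate declared on :SexParam, and the Wikidata entity IRI is only an illustration.

    # matched by :RootTemplate (ldt:match "/")
    curl -H "Accept: text/turtle" http://localhost:8080/

    # matched by :BirthdaysTemplate (ldt:match "/birthdays"); :SexParam is optional
    curl -H "Accept: text/turtle" "http://localhost:8080/birthdays"
    curl -H "Accept: text/turtle" \
        "http://localhost:8080/birthdays?sex=http%3A%2F%2Fwww.wikidata.org%2Fentity%2FQ6581072"
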
/hooks/post_push:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | docker tag "$IMAGE_NAME" "$DOCKER_REPO":"$SOURCE_COMMIT"
4 |
5 | docker push "$DOCKER_REPO":"$SOURCE_COMMIT"
--------------------------------------------------------------------------------
/http-tests/config/ds.ttl:
--------------------------------------------------------------------------------
1 | @prefix : <#> .
2 | @prefix fuseki: <http://jena.apache.org/fuseki#> .
3 | @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
4 | @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
5 | @prefix ja: <http://jena.hpl.hp.com/2005/11/Assembler#> .
6 | @prefix tdb: <http://jena.hpl.hp.com/2008/tdb#> .
7 |
8 | <#serviceInMemory> a fuseki:Service;
9 | rdfs:label "In-memory, transactional dataset.";
10 | fuseki:name "ds";
11 | fuseki:serviceQuery "query";
12 | fuseki:serviceQuery "sparql";
13 | fuseki:serviceUpdate "update";
14 | fuseki:serviceUpload "upload" ;
15 | fuseki:serviceReadWriteGraphStore "data" ;
16 | fuseki:serviceReadGraphStore "get" ;
17 | fuseki:dataset <#dataset>
18 | .
19 |
20 | <#dataset> a ja:MemoryDataset .
--------------------------------------------------------------------------------
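
The service above exposes SPARQL query at /ds/query (and /ds/sparql), SPARQL update at /ds/update, and the Graph Store Protocol at /ds/data. Illustrative probes against the fuseki container published on port 3030:

    # SPARQL Protocol query
    curl -G --data-urlencode "query=ASK {}" http://localhost:3030/ds/query

    # SPARQL Update
    curl --data-urlencode "update=INSERT DATA {}" http://localhost:3030/ds/update

    # Graph Store Protocol, default graph
    curl -G -H "Accept: application/n-triples" --data-urlencode "default=true" http://localhost:3030/ds/data
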
/http-tests/dataset.trig:
--------------------------------------------------------------------------------
1 | {
2 |
3 | , "default object" .
4 |
5 | }
6 |
7 | {
8 |
9 | , "super object" .
10 |
11 | }
12 |
13 | {
14 |
15 | , "missing update object" .
16 |
17 | }
18 |
19 | {
20 |
21 | , "mandatory object" .
22 |
23 | }
24 |
25 | {
26 |
27 | , "optional object" .
28 |
29 | }
30 |
31 | {
32 |
33 | , "optional default object" .
34 |
35 | }
36 |
37 | {
38 |
39 | , "inherited object" .
40 |
41 | }
42 |
43 | {
44 |
45 | , 42 .
46 |
47 | }
48 |
49 | {
50 |
51 | .
52 |
53 | }
54 |
55 |
56 | {
57 |
58 | , "named object" .
59 |
60 | }
61 |
62 |
63 | {
64 |
65 | , "named object" .
66 |
67 | }
--------------------------------------------------------------------------------
/http-tests/dev.log4j.properties:
--------------------------------------------------------------------------------
1 | log4j.rootLogger=DEBUG, stdout
2 |
3 | log4j.logger.org.apache.jena.sparql.engine.optimizer.reorder=WARN
4 | log4j.logger.org.apache.jena.util.FileManager=WARN
5 | log4j.logger.org.apache.jena.shared.LockMRSW=WARN
6 | log4j.logger.org.apache.jena.riot.stream.StreamManager=WARN
7 | log4j.logger.org.apache.jena.riot.RDFDataMgr=WARN
8 | log4j.logger.org.apache.jena.reasoner=WARN
9 | log4j.logger.com.atomgraph.client.locator=WARN
10 |
11 | # Direct log messages to console
12 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
13 | log4j.appender.stdout.Target=System.out
14 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
15 | log4j.appender.stdout.layout.ConversionPattern=%d{ABSOLUTE} [%t] %5p %c{1}:%L - %m%n
--------------------------------------------------------------------------------
/http-tests/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "2"
2 | services:
3 | fuseki:
4 | image: atomgraph/fuseki
5 | ports:
6 | - 3030:3030
7 | volumes:
8 | - ./config/ds.ttl:/mnt/apache-fuseki/configuration/ds.ttl:ro
9 | command: ["--config", "/mnt/apache-fuseki/configuration/ds.ttl"]
10 | fuseki-write:
11 | image: atomgraph/fuseki
12 | ports:
13 | - 3031:3030
14 | volumes:
15 | - ./config/ds.ttl:/mnt/apache-fuseki/configuration/ds.ttl:ro
16 | command: ["--config", "/mnt/apache-fuseki/configuration/ds.ttl"]
17 | processor-ct:
18 | image: atomgraph/processor
19 | depends_on:
20 | - fuseki
21 | ports:
22 | - 8080:8080
23 | - 8002:8000 # debugger
24 | environment:
25 | - JPDA_ADDRESS=*:8000 # debugger port
26 | - ENDPOINT="http://fuseki:3030/ds/" # hostname equals service name
27 | - GRAPH_STORE="http://fuseki:3030/ds/" # hostname equals service name
28 | - ONTOLOGY="https://www.w3.org/ns/ldt/core/templates#"
29 | processor-ct-write: # dataset will be modified
30 | image: atomgraph/processor
31 | depends_on:
32 | - fuseki-write
33 | ports:
34 | - 8081:8080
35 | - 8003:8000 # debugger
36 | environment:
37 | - JPDA_ADDRESS=*:8000 # debugger port
38 | - ENDPOINT="http://fuseki-write:3030/ds/" # hostname equals service name
39 | - GRAPH_STORE="http://fuseki-write:3030/ds/" # hostname equals service name
40 | - ONTOLOGY="https://www.w3.org/ns/ldt/core/templates#"
41 | processor-ngt:
42 | image: atomgraph/processor
43 | depends_on:
44 | - fuseki
45 | ports:
46 | - 8082:8080
47 | - 8004:8000 # debugger
48 | environment:
49 | - JPDA_ADDRESS=*:8000 # debugger port
50 | - ENDPOINT="http://fuseki:3030/ds/" # hostname equals service name
51 | - GRAPH_STORE="http://fuseki:3030/ds/" # hostname equals service name
52 | - ONTOLOGY="https://www.w3.org/ns/ldt/named-graphs/templates#"
53 | processor-ngt-write: # dataset will be modified
54 | image: atomgraph/processor
55 | depends_on:
56 | - fuseki-write
57 | ports:
58 | - 8083:8080
59 | - 8005:8000 # debugger
60 | environment:
61 | - JPDA_ADDRESS=*:8000 # debugger port
62 | - ENDPOINT="http://fuseki-write:3030/ds/" # hostname equals service name
63 | - GRAPH_STORE="http://fuseki-write:3030/ds/" # hostname equals service name
64 | - ONTOLOGY="https://www.w3.org/ns/ldt/named-graphs/templates#"
65 | processor-custom:
66 | image: atomgraph/processor
67 | depends_on:
68 | - fuseki
69 | ports:
70 | - 8085:8080
71 | - 8006:8000 # debugger
72 | environment:
73 | - JPDA_ADDRESS=*:8000 # debugger port
74 | - ENDPOINT="http://fuseki:3030/ds/" # hostname equals service name
75 | - GRAPH_STORE="http://fuseki:3030/ds/" # hostname equals service name
76 | - ONTOLOGY="https://github.com/AtomGraph/Processor/blob/develop/http-tests/custom#"
77 | volumes:
78 | - ./linked-data-templates/custom/ontology.ttl:/usr/local/tomcat/webapps/ROOT/WEB-INF/classes/com/atomgraph/processor/http-tests/custom/ontology.ttl:ro
79 | - ./linked-data-templates/custom/location-mapping.n3:/usr/local/tomcat/webapps/ROOT/WEB-INF/classes/custom-mapping.n3:ro
80 | processor-custom-write:
81 | image: atomgraph/processor
82 | depends_on:
83 | - fuseki-write
84 | ports:
85 | - 8086:8080
86 | - 8007:8000 # debugger
87 | environment:
88 | - JPDA_ADDRESS=*:8000 # debugger port
89 | - ENDPOINT="http://fuseki-write:3030/ds/" # hostname equals service name
90 | - GRAPH_STORE="http://fuseki-write:3030/ds/" # hostname equals service name
91 | - ONTOLOGY="https://github.com/AtomGraph/Processor/blob/develop/http-tests/custom#"
92 | volumes:
93 | - ./linked-data-templates/custom/ontology.ttl:/usr/local/tomcat/webapps/ROOT/WEB-INF/classes/com/atomgraph/processor/http-tests/custom/ontology.ttl:ro
94 | - ./linked-data-templates/custom/location-mapping.n3:/usr/local/tomcat/webapps/ROOT/WEB-INF/classes/custom-mapping.n3:ro
--------------------------------------------------------------------------------
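
The suite can be run locally the same way the http-tests.yml workflow does; rapper from raptor2-utils is needed by some of the test scripts:

    docker build -t atomgraph/processor .   # from the repository root
    cd http-tests
    docker-compose up -d                    # fuseki and processor containers
    ./run.sh                                # execute the test scripts
    docker-compose down
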
/http-tests/graph-store-protocol/DELETE-default.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # delete default graph
8 |
9 | curl -w "%{http_code}\n" -f -s -G \
10 | -X DELETE \
11 | "${BASE_URL_WRITABLE}service" \
12 | --data-urlencode "default=true" \
13 | | grep -q "${STATUS_NO_CONTENT}"
14 |
15 | curl -w "%{http_code}\n" -f -s -G \
16 | -H "Accept: application/n-triples" \
17 | "${BASE_URL_WRITABLE}service" \
18 | --data-urlencode "default=true" \
19 | | grep -q "${STATUS_OK}"
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/DELETE-named-404.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # attempt to delete non-existing named graph
4 |
5 | curl -w "%{http_code}\n" -f -s -G \
6 | -X DELETE \
7 | -H "Accept: application/n-triples" \
8 | "${BASE_URL_WRITABLE}service" \
9 | --data-urlencode "graph=${BASE_URL_WRITABLE}non-existing" \
10 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/DELETE-named.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # delete named graph
8 |
9 | curl -w "%{http_code}\n" -f -s -G \
10 | -X DELETE \
11 | "${BASE_URL_WRITABLE}service" \
12 | --data-urlencode "graph=${BASE_URL_WRITABLE}graphs/name/" \
13 | | grep -q "${STATUS_NO_CONTENT}"
14 |
15 | # check that the graph is gone
16 |
17 | curl -w "%{http_code}\n" -f -s -G \
18 | -H "Accept: application/n-triples" \
19 | "${BASE_URL_WRITABLE}service" \
20 | --data-urlencode "graph=${BASE_URL_WRITABLE}graphs/name/" \
21 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/GET-default-304.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # GET the default graph
4 | # request N-Triples twice - supply ETag second time and expect 304 Not Modified
5 |
6 | etag=$(
7 | curl -f -s -I -G \
8 | -H "Accept: application/n-triples" \
9 | "${BASE_URL}service" \
10 | --data-urlencode "default=true" \
11 | | grep 'ETag' \
12 | | tr -d '\r' \
13 | | sed -En 's/^ETag: (.*)$/\1/p')
14 |
15 | curl -w "%{http_code}\n" -f -s -G \
16 | -H "Accept: application/n-triples" \
17 | "${BASE_URL}service" \
18 | --data-urlencode "default=true" \
19 | -H "If-None-Match: $etag" \
20 | | grep -q "${STATUS_NOT_MODIFIED}"
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/GET-default-ntriples.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # use conneg to request N-Triples as the preferred format
4 |
5 | curl -f -s -G \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}service" \
8 | --data-urlencode "default=true" \
9 | | rapper -q --input ntriples --output ntriples /dev/stdin - \
10 | | tr -s '\n' '\t' \
11 | | grep "${BASE_URL}default-subject" \
12 | | grep -v "${BASE_URL}named-subject" > /dev/null
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/GET-default.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # GET the default graph
4 | # the value of ?default is not required, as per the GSP specification
5 |
6 | curl -w "%{http_code}\n" -f -s -G \
7 | "${BASE_URL}service" \
8 | --data-urlencode "default=true" \
9 | | grep -q "${STATUS_OK}"
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/GET-named-304.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # GET the named graph
4 | # request N-Triples twice - supply ETag second time and expect 304 Not Modified
5 |
6 | etag=$(
7 | curl -f -s -I -G \
8 | -H "Accept: application/n-triples" \
9 | "${BASE_URL}service" \
10 | --data-urlencode "graph=${BASE_URL}graphs/name/" \
11 | | grep 'ETag' \
12 | | tr -d '\r' \
13 | | sed -En 's/^ETag: (.*)$/\1/p')
14 |
15 | curl -w "%{http_code}\n" -f -s -G \
16 | -H "Accept: application/n-triples" \
17 | "${BASE_URL}service" \
18 | --data-urlencode "graph=${BASE_URL}graphs/name/" \
19 | -H "If-None-Match: $etag" \
20 | | grep -q "${STATUS_NOT_MODIFIED}"
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/GET-named-404.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -w "%{http_code}\n" -f -s -G \
4 | "${BASE_URL}service" \
5 | --data-urlencode "graph=${BASE_URL}non-existing" \
6 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/GET-named-ntriples.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # use conneg to request N-Triples as the preferred format
4 |
5 | curl -f -s -G \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}service" \
8 | --data-urlencode "graph=${BASE_URL}graphs/name/" \
9 | | rapper -q --input ntriples --output ntriples /dev/stdin - \
10 | | tr -s '\n' '\t' \
11 | | grep "${BASE_URL}named-subject" \
12 | | grep -v "${BASE_URL}default-subject" > /dev/null
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/GET-named.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # GET the named graph
4 |
5 | curl -w "%{http_code}\n" -f -s -G \
6 | "${BASE_URL}service" \
7 | --data-urlencode "graph=${BASE_URL}graphs/name/" \
8 | | grep -q "${STATUS_OK}"
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/POST-default.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # separate URL-encoding step because we cannot combine -G with --data-binary
8 | encoded_url=$(curl -w "%{url_effective}\n" -G -s -o /dev/null \
9 | --data-urlencode "default=true" \
10 | "${BASE_URL_WRITABLE}service")
11 |
12 | # append new triples to the default graph
13 |
14 | (
15 | curl -w "%{http_code}\n" -f -s \
16 | -H "Accept: application/n-triples" \
17 | -H "Content-Type: application/n-triples" \
18 | --data-binary @- \
19 | "${encoded_url}" < "default object POST" .
21 | <${BASE_URL_WRITABLE}default-subject-post> "another object POST" .
22 | EOF
23 | ) \
24 | | grep -q "${STATUS_OK}"
25 |
26 | # check that resource is accessible
27 |
28 | curl -f -s \
29 | -H "Accept: application/n-triples" \
30 | "${encoded_url}" \
31 | | tr -d '\n' \
32 | | grep '"default object POST"' \
33 | | grep '"another object POST"' > /dev/null
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/POST-named-existing.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # separate URL-encoding step because we cannot combine -G with --data-binary
8 | encoded_url=$(curl -w "%{url_effective}\n" -G -s -o /dev/null \
9 | --data-urlencode "graph=${BASE_URL_WRITABLE}graphs/name/" \
10 | "${BASE_URL_WRITABLE}service")
11 |
12 | # append new triples to the named graph
13 |
14 | (
15 | curl -w "%{http_code}\n" -f -s \
16 | -H "Accept: application/n-triples" \
17 | -H "Content-Type: application/n-triples" \
18 | --data-binary @- \
19 | "${encoded_url}" < "named object POST" .
21 | <${BASE_URL_WRITABLE}named-subject-post> "another named object POST" .
22 | EOF
23 | ) \
24 | | grep -q "${STATUS_OK}"
25 |
26 | # check that resource is accessible
27 |
28 | curl -f -s -G \
29 | -H "Accept: application/n-triples" \
30 | "${encoded_url}" \
31 | | tr -d '\n' \
32 | | grep '"named object POST"' \
33 | | grep '"another named object POST"' > /dev/null
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/POST-named-new.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # separate URL-encoding step because we cannot combine -G with --data-binary
8 | encoded_url=$(curl -w "%{url_effective}\n" -G -s -o /dev/null \
9 | --data-urlencode "graph=${BASE_URL_WRITABLE}graphs/non-existing/" \
10 | "${BASE_URL_WRITABLE}service")
11 |
12 | # append new triples to the named graph
13 |
14 | (
15 | curl -w "%{http_code}\n" -f -s \
16 | -H "Accept: application/n-triples" \
17 | -H "Content-Type: application/n-triples" \
18 | --data-binary @- \
19 | "${encoded_url}" < "named object POST" .
21 | <${BASE_URL_WRITABLE}named-subject-post> "another named object POST" .
22 | EOF
23 | ) \
24 | | grep -q "${STATUS_CREATED}"
25 |
26 | # check that resource is accessible
27 |
28 | curl -f -s -G \
29 | -H "Accept: application/n-triples" \
30 | "${encoded_url}" \
31 | | tr -d '\n' \
32 | | grep '"named object POST"' \
33 | | grep '"another named object POST"' > /dev/null
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/PUT-default.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # separate URL-encoding step because we cannot combine -G with --data-binary
8 | encoded_url=$(curl -w "%{url_effective}\n" -G -s -o /dev/null \
9 | --data-urlencode "default=true" \
10 | "${BASE_URL_WRITABLE}service")
11 |
12 | # replace the default graph
13 |
14 | (
15 | curl -w "%{http_code}\n" -f -s \
16 | -X PUT \
17 | -H "Accept: application/n-triples" \
18 | -H "Content-Type: application/n-triples" \
19 | --data-binary @- \
20 | "${encoded_url}" < "default object POST" .
22 | <${BASE_URL_WRITABLE}named-subject-post> "another default object POST" .
23 | EOF
24 | ) \
25 | | grep -q "${STATUS_OK}"
26 |
27 | # check that resource is accessible
28 |
29 | curl -f -s -G \
30 | -H "Accept: application/n-triples" \
31 | "${encoded_url}" \
32 | | tr -d '\n' \
33 | | grep '"default object POST"' \
34 | | grep -v '"default object"' > /dev/null
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/PUT-named-existing.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # separate URL-encoding step because we cannot combine -G with --data-binary
8 | encoded_url=$(curl -w "%{url_effective}\n" -G -s -o /dev/null \
9 | --data-urlencode "graph=${BASE_URL_WRITABLE}graphs/name/" \
10 | "${BASE_URL_WRITABLE}service")
11 |
12 | # replace the named graph
13 |
14 | (
15 | curl -w "%{http_code}\n" -f -s \
16 | -X PUT \
17 | -H "Accept: application/n-triples" \
18 | -H "Content-Type: application/n-triples" \
19 | --data-binary @- \
20 | "${encoded_url}" < "named object PUT" .
22 | <${BASE_URL_WRITABLE}named-subject-put> "another named object PUT" .
23 | EOF
24 | ) \
25 | | grep -q "${STATUS_OK}"
26 |
27 | # check that resource is accessible
28 |
29 | curl -f -s -G \
30 | -H "Accept: application/n-triples" \
31 | "${encoded_url}" \
32 | | tr -d '\n' \
33 | | grep '"named object PUT"' \
34 | | grep -v '"named object"' > /dev/null
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/PUT-named-new.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # separate URL-encoding step because we cannot combine -G with --data-binary
8 | encoded_url=$(curl -w "%{url_effective}\n" -G -s -o /dev/null \
9 | --data-urlencode "graph=${BASE_URL_WRITABLE}graphs/non-existing/" \
10 | "${BASE_URL_WRITABLE}service")
11 |
12 | # replace the named graph
13 |
14 | (
15 | curl -w "%{http_code}\n" -f -s \
16 | -X PUT \
17 | -H "Accept: application/n-triples" \
18 | -H "Content-Type: application/n-triples" \
19 | --data-binary @- \
20 | "${encoded_url}" < "named object PUT" .
22 | <${BASE_URL_WRITABLE}named-subject-put> "another named object PUT" .
23 | EOF
24 | ) \
25 | | grep -q "${STATUS_CREATED}"
26 |
27 | # check that resource is accessible
28 |
29 | curl -f -s -G \
30 | -H "Accept: application/n-triples" \
31 | "${encoded_url}" \
32 | | tr -d '\n' \
33 | | grep '"named object PUT"' \
34 | | grep -v '"named object"' > /dev/null
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/direct/DELETE-404.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # attempt to delete non-existing named graph
4 |
5 | curl -w "%{http_code}\n" -f -s -G \
6 | -X DELETE \
7 | -H "Accept: application/n-triples" \
8 | "${BASE_URL_WRITABLE}graphs/non-existing/" \
9 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/direct/DELETE.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # delete directly identified named graph
8 |
9 | curl -w "%{http_code}\n" -f -s -G \
10 | -X DELETE \
11 | "${BASE_URL_WRITABLE}graphs/name/" \
12 | | grep -q "${STATUS_NO_CONTENT}"
13 |
14 | # check that the graph is gone
15 |
16 | curl -w "%{http_code}\n" -f -s -G \
17 | -H "Accept: application/n-triples" \
18 | "${BASE_URL_WRITABLE}graphs/name/" \
19 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/direct/GET-304.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # GET the named graph
4 | # request N-Triples twice - supply ETag second time and expect 304 Not Modified
5 |
6 | etag=$(
7 | curl -f -s -I -G \
8 | -H "Accept: application/n-triples" \
9 | "${BASE_URL}graphs/name/" \
10 | | grep 'ETag' \
11 | | tr -d '\r' \
12 | | sed -En 's/^ETag: (.*)$/\1/p')
13 |
14 | curl -w "%{http_code}\n" -f -s -G \
15 | -H "Accept: application/n-triples" \
16 | "${BASE_URL}graphs/name/" \
17 | -H "If-None-Match: $etag" \
18 | | grep -q "${STATUS_NOT_MODIFIED}"
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/direct/GET-404.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -w "%{http_code}\n" -f -s -G \
4 | "${BASE_URL_WRITABLE}graphs/non-existing/" \
5 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/direct/GET-ntriples.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # use conneg to request N-Triples as the preferred format
4 |
5 | curl -f -s -G \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}graphs/name/" \
8 | | rapper -q --input ntriples --output ntriples /dev/stdin - \
9 | | tr -s '\n' '\t' \
10 | | grep "${BASE_URL}named-subject" \
11 | | grep -v "${BASE_URL}default-subject" > /dev/null
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/direct/GET.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # GET the directly identified named graph
4 |
5 | curl -w "%{http_code}\n" -f -s -G \
6 | -H "Accept: text/turtle" \
7 | "${BASE_URL}graphs/name/" \
8 | | grep -q "${STATUS_OK}"
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/direct/POST-404.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -w "%{http_code}\n" -f -s \
4 | -H "Content-Type: application/n-triples" \
5 | "${BASE_URL_WRITABLE}graphs/non-existing/" \
6 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/direct/POST-ntriples.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # append new triples to the named graph
8 |
9 | (
10 | curl -w "%{http_code}\n" -f -s \
11 | -H "Accept: application/n-triples" \
12 | -H "Content-Type: application/n-triples" \
13 | --data-binary @- \
14 | "${BASE_URL_WRITABLE}graphs/name/" < "named object POST" .
16 | <${BASE_URL_WRITABLE}named-subject-post> "another named object POST" .
17 | EOF
18 | ) \
19 | | grep -q "${STATUS_OK}"
20 |
21 | # check that resource is accessible
22 |
23 | curl -f -s -G \
24 | -H "Accept: application/n-triples" \
25 | "${BASE_URL_WRITABLE}graphs/name/" \
26 | | tr -d '\n' \
27 | | grep '"named object POST"' \
28 | | grep '"another named object POST"' > /dev/null
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/direct/PUT-new.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -w "%{http_code}\n" -f -s -G \
4 | -X PUT \
5 | -H "Content-Type: application/n-triples" \
6 | "${BASE_URL_WRITABLE}graphs/non-existing/" \
7 | | grep -q "${STATUS_CREATED}"
--------------------------------------------------------------------------------
/http-tests/graph-store-protocol/direct/PUT-ntriples.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # replace the named graph
8 |
9 | (
10 | curl -w "%{http_code}\n" -f -s \
11 | -X PUT \
12 | -H "Accept: application/n-triples" \
13 | -H "Content-Type: application/n-triples" \
14 | --data-binary @- \
15 | "${BASE_URL_WRITABLE}graphs/name/" < "named object PUT" .
17 | <${BASE_URL_WRITABLE}named-subject-put> "another named object PUT" .
18 | EOF
19 | ) \
20 | | grep -q "${STATUS_OK}"
21 |
22 | # check that resource is accessible
23 |
24 | curl -f -s -G \
25 | -H "Accept: application/n-triples" \
26 | "${BASE_URL_WRITABLE}graphs/name/" \
27 | | tr -d '\n' \
28 | | grep '"named object PUT"' \
29 | | grep -v '"named object"' > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ct/DELETE-404.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | curl -w "%{http_code}\n" -f -s \
8 | -X DELETE \
9 | "${BASE_URL_WRITABLE}non-existing" \
10 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ct/DELETE-with-param.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # check that unrecognized parameters are allowed
8 |
9 | curl -w "%{http_code}\n" -f -s \
10 | -X DELETE \
11 | "${BASE_URL_WRITABLE}default-subject?param=value" \
12 | | grep -q "${STATUS_NO_CONTENT}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ct/DELETE.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # delete resource
8 |
9 | curl -w "%{http_code}\n" -f -s \
10 | -X DELETE \
11 | "${BASE_URL_WRITABLE}default-subject" \
12 | | grep -q "${STATUS_NO_CONTENT}"
13 |
14 | # check that deleted resource is really gone
15 |
16 | curl -w "%{http_code}\n" -f -s \
17 | "${BASE_URL_WRITABLE}default-subject" \
18 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ct/GET-304.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # request N-Triples twice - supply ETag second time and expect 304 Not Modified
4 |
5 | etag=$(
6 | curl -f -s -I -G \
7 | -H "Accept: application/n-triples" \
8 | "${BASE_URL}default-subject" \
9 | | grep 'ETag' \
10 | | tr -d '\r' \
11 | | sed -En 's/^ETag: (.*)$/\1/p')
12 |
13 | curl -w "%{http_code}\n" -f -s -G \
14 | -H "Accept: application/n-triples" \
15 | "${BASE_URL}default-subject" \
16 | -H "If-None-Match: $etag" \
17 | | grep -q "${STATUS_NOT_MODIFIED}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ct/GET-404.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -w "%{http_code}\n" -f -s \
4 | "${BASE_URL}non-existing" \
5 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ct/GET-406.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -w "%{http_code}\n" -f -s \
4 | -H "Accept: application/not-accepted" \
5 | "${BASE_URL}default-subject" \
6 | | grep -q "${STATUS_NOT_ACCEPTABLE}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ct/GET-base-404.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -w "%{http_code}\n" -f -s \
4 | -H "Accept: application/n-triples" \
5 | "${BASE_URL}" \
6 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ct/GET-etag-header.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -f -s -I \
4 | -H "Accept: application/n-triples" \
5 | "${BASE_URL}default-subject" \
6 | | tr -d '\r\n' \
7 | | grep 'ETag: "' > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ct/GET-link-headers.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -f -s -I \
4 | -H "Accept: application/n-triples" \
5 | "${BASE_URL}default-subject" \
6 | | tr -d '\r\n' \
7 | | grep 'Link: ; rel=https://www.w3.org/ns/ldt#template' \
8 | | grep 'Link: ; rel=https://www.w3.org/ns/ldt#ontology' \
9 | | grep "Link: <${BASE_URL}>; rel=https://www.w3.org/ns/ldt#base" > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ct/GET-ntriples.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # use conneg to request N-Triples as the preferred format
4 |
5 | curl -f -s \
6 | -H "Accept: application/n-triples; q=1.0, application/rdf+xml; q=0.9" \
7 | "${BASE_URL}default-subject" \
8 | | rapper -q --input ntriples --output ntriples /dev/stdin - \
9 | | tr -s '\n' '\t' \
10 | | grep '"default object"' \
11 | | grep "${BASE_URL}default-object" > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ct/GET-with-param.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # check that unrecognized parameters are allowed
4 |
5 | curl -w "%{http_code}\n" -f -s \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}default-subject?param=value" \
8 | | grep -q "${STATUS_OK}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ct/GET.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -w "%{http_code}\n" -f -s \
4 | "${BASE_URL}default-subject" \
5 | | grep -q "${STATUS_OK}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ct/POST-400.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # intentionally corrupt N-Triples syntax should give Bad Request
4 |
5 | (
6 | curl -w "%{http_code}\n" -f -s \
7 | -H "Accept: application/n-triples" \
8 | -H "Content-Type: application/n-triples" \
9 | --data-binary @- \
10 | "${BASE_URL_WRITABLE}" < http://example.com/default-predicate "default object POST" .
12 | EOF
13 | ) \
14 | | grep -q "${STATUS_BAD_REQUEST}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ct/POST-415.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | (
4 | curl -w "%{http_code}\n" -f -s \
5 | -H "Accept: application/n-triples" \
6 | -H "Content-Type: application/not-accepted" --data-binary @- \
7 | "${BASE_URL_WRITABLE}" < "default object" .
9 | EOF
10 | ) | grep -q "$STATUS_UNSUPPORTED_MEDIA"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ct/POST-ntriples.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # append new resource description
8 |
9 | (
10 | curl -w "%{http_code}\n" -f -s \
11 | -H "Accept: application/n-triples" \
12 | -H "Content-Type: application/n-triples" \
13 | --data-binary @- \
14 | "${BASE_URL_WRITABLE}" < "default object POST" .
16 | <${BASE_URL_WRITABLE}default-subject-post> "another object POST" .
17 | EOF
18 | ) \
19 | | grep -q "${STATUS_OK}"
20 |
21 | # check that resource is accessible
22 |
23 | curl -f -s \
24 | -H "Accept: application/n-triples" \
25 | "${BASE_URL_WRITABLE}default-subject-post" \
26 | | tr -d '\n' \
27 | | grep '"default object POST"' \
28 | | grep '"another object POST"' > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ct/POST-rdfxml.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # append new resource description
8 |
9 | (
10 | curl -w "%{http_code}\n" -f -s \
11 | -H "Accept: application/n-triples" \
12 | -H "Content-Type: application/rdf+xml" \
13 | --data-binary @- \
14 | "${BASE_URL_WRITABLE}" <
16 |
17 |
18 | default object POST
19 | another object POST
20 |
21 |
22 | EOF
23 | ) \
24 | | grep -q "${STATUS_OK}"
25 |
26 | # check that resource is accessible
27 |
28 | curl -f -s \
29 | -H "Accept: application/n-triples" \
30 | "${BASE_URL_WRITABLE}default-subject-post" \
31 | | tr -d '\n' \
32 | | grep '"default object POST"' \
33 | | grep '"another object POST"' > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ct/POST-to-non-existing.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # append new resource description
8 |
9 | (
10 | curl -w "%{http_code}\n" -f -s \
11 | -H "Accept: application/n-triples" \
12 | -H "Content-Type: application/n-triples" \
13 | --data-binary @- \
14 | "${BASE_URL_WRITABLE}non-existing" < "default object POST" .
16 | <${BASE_URL_WRITABLE}default-subject-post> "another object POST" .
17 | EOF
18 | ) \
19 | | grep -q "${STATUS_OK}"
20 |
21 | # check that resource is accessible
22 |
23 | curl -f -s \
24 | -H "Accept: application/n-triples" \
25 | "${BASE_URL_WRITABLE}default-subject-post" \
26 | | tr -d '\n' \
27 | | grep '"default object POST"' \
28 | | grep '"another object POST"' > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ct/PUT-415.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | (
4 | curl -w "%{http_code}\n" -f -s \
5 | -X PUT \
6 | -H "Accept: application/n-triples" \
7 | -H "Content-Type: application/not-accepted" --data-binary @- \
8 | "${BASE_URL_WRITABLE}" < "default object" .
10 | EOF
11 | ) | grep -q "$STATUS_UNSUPPORTED_MEDIA"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ct/PUT-base.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | (
4 | curl -w "%{http_code}\n" -f -s \
5 | -X PUT \
6 | -H "Accept: application/n-triples" \
7 | -H "Content-Type: application/n-triples" \
8 | --data-binary @- \
9 | "${BASE_URL_WRITABLE}" < "new object PUT" .
11 | EOF
12 | ) \
13 | | grep -q "${STATUS_CREATED}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ct/PUT-ntriples.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # set new resource description
8 |
9 | (
10 | curl -w "%{http_code}\n" -f -s \
11 | -X PUT \
12 | -H "Accept: application/n-triples" \
13 | -H "Content-Type: application/n-triples" \
14 | --data-binary @- \
15 | "${BASE_URL_WRITABLE}default-subject" < "default object PUT" .
17 | <${BASE_URL_WRITABLE}default-subject-put> "another object PUT" .
18 | EOF
19 | ) \
20 | | grep -q "${STATUS_OK}"
21 |
22 | # check that resource is accessible
23 |
24 | curl -f -s \
25 | -H "Accept: application/n-triples" \
26 | "${BASE_URL_WRITABLE}default-subject" \
27 | | grep '"default object PUT"' > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ct/PUT-rdfxml.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # set new resource description
8 |
9 | (
10 | curl -w "%{http_code}\n" -f -s \
11 | -X PUT \
12 | -H "Accept: application/n-triples" \
13 | -H "Content-Type: application/rdf+xml" \
14 | --data-binary @- \
15 | "${BASE_URL_WRITABLE}default-subject" <
17 |
18 |
19 | default object PUT
20 |
21 |
22 | another object PUT
23 |
24 |
25 | EOF
26 | ) \
27 | | grep -q "${STATUS_OK}"
28 |
29 | # check that resource is accessible
30 |
31 | curl -f -s \
32 | -H "Accept: application/n-triples" \
33 | "${BASE_URL_WRITABLE}default-subject" \
34 | | grep '"default object PUT"' > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ct/PUT-to-non-existing.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # set new resource description
8 |
9 | (
10 | curl -w "%{http_code}\n" -f -s \
11 | -X PUT \
12 | -H "Accept: application/n-triples" \
13 | -H "Content-Type: application/n-triples" \
14 | --data-binary @- \
15 | "${BASE_URL_WRITABLE}non-existing" < "new object PUT" .
17 | <${BASE_URL_WRITABLE}non-existing-put> "another new object PUT" .
18 | EOF
19 | ) \
20 | | grep -q "${STATUS_CREATED}"
21 |
22 | # check that resource is accessible
23 |
24 | curl -f -s \
25 | -H "Accept: application/n-triples" \
26 | "${BASE_URL_WRITABLE}non-existing" \
27 | | grep '"new object PUT"' > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/DELETE-invalid-update-500.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -w "%{http_code}\n" -f -s \
4 | -X DELETE \
5 | "${BASE_URL_WRITABLE}invalid-update" \
6 | | grep -q "${STATUS_INTERNAL_SERVER_ERROR}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/DELETE-missing-update-501.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # check that missing ldt:update gives 501
8 |
9 | curl -w "%{http_code}\n" -f -s \
10 | -X DELETE \
11 | "${BASE_URL_WRITABLE}missing-update" \
12 | | grep -q "${STATUS_NOT_IMPLEMENTED}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/DELETE-non-match-404.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -w "%{http_code}\n" -f -s \
4 | -X DELETE \
5 | "${BASE_URL_WRITABLE}non-match" \
6 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/DELETE-optional-param-set-404.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # check that parameter value results in query pattern non-match
8 |
9 | curl -w "%{http_code}\n" -f -s \
10 | -X DELETE \
11 | "${BASE_URL_WRITABLE}optional-param?object=non-matching-literal" \
12 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/DELETE-optional-param-set.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # check that parameter value results in query pattern match
8 |
9 | curl -w "%{http_code}\n" -f -s \
10 | -X DELETE \
11 | "${BASE_URL_WRITABLE}optional-param?object=optional%20object" \
12 | | grep -q "${STATUS_NO_CONTENT}"
13 |
14 | # check that resource is gone
15 |
16 | curl -w "%{http_code}\n" -f -s \
17 | "${BASE_URL_WRITABLE}optional-param?object=optional%20object" \
18 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/GET-304.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # request N-Triples twice - supply ETag second time and expect 304 Not Modified
4 |
5 | etag=$(
6 | curl -f -s -I -G \
7 | -H "Accept: application/n-triples" \
8 | "${BASE_URL}default-subject" \
9 | | grep 'ETag' \
10 | | tr -d '\r' \
11 | | sed -En 's/^ETag: (.*)$/\1/p')
12 |
13 | curl -w "%{http_code}\n" -f -s -G \
14 | -H "Accept: application/n-triples" \
15 | "${BASE_URL}default-subject" \
16 | -H "If-None-Match: $etag" \
17 | | grep -q "${STATUS_NOT_MODIFIED}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/GET-base-404.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -w "%{http_code}\n" -f -s \
4 | -H "Accept: application/n-triples" \
5 | "${BASE_URL}" \
6 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/GET-inherited-param-not-set.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # check that parameter value results in query pattern match
4 |
5 | curl -f -s \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}inherited-param" \
8 | | rapper -q --input ntriples --output ntriples /dev/stdin - \
9 | | tr -s '\n' '\t' \
10 | | grep '"inherited object"' \
11 | | grep "${BASE_URL}inherited-object" > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/GET-inherited-param-set-404.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # check that parameter value results in query pattern non-match
4 |
5 | curl -w "%{http_code}\n" -f -s \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}inherited-param?object=non-matching-literal" \
8 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/GET-inherited-param-set.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # check that parameter value results in query pattern match
4 |
5 | curl -f -s \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}inherited-param?object=inherited%20object" \
8 | | rapper -q --input ntriples --output ntriples /dev/stdin - \
9 | | tr -s '\n' '\t' \
10 | | grep '"inherited object"' \
11 | | grep -v "${BASE_URL}inherited-object" > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/GET-invalid-query-500.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -w "%{http_code}\n" -f -s \
4 | -H "Accept: application/n-triples" \
5 | "${BASE_URL}invalid-query" \
6 | | grep -q "${STATUS_INTERNAL_SERVER_ERROR}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/GET-mandatory-param-not-set-400.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # check that parameter is required
4 | # test disabled: currently ParameterException in TemplateCall is not handled by ParameterExceptionMapper which gives 500 instead of 400
5 |
6 | #curl -w "%{http_code}\n" -f -v \
7 | # -H "Accept: application/n-triples" \
8 | # "${BASE_URL}mandatory-param" \
9 | #| grep -q "${STATUS_BAD_REQUEST}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/GET-mandatory-param-set-404.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # check that parameter value results in query pattern non-match
4 |
5 | curl -w "%{http_code}\n" -f -s \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}mandatory-param?object=non-matching-literal" \
8 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/GET-mandatory-param-set.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # check that parameter value results in query pattern match
4 |
5 | curl -f -s \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}mandatory-param?object=mandatory%20object" \
8 | | rapper -q --input ntriples --output ntriples /dev/stdin - \
9 | | tr -s '\n' '\t' \
10 | | grep '"mandatory object"' \
11 | | grep -v "${BASE_URL}mandatory-object" > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/GET-match-extended.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -f -s -I \
4 | "${BASE_URL}default-subject" \
5 | | tr -d '\r\n' \
6 | | grep 'Link: ; rel=https://www.w3.org/ns/ldt#template' > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/GET-match-super.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -f -s -I \
4 | "${BASE_URL}super" \
5 | | tr -d '\r\n' \
6 | | grep 'Link: ; rel=https://www.w3.org/ns/ldt#template' > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/GET-missing-query-500.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -w "%{http_code}\n" -f -s \
4 | -H "Accept: application/n-triples" \
5 | "${BASE_URL}missing-query" \
6 | | grep -q "${STATUS_INTERNAL_SERVER_ERROR}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/GET-non-match-404-error.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # check that error responses include RDF description of the error
4 |
5 | curl -s \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}non-match" \
8 | | rapper -q --input ntriples --output ntriples /dev/stdin - \
9 | | tr -s '\n' '\t' \
10 | | grep " ." > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/GET-non-match-404.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -w "%{http_code}\n" -f -s \
4 | -H "Accept: application/n-triples" \
5 | "${BASE_URL}non-match" \
6 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/GET-optional-default-param.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # check that parameter value results in query pattern match
4 |
5 | curl -f -s \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}optional-default-param" \
8 | | rapper -q --input ntriples --output ntriples /dev/stdin - \
9 | | tr -s '\n' '\t' \
10 | | grep '"optional default object"' \
11 | | grep -v "${BASE_URL}optional-default-object" > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/GET-optional-param-not-set.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # check that parameter value results in query pattern match
4 |
5 | curl -f -s \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}optional-param" \
8 | | rapper -q --input ntriples --output ntriples /dev/stdin - \
9 | | tr -s '\n' '\t' \
10 | | grep '"optional object"' \
11 | | grep "${BASE_URL}optional-object" > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/GET-optional-param-set-404.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # check that parameter value results in query pattern non-match
4 |
5 | curl -w "%{http_code}\n" -f -s \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}optional-param?object=non-matching-literal" \
8 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/GET-optional-param-set.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # check that parameter value results in query pattern match
4 |
5 | curl -f -s \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}optional-param?object=optional%20object" \
8 | | rapper -q --input ntriples --output ntriples /dev/stdin - \
9 | | tr -s '\n' '\t' \
10 | | grep '"optional object"' \
11 | | grep -v "${BASE_URL}optional-object" > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/GET-this-unbound.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # check that parameter value results in query pattern match
4 |
5 | curl -w "%{http_code}\n" -f -s \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}this-unbound" \
8 | | grep -q "${STATUS_OK}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/GET-value-type-param-set.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # check that parameter value is parsed according to its value type and results in a match
4 |
5 | curl -f -s \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}value-type-param?object=42" \
8 | | rapper -q --input ntriples --output ntriples /dev/stdin - \
9 | | tr -s '\n' '\t' \
10 | | grep '"42"^^' \
11 | | grep -v "${BASE_URL}value-type-object" > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/GET.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # check that parameter value results in query pattern match
4 |
5 | curl -w "%{http_code}\n" -f -s \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}default-subject" \
8 | | grep -q "${STATUS_OK}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/POST-non-match.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -w "%{http_code}\n" -f -s \
4 | -X POST \
5 | -H "Accept: application/n-triples" \
6 | -H "Content-Type: application/n-triples" \
7 | "${BASE_URL_WRITABLE}non-match" \
8 | | grep -q "${STATUS_OK}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/POST-shacl-constraint-422.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # expect 422 Unprocessable Entity because a required property is missing, failing SHACLConstrainedType constraint validation
8 |
9 | (
10 | curl -w "%{http_code}\n" -f -s \
11 | -H "Accept: application/n-triples" \
12 | -H "Content-Type: application/n-triples" \
13 | --data-binary @- \
14 | "${BASE_URL_WRITABLE}default-subject" < .
16 | EOF
17 | ) \
18 | | grep -q "${STATUS_UNPROCESSABLE_ENTITY}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/POST-shacl-constraint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # succeeds SHACLConstrainedType constraint validation
8 |
9 | (
10 | curl -w "%{http_code}\n" -f -s \
11 | -H "Accept: application/n-triples" \
12 | -H "Content-Type: application/n-triples" \
13 | --data-binary @- \
14 | "${BASE_URL_WRITABLE}default-subject" < .
16 | <${BASE_URL_WRITABLE}default-subject-post> "constrained object" .
17 | EOF
18 | ) \
19 | | grep -q "${STATUS_OK}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/POST-spin-constraint-422.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # expect 422 Unprocessable Entity because a required property is missing, failing SPINConstrainedType constraint validation
8 |
9 | (
10 | curl -w "%{http_code}\n" -f -s \
11 | -H "Accept: application/n-triples" \
12 | -H "Content-Type: application/n-triples" \
13 | --data-binary @- \
14 | "${BASE_URL_WRITABLE}default-subject" < .
16 | EOF
17 | ) \
18 | | grep -q "${STATUS_UNPROCESSABLE_ENTITY}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/POST-spin-constraint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # succeeds SPINConstrainedType constraint validation
8 |
9 | (
10 | curl -w "%{http_code}\n" -f -s \
11 | -H "Accept: application/n-triples" \
12 | -H "Content-Type: application/n-triples" \
13 | --data-binary @- \
14 | "${BASE_URL_WRITABLE}default-subject" < .
16 | <${BASE_URL_WRITABLE}default-subject-post> "constrained object" .
17 | EOF
18 | ) \
19 | | grep -q "${STATUS_OK}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/PUT-base.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | (
4 | curl -w "%{http_code}\n" -f -s \
5 | -X PUT \
6 | -H "Accept: application/n-triples" \
7 | -H "Content-Type: application/n-triples" \
8 | --data-binary @- \
9 | "${BASE_URL_WRITABLE}" < "new object PUT" .
11 | EOF
12 | ) \
13 | | grep -q "${STATUS_CREATED}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/PUT-non-match.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -w "%{http_code}\n" -f -s \
4 | -X PUT \
5 | -H "Accept: application/n-triples" \
6 | -H "Content-Type: application/n-triples" \
7 | "${BASE_URL_WRITABLE}non-match" \
8 | | grep -q "${STATUS_CREATED}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/PUT-shacl-constraint-422.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # expect 422 Unprocessable Entity because a required property is missing, failing SHACLConstrainedType constraint validation
8 |
9 | (
10 | curl -w "%{http_code}\n" -f -s \
11 | -X PUT \
12 | -H "Accept: application/n-triples" \
13 | -H "Content-Type: application/n-triples" \
14 | --data-binary @- \
15 | "${BASE_URL_WRITABLE}default-subject" < .
17 | EOF
18 | ) \
19 | | grep -q "${STATUS_UNPROCESSABLE_ENTITY}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/PUT-shacl-constraint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # succeeds SHACLConstrainedType constraint validation
8 |
9 | (
10 | curl -w "%{http_code}\n" -f -s \
11 | -X PUT \
12 | -H "Accept: application/n-triples" \
13 | -H "Content-Type: application/n-triples" \
14 | --data-binary @- \
15 | "${BASE_URL_WRITABLE}default-subject" < .
17 | <${BASE_URL_WRITABLE}default-subject-post> "constrained object" .
18 | EOF
19 | ) \
20 | | grep -q "${STATUS_OK}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/PUT-spin-constraint-422.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # expect 422 Unprocessable Entity because a required property is missing, failing SPINConstrainedType constraint validation
8 |
9 | (
10 | curl -w "%{http_code}\n" -f -s \
11 | -X PUT \
12 | -H "Accept: application/n-triples" \
13 | -H "Content-Type: application/n-triples" \
14 | --data-binary @- \
15 | "${BASE_URL_WRITABLE}default-subject" < .
17 | EOF
18 | ) \
19 | | grep -q "${STATUS_UNPROCESSABLE_ENTITY}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/PUT-spin-constraint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # succeeds SPINConstrainedType constraint validation
8 |
9 | (
10 | curl -w "%{http_code}\n" -f -s \
11 | -X PUT \
12 | -H "Accept: application/n-triples" \
13 | -H "Content-Type: application/n-triples" \
14 | --data-binary @- \
15 | "${BASE_URL_WRITABLE}default-subject" < .
17 | <${BASE_URL_WRITABLE}default-subject-post> "constrained object" .
18 | EOF
19 | ) \
20 | | grep -q "${STATUS_OK}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/custom/location-mapping.n3:
--------------------------------------------------------------------------------
1 | @prefix lm: <http://jena.hpl.hp.com/2004/08/location-mapping#> .
2 |
3 | [] lm:mapping
4 |
5 | [ lm:name "https://github.com/AtomGraph/Processor/blob/develop/http-tests/custom#" ; lm:altName "com/atomgraph/processor/http-tests/custom/ontology.ttl" ]
6 | .
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ngt/DELETE-404.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | curl -w "%{http_code}\n" -f -s \
8 | -X DELETE \
9 | "${BASE_URL_WRITABLE}non-existing" \
10 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ngt/DELETE-with-param.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # check that unrecognized parameters are allowed
8 |
9 | curl -w "%{http_code}\n" -f -s \
10 | -X DELETE \
11 | "${BASE_URL_WRITABLE}named-subject?param=value" \
12 | | grep -q "${STATUS_NO_CONTENT}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ngt/DELETE.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # delete resource
8 |
9 | curl -w "%{http_code}\n" -f -s \
10 | -X DELETE \
11 | "${BASE_URL_WRITABLE}named-subject" \
12 | | grep -q "${STATUS_NO_CONTENT}"
13 |
14 | # check that deleted resource is really gone
15 |
16 | curl -w "%{http_code}\n" -f -s \
17 | "${BASE_URL_WRITABLE}named-subject" \
18 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ngt/GET-304.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # request N-Triples twice - supply ETag second time and expect 304 Not Modified
4 |
5 | etag=$(
6 | curl -f -s -I -G \
7 | -H "Accept: application/n-triples" \
8 | "${BASE_URL}named-subject" \
9 | | grep 'ETag' \
10 | | tr -d '\r' \
11 | | sed -En 's/^ETag: (.*)$/\1/p')
12 |
13 | curl -w "%{http_code}\n" -f -s -G \
14 | -H "Accept: application/n-triples" \
15 | "${BASE_URL}named-subject" \
16 | -H "If-None-Match: $etag" \
17 | | grep -q "${STATUS_NOT_MODIFIED}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ngt/GET-404.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -w "%{http_code}\n" -f -s \
4 | "${BASE_URL}non-existing" \
5 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ngt/GET-406.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -w "%{http_code}\n" -f -s \
4 | -H "Accept: application/not-accepted" \
5 | "${BASE_URL}named-subject" \
6 | | grep -q "${STATUS_NOT_ACCEPTABLE}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ngt/GET-base-404.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -w "%{http_code}\n" -f -s \
4 | -H "Accept: application/n-triples" \
5 | "${BASE_URL}" \
6 | | grep -q "${STATUS_NOT_FOUND}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ngt/GET-etag-header.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -f -s -I \
4 | -H "Accept: application/n-triples" \
5 | "${BASE_URL}named-subject" \
6 | | tr -d '\r\n' \
7 | | grep 'ETag: "' > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ngt/GET-link-headers.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -f -s -I \
4 | -H "Accept: application/n-triples" \
5 | "${BASE_URL}named-subject" \
6 | | tr -d '\r\n' \
7 | | grep 'Link: ; rel=https://www.w3.org/ns/ldt#template' \
8 | | grep 'Link: ; rel=https://www.w3.org/ns/ldt#ontology' \
9 | | grep "Link: <${BASE_URL}>; rel=https://www.w3.org/ns/ldt#base" > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ngt/GET-ntriples.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # use conneg to request N-Triples as the preferred format
4 |
5 | curl -f -s \
6 | -H "Accept: application/n-triples; q=1.0, application/rdf+xml; q=0.9" \
7 | "${BASE_URL}named-subject" \
8 | | rapper -q --input ntriples --output ntriples /dev/stdin - \
9 | | tr -s '\n' '\t' \
10 | | grep '"named object"' \
11 | | grep "${BASE_URL}named-object" > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ngt/GET-with-param.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # check that unrecognized parameters are allowed
4 |
5 | curl -w "%{http_code}\n" -f -s \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}named-subject?param=value" \
8 | | grep -q "${STATUS_OK}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ngt/GET.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | curl -w "%{http_code}\n" -f -s \
4 | "${BASE_URL}named-subject" \
5 | | grep -q "${STATUS_OK}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ngt/POST-400.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # intentionally corrupt N-Triples syntax should give Bad Request
4 |
5 | (
6 | curl -w "%{http_code}\n" -f -s \
7 | -H "Accept: application/n-triples" \
8 | -H "Content-Type: application/n-triples" \
9 | --data-binary @- \
10 | "${BASE_URL_WRITABLE}" < http://example.com/named-predicate "named object POST" .
12 | EOF
13 | ) \
14 | | grep -q "${STATUS_BAD_REQUEST}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ngt/POST-415.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | (
4 | curl -w "%{http_code}\n" -f -s \
5 | -H "Accept: application/n-triples" \
6 | -H "Content-Type: application/not-accepted" --data-binary @- \
7 | "${BASE_URL_WRITABLE}" < "named object" .
9 | EOF
10 | ) | grep -q "$STATUS_UNSUPPORTED_MEDIA"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ngt/POST-ntriples.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # append new resource description
8 |
9 | (
10 | curl -w "%{http_code}\n" -f -s \
11 | -H "Accept: application/n-triples" \
12 | -H "Content-Type: application/n-triples" \
13 | --data-binary @- \
14 | "${BASE_URL_WRITABLE}" < "named object POST" .
16 | <${BASE_URL_WRITABLE}named-subject-post> "another object POST" .
17 | EOF
18 | ) \
19 | | grep -q "${STATUS_OK}"
20 |
21 | # check that resource is accessible
22 |
23 | curl -f -s \
24 | -H "Accept: application/n-triples" \
25 | "${BASE_URL_WRITABLE}named-subject-post" \
26 | | tr -d '\n' \
27 | | grep '"named object POST"' \
28 | | grep '"another object POST"' > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ngt/POST-rdfxml.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # append new resource description
8 |
9 | (
10 | curl -w "%{http_code}\n" -f -s \
11 | -H "Accept: application/n-triples" \
12 | -H "Content-Type: application/rdf+xml" \
13 | --data-binary @- \
14 | "${BASE_URL_WRITABLE}" <
16 |
17 |
18 | named object POST
19 | another object POST
20 |
21 |
22 | EOF
23 | ) \
24 | | grep -q "${STATUS_OK}"
25 |
26 | # check that resource is accessible
27 |
28 | curl -f -s \
29 | -H "Accept: application/n-triples" \
30 | "${BASE_URL_WRITABLE}named-subject-post" \
31 | | tr -d '\n' \
32 | | grep '"named object POST"' \
33 | | grep '"another object POST"' > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ngt/POST-to-non-existing.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # append new resource description
8 |
9 | (
10 | curl -w "%{http_code}\n" -f -s \
11 | -H "Accept: application/n-triples" \
12 | -H "Content-Type: application/n-triples" \
13 | --data-binary @- \
14 | "${BASE_URL_WRITABLE}non-existing" < "named object POST" .
16 | <${BASE_URL_WRITABLE}named-subject-post> "another object POST" .
17 | EOF
18 | ) \
19 | | grep -q "${STATUS_OK}"
20 |
21 | # check that resource is accessible
22 |
23 | curl -f -s \
24 | -H "Accept: application/n-triples" \
25 | "${BASE_URL_WRITABLE}named-subject-post" \
26 | | tr -d '\n' \
27 | | grep '"named object POST"' \
28 | | grep '"another object POST"' > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ngt/PUT-415.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | (
4 | curl -w "%{http_code}\n" -f -s \
5 | -X PUT \
6 | -H "Accept: application/n-triples" \
7 | -H "Content-Type: application/not-accepted" --data-binary @- \
8 | "${BASE_URL_WRITABLE}" < "named object" .
10 | EOF
11 | ) | grep -q "$STATUS_UNSUPPORTED_MEDIA"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ngt/PUT-base.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | (
4 | curl -w "%{http_code}\n" -f -s \
5 | -X PUT \
6 | -H "Accept: application/n-triples" \
7 | -H "Content-Type: application/n-triples" \
8 | --data-binary @- \
9 | "${BASE_URL_WRITABLE}" < "new object PUT" .
11 | EOF
12 | ) \
13 | | grep -q "${STATUS_CREATED}"
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ngt/PUT-ntriples.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # append new resource description
8 |
9 | (
10 | curl -w "%{http_code}\n" -f -s \
11 | -X PUT \
12 | -H "Accept: application/n-triples" \
13 | -H "Content-Type: application/n-triples" \
14 | --data-binary @- \
15 | "${BASE_URL_WRITABLE}named-subject" < "named object PUT" .
17 | <${BASE_URL_WRITABLE}named-subject-put> "another object PUT" .
18 | EOF
19 | ) \
20 | | grep -q "${STATUS_OK}"
21 |
22 | # check that resource is accessible
23 |
24 | curl -f -s \
25 | -H "Accept: application/n-triples" \
26 | "${BASE_URL_WRITABLE}named-subject" \
27 | | grep '"named object PUT"' > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ngt/PUT-rdfxml.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # append new resource description
8 |
9 | (
10 | curl -w "%{http_code}\n" -f -s \
11 | -X PUT \
12 | -H "Accept: application/n-triples" \
13 | -H "Content-Type: application/rdf+xml" \
14 | --data-binary @- \
15 | "${BASE_URL_WRITABLE}named-subject" <
17 |
18 |
19 | named object PUT
20 |
21 |
22 | another object PUT
23 |
24 |
25 | EOF
26 | ) \
27 | | grep -q "${STATUS_OK}"
28 |
29 | # check that resource is accessible
30 |
31 | curl -f -s \
32 | -H "Accept: application/n-triples" \
33 | "${BASE_URL_WRITABLE}named-subject" \
34 | | grep '"named object PUT"' > /dev/null
--------------------------------------------------------------------------------
/http-tests/linked-data-templates/ngt/PUT-to-non-existing.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # re-initialize writable dataset
4 |
5 | initialize_dataset "$BASE_URL_WRITABLE" "../../dataset.trig" "$ENDPOINT_URL_WRITABLE"
6 |
7 | # set new resource description
8 |
9 | (
10 | curl -w "%{http_code}\n" -f -s \
11 | -X PUT \
12 | -H "Accept: application/n-triples" \
13 | -H "Content-Type: application/n-triples" \
14 | --data-binary @- \
15 | "${BASE_URL_WRITABLE}non-existing" < "new object PUT" .
17 | <${BASE_URL_WRITABLE}non-existing-put> "another new object PUT" .
18 | EOF
19 | ) \
20 | | grep -q "${STATUS_CREATED}"
21 |
22 | # check that resource is accessible
23 |
24 | curl -f -s \
25 | -H "Accept: application/n-triples" \
26 | "${BASE_URL_WRITABLE}non-existing" \
27 | | grep '"new object PUT"' > /dev/null
--------------------------------------------------------------------------------
/http-tests/run.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | export STATUS_OK=200
4 | export STATUS_DELETE_SUCCESS='200|204'
5 | export STATUS_PATCH_SUCCESS='200|201|204'
6 | # export POST_SUCCESS='200|201|204'
7 | export STATUS_POST_SUCCESS='200|201|204'
8 | # export PUT_SUCCESS='201|204'
9 | # export STATUS_PUT_SUCCESS='200|201|204'
10 | export STATUS_CREATED=201
11 | export STATUS_NO_CONTENT=204
12 | export STATUS_UPDATED='201|204'
13 | # export DELETE_SUCCESS=204
14 | export STATUS_NOT_MODIFIED=304
15 | export STATUS_BAD_REQUEST=400
16 | export STATUS_UNAUTHORIZED=401
17 | export STATUS_NOT_FOUND=404
18 | export STATUS_NOT_ACCEPTABLE=406
19 | export STATUS_UNSUPPORTED_MEDIA=415
20 | export STATUS_INTERNAL_SERVER_ERROR=500
21 | export STATUS_NOT_IMPLEMENTED=501
22 |
23 | function run_tests()
24 | {
25 | local error_count=0
26 | for script_pathname in "$@"
27 | do
28 | echo -n "$script_pathname";
29 | script_filename=$(basename "$script_pathname")
30 | script_directory=$(dirname "$script_pathname")
31 | ( cd "$script_directory" || exit;
32 | bash -e "$script_filename";
33 | )
34 | if [[ $? == "0" ]]
35 | then
36 | echo " ok"
37 | else
38 | echo " failed";
39 | (( error_count += 1))
40 | fi
41 | done
42 | return $error_count
43 | }
44 |
45 | function initialize_dataset()
46 | {
47 | echo "@base <${1}> ." \
48 | | cat - "${2}" \
49 | | curl -f -s \
50 | -X PUT \
51 | --data-binary @- \
52 | -H "Content-Type: application/trig" \
53 | "${3}data" > /dev/null
54 | }
55 |
56 | export -f initialize_dataset
57 |
58 | export ENDPOINT_URL="http://localhost:3030/ds/"
59 | export ENDPOINT_URL_WRITABLE="http://localhost:3031/ds/"
60 |
61 | error_count=0
62 |
63 | ### Core Templates ontology tests ###
64 |
65 | export BASE_URL="http://localhost:8080/"
66 | export BASE_URL_WRITABLE="http://localhost:8081/"
67 |
68 | initialize_dataset "$BASE_URL" "dataset.trig" "$ENDPOINT_URL"
69 | initialize_dataset "$BASE_URL_WRITABLE" "dataset.trig" "$ENDPOINT_URL_WRITABLE"
70 |
71 | printf "\n ### Core Templates ontology tests ###\n\n"
72 |
73 | run_tests $(find ./linked-data-templates/ct/ -type f -name '*.sh*')
74 | (( error_count += $? ))
75 |
76 | ### Named Graph Templates ontology tests ###
77 |
78 | export BASE_URL="http://localhost:8082/"
79 | export BASE_URL_WRITABLE="http://localhost:8083/"
80 |
81 | initialize_dataset "$BASE_URL" "dataset.trig" "$ENDPOINT_URL"
82 | initialize_dataset "$BASE_URL_WRITABLE" "dataset.trig" "$ENDPOINT_URL_WRITABLE"
83 |
84 | printf "\n### Named Graph Templates ontology tests ###\n\n"
85 |
86 | run_tests $(find ./linked-data-templates/ngt/ -type f -name '*.sh*')
87 | (( error_count += $? ))
88 |
89 | ### Custom LDT ontology tests ###
90 |
91 | export BASE_URL="http://localhost:8085/"
92 | export BASE_URL_WRITABLE="http://localhost:8086/"
93 |
94 | initialize_dataset "$BASE_URL" "dataset.trig" "$ENDPOINT_URL"
95 | initialize_dataset "$BASE_URL_WRITABLE" "dataset.trig" "$ENDPOINT_URL_WRITABLE"
96 |
97 | printf "\n### Custom LDT ontology tests ###\n\n"
98 |
99 | run_tests $(find ./linked-data-templates/custom/ -type f -name '*.sh*')
100 | (( error_count += $? ))
101 |
102 | ### SPARQL Protocol query tests ###
103 |
104 | export BASE_URL="http://localhost:8080/"
105 | export BASE_URL_WRITABLE="http://localhost:8081/"
106 |
107 | initialize_dataset "$BASE_URL" "dataset.trig" "$ENDPOINT_URL"
108 | initialize_dataset "$BASE_URL_WRITABLE" "dataset.trig" "$ENDPOINT_URL_WRITABLE"
109 |
110 | printf "\n### SPARQL Protocol query tests ###\n\n"
111 |
112 | run_tests $(find ./sparql-protocol/query/ -type f -name '*.sh*')
113 | (( error_count += $? ))
114 |
115 | ### SPARQL Protocol update tests ###
116 |
117 | export BASE_URL="http://localhost:8080/"
118 | export BASE_URL_WRITABLE="http://localhost:8081/"
119 |
120 | initialize_dataset "$BASE_URL" "dataset.trig" "$ENDPOINT_URL"
121 | initialize_dataset "$BASE_URL_WRITABLE" "dataset.trig" "$ENDPOINT_URL_WRITABLE"
122 |
123 | printf "\n### SPARQL Protocol update tests ###\n\n"
124 |
125 | run_tests $(find ./sparql-protocol/update/ -type f -name '*.sh*')
126 | (( error_count += $? ))
127 |
128 | ### Graph Store Protocol tests ###
129 |
130 | export BASE_URL="http://localhost:8080/"
131 | export BASE_URL_WRITABLE="http://localhost:8081/"
132 |
133 | initialize_dataset "$BASE_URL" "dataset.trig" "$ENDPOINT_URL"
134 | initialize_dataset "$BASE_URL_WRITABLE" "dataset.trig" "$ENDPOINT_URL_WRITABLE"
135 |
136 | printf "\n### Graph Store Protocol tests ###\n\n"
137 |
138 | run_tests $(find ./graph-store-protocol/ -maxdepth 1 -type f -name '*.sh*')
139 | (( error_count += $? ))
140 |
141 | # use custom ontology with GraphItem template for direct identification tests
142 |
143 | export BASE_URL="http://localhost:8085/"
144 | export BASE_URL_WRITABLE="http://localhost:8086/"
145 |
146 | initialize_dataset "$BASE_URL" "dataset.trig" "$ENDPOINT_URL"
147 | initialize_dataset "$BASE_URL_WRITABLE" "dataset.trig" "$ENDPOINT_URL_WRITABLE"
148 |
149 | printf "\n### Graph Store Protocol (direct identification) tests ###\n\n"
150 |
151 | run_tests $(find ./graph-store-protocol/direct/ -type f -name '*.sh*')
152 | (( error_count += $? ))
153 |
154 | ### Exit
155 |
156 | exit $error_count
--------------------------------------------------------------------------------
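
Note on the harness above: initialize_dataset re-seeds the backing Fuseki dataset before each group of mutating tests by prepending an @base directive to dataset.trig and replacing the whole dataset with a single Graph Store Protocol PUT. A minimal standalone sketch of the same call, reusing only values that run.sh itself exports (ports, dataset path and file name come from run.sh, nothing new is introduced):

    BASE_URL="http://localhost:8080/"          # LDT application base URI (from run.sh)
    ENDPOINT_URL="http://localhost:3030/ds/"   # Fuseki dataset endpoint (from run.sh)

    # prepend @base so relative URIs in dataset.trig resolve against the application base,
    # then replace the dataset contents via the Graph Store Protocol "data" service
    echo "@base <${BASE_URL}> ." \
        | cat - dataset.trig \
        | curl -f -s \
            -X PUT \
            --data-binary @- \
            -H "Content-Type: application/trig" \
            "${ENDPOINT_URL}data" > /dev/null
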
/http-tests/sparql-protocol/query/GET-304.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # query twice - supply ETag second time and expect 304 Not Modified
4 |
5 | etag=$(
6 | curl -f -s -I -G \
7 | -H "Accept: application/n-triples" \
8 | "${BASE_URL}sparql" \
9 | --data-urlencode "query=DESCRIBE *" \
10 | | grep 'ETag' \
11 | | tr -d '\r' \
12 | | sed -En 's/^ETag: (.*)$/\1/p')
13 |
14 | curl -w "%{http_code}\n" -f -s -G \
15 | -H "Accept: application/n-triples" \
16 | "${BASE_URL}sparql" \
17 | --data-urlencode "query=DESCRIBE *" \
18 | -H "If-None-Match: $etag" \
19 | | grep -q "${STATUS_NOT_MODIFIED}"
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/query/GET-csv-results.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # use conneg to request CSV results as the preferred format
4 |
5 | curl -f -s -G \
6 | -H "Accept: text/csv" \
7 | "${BASE_URL}sparql" \
8 | --data-urlencode "query=SELECT * { GRAPH <${BASE_URL}graphs/name/> { <${BASE_URL}named-subject> ?o } }" \
9 | | tr -s '\r\n' '|' \
10 | | grep "o|named object|${BASE_URL}named-object|" > /dev/null
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/query/GET-default-graph-uri.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # SPARQL query with specified default graph URI
4 |
5 | curl -f -s -G \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}sparql" \
8 | --data-urlencode "query=CONSTRUCT WHERE { ?s ?p ?o }" \
9 | --data-urlencode "default-graph-uri=${BASE_URL}graphs/name/" \
10 | | rapper -q --input ntriples --output ntriples /dev/stdin - \
11 | | tr -s '\n' '\t' \
12 | | grep "${BASE_URL}named-subject" \
13 | | grep -v "${BASE_URL}default-subject" > /dev/null
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/query/GET-json-results.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # use conneg to request JSON results as the preferred format
4 |
5 | curl -f -s -G \
6 | -H "Accept: application/sparql-results+json" \
7 | "${BASE_URL}sparql" \
8 | --data-urlencode "query=SELECT * { GRAPH <${BASE_URL}graphs/name/> { <${BASE_URL}named-subject> ?o } }" \
9 | | tr -s '\n' '\t' \
10 | | grep "{ \"type\": \"literal\" , \"value\": \"named object\" }" \
11 | | grep "{ \"type\": \"uri\" , \"value\": \"${BASE_URL}named-object\" }" > /dev/null
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/query/GET-named-graph-uri.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # SPARQL query with specified named graph URI
4 |
5 | curl -f -s -G \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}sparql" \
8 | --data-urlencode "query=CONSTRUCT { ?s ?p ?o } WHERE { GRAPH <${BASE_URL}graphs/name/> { ?s ?p ?o } }" \
9 | --data-urlencode "named-graph-uri=${BASE_URL}graphs/name/" \
10 | | rapper -q --input ntriples --output ntriples /dev/stdin - \
11 | | tr -s '\n' '\t' \
12 | | grep "${BASE_URL}named-subject" \
13 | | grep -v "${BASE_URL}another-named-subject" > /dev/null
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/query/GET-ntriples.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # use conneg to request N-Triples as the preferred format
4 |
5 | curl -f -s -G \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}sparql" \
8 | --data-urlencode "query=CONSTRUCT { <${BASE_URL}named-subject> ?o } { GRAPH <${BASE_URL}graphs/name/> { <${BASE_URL}named-subject> ?o } }" \
9 | | rapper -q --input ntriples --output ntriples /dev/stdin - \
10 | | tr -s '\n' '\t' \
11 | | grep '"named object"' \
12 | | grep "${BASE_URL}named-object" > /dev/null
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/query/GET-query-invalid-400.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # query string invalid
4 |
5 | curl -w "%{http_code}\n" -f -s -G \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}sparql" \
8 | --data-urlencode "query=WHATEVER" \
9 | | grep -q "${STATUS_BAD_REQUEST}"
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/query/GET-query-not-set-400.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # query parameter not set
4 |
5 | curl -w "%{http_code}\n" -f -s \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}sparql" \
8 | | grep -q "${STATUS_BAD_REQUEST}"
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/query/GET-tsv-results.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # use conneg to request TSV results as the preferred format
4 |
5 | curl -f -s -G \
6 | -H "Accept: text/tab-separated-values" \
7 | "${BASE_URL}sparql" \
8 | --data-urlencode "query=SELECT * { GRAPH <${BASE_URL}graphs/name/> { <${BASE_URL}named-subject> ?o } }" \
9 | | tr -s '\r\n' '|' \
10 | | grep "?o|\"named object\"|<${BASE_URL}named-object>|" > /dev/null
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/query/GET-xml-results.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # use conneg to request XML results as the preferred format
4 |
5 | curl -f -s -G \
6 | -H "Accept: application/sparql-results+xml" \
7 | "${BASE_URL}sparql" \
8 | --data-urlencode "query=SELECT * { GRAPH <${BASE_URL}graphs/name/> { <${BASE_URL}named-subject> ?o } }" \
9 | | tr -s '\n' '\t' \
10 | | grep "named object" \
11 | | grep "${BASE_URL}named-object" > /dev/null
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/query/GET.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # SPARQL query
4 |
5 | curl -w "%{http_code}\n" -f -s -G \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}sparql" \
8 | --data-urlencode "query=DESCRIBE *" \
9 | | grep -q "${STATUS_OK}"
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/query/POST-default-graph-uri.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # SPARQL query with specified default graph URI
4 |
5 | curl -f -s \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}sparql" \
8 | --data-urlencode "query=CONSTRUCT WHERE { ?s ?p ?o }" \
9 | --data-urlencode "default-graph-uri=${BASE_URL}graphs/name/" \
10 | | rapper -q --input ntriples --output ntriples /dev/stdin - \
11 | | tr -s '\n' '\t' \
12 | | grep "${BASE_URL}named-subject" \
13 | | grep -v "${BASE_URL}default-subject" > /dev/null
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/query/POST-directly-default-graph-uri.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # SPARQL query with specified named graph URI, directly as POST body
4 |
5 | # separate URL-encoding step because we cannot combine -G with --data-binary
6 | encoded_url=$(curl -w "%{url_effective}\n" -G -s -o /dev/null \
7 | --data-urlencode "default-graph-uri=${BASE_URL}graphs/name/" \
8 | "${BASE_URL}sparql")
9 |
10 | (
11 | curl -f -s \
12 | -H "Content-Type: application/sparql-query" \
13 | -H "Accept: application/n-triples" \
14 | "${encoded_url}" \
15 | --data-binary @- <<EOF
16 | CONSTRUCT WHERE { ?s ?p ?o }
17 | EOF
18 | ) \
19 | | rapper -q --input ntriples --output ntriples /dev/stdin - \
20 | | tr -s '\n' '\t' \
21 | | grep "${BASE_URL}named-subject" \
22 | | grep -v "${BASE_URL}default-subject" > /dev/null
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/query/POST-directly-named-graph-uri.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # SPARQL query with specified named graph URI, directly as POST body
4 |
5 | # separate URL-encoding step because we cannot combine -G with --data-binary
6 | encoded_url=$(curl -w "%{url_effective}\n" -G -s -o /dev/null \
7 | --data-urlencode "named-graph-uri=${BASE_URL}graphs/name/" \
8 | "${BASE_URL}sparql")
9 |
10 | (
11 | curl -f -s \
12 | -H "Content-Type: application/sparql-query" \
13 | -H "Accept: application/n-triples" \
14 | "${encoded_url}" \
15 | --data-binary @- <<EOF
16 | CONSTRUCT { ?s ?p ?o } WHERE { GRAPH <${BASE_URL}graphs/name/> { ?s ?p ?o } }
17 | EOF
18 | ) \
19 | | rapper -q --input ntriples --output ntriples /dev/stdin - \
20 | | tr -s '\n' '\t' \
21 | | grep "${BASE_URL}named-subject" \
22 | | grep -v "${BASE_URL}another-named-subject" > /dev/null
--------------------------------------------------------------------------------
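
The pre-encoding step in the two POST-directly-*-graph-uri tests above is a reusable curl idiom: with -G, every --data-urlencode pair is appended to the URL as a percent-encoded query string, -s -o /dev/null discards the (irrelevant) response, and -w "%{url_effective}" prints the resulting URL so it can be reused in a second request whose body must go through --data-binary. A minimal sketch, assuming the same BASE_URL exported by run.sh is reachable (the echoed value below is illustrative only):

    # build a percent-encoded URL without caring about the response of this request
    encoded_url=$(curl -G -s -o /dev/null -w "%{url_effective}" \
        --data-urlencode "named-graph-uri=${BASE_URL}graphs/name/" \
        "${BASE_URL}sparql")

    echo "$encoded_url"
    # e.g. http://localhost:8080/sparql?named-graph-uri=http%3A%2F%2Flocalhost%3A8080%2Fgraphs%2Fname%2F
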
/http-tests/sparql-protocol/query/POST-directly-ntriples.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # use conneg to request N-Triples as the preferred format; send query as POST body
4 |
5 | (
6 | curl -f -s \
7 | -H "Content-Type: application/sparql-query" \
8 | -H "Accept: application/n-triples" \
9 | "${BASE_URL}sparql" \
10 | --data-binary @- <<EOF
11 | CONSTRUCT { <${BASE_URL}named-subject> <http://example.com/named-predicate> ?o } { GRAPH <${BASE_URL}graphs/name/> { <${BASE_URL}named-subject> <http://example.com/named-predicate> ?o } }
12 | EOF
13 | ) \
14 | | rapper -q --input ntriples --output ntriples /dev/stdin - \
15 | | tr -s '\n' '\t' \
16 | | grep '"named object"' \
17 | | grep "${BASE_URL}named-object" > /dev/null
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/query/POST-directly-query-invalid-400.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # query string invalid, directly as POST body
4 |
5 | (
6 | curl -w "%{http_code}\n" -f -s \
7 | -H "Content-Type: application/sparql-query" \
8 | -H "Accept: application/n-triples" \
9 | "${BASE_URL}sparql" \
10 | --data-binary @- < { <${BASE_URL}named-subject> ?o } }
12 | EOF
13 | ) \
14 | | tr -s '\n' '\t' \
15 | | grep "named object" \
16 | | grep "${BASE_URL}named-object" > /dev/null
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/query/POST-directly.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # SPARQL query directly as POST body
4 |
5 | (
6 | curl -w "%{http_code}\n" -f -s \
7 | -H "Content-Type: application/sparql-query" \
8 | -H "Accept: application/n-triples" \
9 | "${BASE_URL}sparql" \
10 | --data-binary @- <<EOF
11 | DESCRIBE *
12 | EOF
13 | ) \
14 | | grep -q "${STATUS_OK}"
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/query/POST-json-results.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # use conneg to request JSON results as the preferred format
4 |
5 | curl -f -s \
6 | -H "Accept: application/sparql-results+json" \
7 | "${BASE_URL}sparql" \
8 | --data-urlencode "query=SELECT * { GRAPH <${BASE_URL}graphs/name/> { <${BASE_URL}named-subject> <http://example.com/named-predicate> ?o } }" \
9 | | tr -s '\n' '\t' \
10 | | grep "{ \"type\": \"literal\" , \"value\": \"named object\" }" \
11 | | grep "{ \"type\": \"uri\" , \"value\": \"${BASE_URL}named-object\" }" > /dev/null
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/query/POST-named-graph-uri.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # SPARQL query with specified named graph URI
4 |
5 | curl -f -s \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}sparql" \
8 | --data-urlencode "query=CONSTRUCT { ?s ?p ?o } WHERE { GRAPH <${BASE_URL}graphs/name/> { ?s ?p ?o } }" \
9 | --data-urlencode "named-graph-uri=${BASE_URL}graphs/name/" \
10 | | rapper -q --input ntriples --output ntriples /dev/stdin - \
11 | | tr -s '\n' '\t' \
12 | | grep "${BASE_URL}named-subject" \
13 | | grep -v "${BASE_URL}another-named-subject" > /dev/null
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/query/POST-ntriples.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # use conneg to request N-Triples as the preferred format
4 |
5 | curl -f -s \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}sparql" \
8 | --data-urlencode "query=CONSTRUCT { <${BASE_URL}named-subject> ?o } { GRAPH <${BASE_URL}graphs/name/> { <${BASE_URL}named-subject> ?o } }" \
9 | | rapper -q --input ntriples --output ntriples /dev/stdin - \
10 | | tr -s '\n' '\t' \
11 | | grep '"named object"' \
12 | | grep "${BASE_URL}named-object" > /dev/null
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/query/POST-query-invalid-400.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # query string invalid
4 |
5 | curl -w "%{http_code}\n" -f -s \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}sparql" \
8 | --data-urlencode "query=WHATEVER" \
9 | | grep -q "${STATUS_BAD_REQUEST}"
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/query/POST-query-not-set-400.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # query parameter not set
4 |
5 | curl -X POST -w "%{http_code}\n" -f -s \
6 | -H "Accept: application/n-triples" \
7 | -H "Content-Type: application/x-www-form-urlencoded" \
8 | "${BASE_URL}sparql" \
9 | | grep -q "${STATUS_BAD_REQUEST}"
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/query/POST-xml-results.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # use conneg to request XML results as the preferred format
4 |
5 | curl -f -s \
6 | -H "Accept: application/sparql-results+xml" \
7 | "${BASE_URL}sparql" \
8 | --data-urlencode "query=SELECT * { GRAPH <${BASE_URL}graphs/name/> { <${BASE_URL}named-subject> ?o } }" \
9 | | tr -s '\n' '\t' \
10 | | grep "named object" \
11 | | grep "${BASE_URL}named-object" > /dev/null
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/query/POST.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # SPARQL query
4 |
5 | curl -w "%{http_code}\n" -f -s \
6 | -H "Accept: application/n-triples" \
7 | "${BASE_URL}sparql" \
8 | --data-urlencode "query=DESCRIBE *" \
9 | | grep -q "${STATUS_OK}"
--------------------------------------------------------------------------------
/http-tests/sparql-protocol/update/POST-directly.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # SPARQL update directly as POST body
4 |
5 | (
6 | curl -w "%{http_code}\n" -f -s \
7 | -H "Content-Type: application/sparql-update" \
8 | -H "Accept: application/n-triples" \
9 | "${BASE_URL}sparql" \
10 | --data-binary @- <<EOF
--------------------------------------------------------------------------------
/src/main/java/com/atomgraph/processor/exception/ParameterException.java:
--------------------------------------------------------------------------------
1 | /*
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.atomgraph.processor.exception;
18 |
19 | import com.atomgraph.processor.model.Template;
20 | import com.atomgraph.processor.model.Parameter;
21 |
22 | /**
23 | *
24 | * @author Martynas Jusevičius {@literal }
25 | */
26 | public class ParameterException extends RuntimeException
27 | {
28 |
29 | public ParameterException(String paramName, Template template)
30 | {
31 | super("Parameter '" + paramName + "' not supported by Template '" + template.toString() + "'");
32 | }
33 |
34 | public ParameterException(Parameter param, Template template)
35 | {
36 | super("Argument with predicate '" + param.getPredicate() + "' is not optional in Template '" + template.toString() + "' but no value is supplied");
37 | }
38 |
39 | }
40 |
--------------------------------------------------------------------------------
/src/main/java/com/atomgraph/processor/factory/OntologyFactory.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2021 Martynas Jusevičius .
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.atomgraph.processor.factory;
17 |
18 | import java.util.Optional;
19 | import jakarta.ws.rs.ext.Provider;
20 | import org.apache.jena.ontology.Ontology;
21 | import org.glassfish.hk2.api.Factory;
22 |
23 | /**
24 | *
25 | * @author Martynas Jusevičius {@literal }
26 | */
27 | @Provider
28 | public class OntologyFactory implements Factory<Optional<Ontology>>
29 | {
30 |
31 | private final Ontology ontology;
32 |
33 | public OntologyFactory(Ontology ontology)
34 | {
35 | this.ontology = ontology;
36 | }
37 |
38 | @Override
39 | public Optional<Ontology> provide()
40 | {
41 | return getOntology();
42 | }
43 |
44 | @Override
45 | public void dispose(Optional<Ontology> t)
46 | {
47 | }
48 |
49 | protected Optional<Ontology> getOntology()
50 | {
51 | return Optional.of(ontology);
52 | }
53 |
54 | }
55 |
--------------------------------------------------------------------------------
/src/main/java/com/atomgraph/processor/factory/TemplateCallFactory.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 Martynas Jusevičius .
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.atomgraph.processor.factory;
17 |
18 | import com.atomgraph.processor.model.Template;
19 | import com.atomgraph.processor.model.TemplateCall;
20 | import com.atomgraph.processor.model.impl.TemplateCallImpl;
21 | import com.atomgraph.processor.util.TemplateMatcher;
22 | import java.net.URI;
23 | import java.util.Optional;
24 | import jakarta.inject.Inject;
25 | import jakarta.ws.rs.core.Context;
26 | import jakarta.ws.rs.core.MultivaluedMap;
27 | import jakarta.ws.rs.core.UriInfo;
28 | import jakarta.ws.rs.ext.Provider;
29 | import org.apache.jena.ontology.Ontology;
30 | import org.apache.jena.rdf.model.ModelFactory;
31 | import org.glassfish.hk2.api.Factory;
32 | import org.slf4j.Logger;
33 | import org.slf4j.LoggerFactory;
34 |
35 | /**
36 | * Template call provider.
37 | *
38 | * @see com.atomgraph.processor.model.impl.TemplateCallImpl
39 | * @author Martynas Jusevičius {@literal }
40 | */
41 | @Provider
42 | public class TemplateCallFactory implements Factory<Optional<TemplateCall>>
43 | {
44 |
45 | private static final Logger log = LoggerFactory.getLogger(TemplateCallFactory.class);
46 |
47 | @Context UriInfo uriInfo;
48 |
49 | @Inject Optional<Ontology> ontology;
50 |
51 | @Override
52 | public Optional<TemplateCall> provide()
53 | {
54 | return getTemplateCall();
55 | }
56 |
57 | @Override
58 | public void dispose(Optional<TemplateCall> tc)
59 | {
60 | }
61 |
62 | public Optional<TemplateCall> getTemplateCall()
63 | {
64 | Template template = getTemplate();
65 | if (template != null) return getTemplateCall(template, getUriInfo().getAbsolutePath(), getUriInfo().getQueryParameters());
66 |
67 | return Optional.empty();
68 | }
69 |
70 | public Optional<TemplateCall> getTemplateCall(Template template, URI absolutePath, MultivaluedMap<String, String> queryParams)
71 | {
72 | if (template == null) throw new IllegalArgumentException("Template cannot be null");
73 | if (absolutePath == null) throw new IllegalArgumentException("URI cannot be null");
74 | if (queryParams == null) throw new IllegalArgumentException("MultivaluedMap cannot be null");
75 |
76 | //if (log.isDebugEnabled()) log.debug("Building Optional from Template {}", template);
77 | TemplateCall templateCall = new TemplateCallImpl(ModelFactory.createDefaultModel().createResource(absolutePath.toString()), template).
78 | applyArguments(queryParams). // apply URL query parameters
79 | applyDefaults().
80 | validateOptionals(); // validate (non-)optional arguments
81 | templateCall.build(); // build state URI
82 |
83 | return Optional.of(templateCall);
84 | }
85 |
86 | public Template getTemplate()
87 | {
88 | if (getOntology().isPresent()) return getTemplate(getOntology().get(), getUriInfo());
89 |
90 | return null;
91 | }
92 |
93 | public Template getTemplate(Ontology ontology, UriInfo uriInfo)
94 | {
95 | return new TemplateMatcher(ontology).match(uriInfo.getAbsolutePath(), uriInfo.getBaseUri());
96 | }
97 |
98 | public Optional<Ontology> getOntology()
99 | {
100 | return ontology;
101 | }
102 |
103 | public UriInfo getUriInfo()
104 | {
105 | return uriInfo;
106 | }
107 |
108 | }
109 |
--------------------------------------------------------------------------------
/src/main/java/com/atomgraph/processor/model/Application.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 Martynas Jusevičius .
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.atomgraph.processor.model;
17 |
18 | import org.apache.jena.rdf.model.Resource;
19 |
20 | /**
21 | *
22 | * @author Martynas Jusevičius {@literal }
23 | */
24 | public interface Application extends com.atomgraph.core.model.Application
25 | {
26 |
27 | Resource getOntology();
28 |
29 | }
30 |
--------------------------------------------------------------------------------
/src/main/java/com/atomgraph/processor/model/Parameter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 Martynas Jusevičius .
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.atomgraph.processor.model;
17 |
18 | /**
19 | *
20 | * @author Martynas Jusevičius {@literal }
21 | */
22 | public interface Parameter extends com.atomgraph.spinrdf.model.Argument
23 | {
24 |
25 | }
26 |
--------------------------------------------------------------------------------
/src/main/java/com/atomgraph/processor/model/Template.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 Martynas Jusevičius .
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.atomgraph.processor.model;
17 |
18 | import java.util.Comparator;
19 | import java.util.List;
20 | import java.util.Locale;
21 | import java.util.Map;
22 | import jakarta.ws.rs.core.CacheControl;
23 | import org.apache.jena.ontology.OntClass;
24 | import org.apache.jena.rdf.model.Property;
25 | import org.apache.jena.rdf.model.Resource;
26 | import org.glassfish.jersey.uri.UriTemplate;
27 |
28 | /**
29 | *
30 | * @author Martynas Jusevičius {@literal }
31 | */
32 | public interface Template extends OntClass
33 | {
34 |
35 | static public final Comparator<Template> COMPARATOR = new Comparator<Template>()
36 | {
37 |
38 | @Override
39 | public int compare(Template template1, Template template2)
40 | {
41 | // Template always has default priority
42 | double diff = template2.getPriority() - template1.getPriority();
43 | if (diff > 0) return 1;
44 | if (diff < 0) return -1;
45 |
46 | return UriTemplate.COMPARATOR.compare(template1.getMatch(), template2.getMatch());
47 | }
48 |
49 | };
50 |
51 | UriTemplate getMatch();
52 |
53 | String getFragmentTemplate();
54 |
55 | Resource getQuery();
56 |
57 | Resource getUpdate();
58 |
59 | Double getPriority();
60 |
61 | Map<Property, Parameter> getParameters();
62 |
63 | Map<Property, Parameter> getLocalParameters();
64 |
65 | Map<String, Parameter> getParameterMap();
66 |
67 | List<Locale> getLanguages();
68 |
69 | Resource getLoadClass();
70 |
71 | CacheControl getCacheControl();
72 |
73 | List<Template> getSuperTemplates();
74 |
75 | }
76 |
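
As a side note on the COMPARATOR above: it orders templates by descending ldt:priority and falls back to JAX-RS URI-template precedence on ties. A minimal sketch of how matching code might rank candidates (the helper class and method names are illustrative, not part of the repository):

    import com.atomgraph.processor.model.Template;
    import java.util.List;

    // Illustrative helper: once candidate templates have been collected,
    // sorting with Template.COMPARATOR puts the highest-priority / most specific match first.
    final class TemplateRanking
    {
        private TemplateRanking() { }

        static Template best(List<Template> candidates)
        {
            candidates.sort(Template.COMPARATOR);
            return candidates.isEmpty() ? null : candidates.get(0);
        }
    }
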
--------------------------------------------------------------------------------
/src/main/java/com/atomgraph/processor/model/TemplateCall.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Martynas Jusevičius .
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.atomgraph.processor.model;
17 |
18 | import jakarta.ws.rs.core.MultivaluedMap;
19 | import org.apache.jena.query.QuerySolutionMap;
20 | import org.apache.jena.rdf.model.Property;
21 | import org.apache.jena.rdf.model.RDFNode;
22 | import org.apache.jena.rdf.model.Resource;
23 | import org.apache.jena.rdf.model.Statement;
24 | import org.apache.jena.rdf.model.StmtIterator;
25 |
26 | /**
27 | *
28 | * @author Martynas Jusevičius {@literal }
29 | */
30 | public interface TemplateCall
31 | {
32 |
33 | Template getTemplate();
34 |
35 | TemplateCall applyArguments(MultivaluedMap<String, String> queryParams);
36 |
37 | TemplateCall applyDefaults();
38 |
39 | StmtIterator listArguments();
40 |
41 | boolean hasArgument(Property predicate);
42 |
43 | Resource getArgument(Property predicate);
44 |
45 | boolean hasArgument(String varName, RDFNode object);
46 |
47 | Resource getArgument(String varName, RDFNode object);
48 |
49 | Statement getArgumentProperty(Property predicate);
50 |
51 | TemplateCall arg(Parameter param, RDFNode value);
52 |
53 | TemplateCall arg(Resource arg);
54 |
55 | TemplateCall validateOptionals();
56 |
57 | QuerySolutionMap getQuerySolutionMap();
58 |
59 | Resource build();
60 |
61 | }
62 |
--------------------------------------------------------------------------------
/src/main/java/com/atomgraph/processor/model/impl/ApplicationImpl.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 Martynas Jusevičius .
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.atomgraph.processor.model.impl;
17 |
18 | import com.atomgraph.core.model.Service;
19 | import com.atomgraph.processor.model.Application;
20 | import org.apache.jena.rdf.model.Resource;
21 |
22 | /**
23 | *
24 | * @author Martynas Jusevičius {@literal }
25 | */
26 | public class ApplicationImpl implements Application
27 | {
28 | private final Resource ontology;
29 | private final Service service;
30 |
31 | public ApplicationImpl(Service service, Resource ontology)
32 | {
33 | if (ontology == null) throw new IllegalArgumentException("Resource cannot be null");
34 | if (service == null) throw new IllegalArgumentException("Service cannot be null");
35 | this.ontology = ontology;
36 | this.service = service;
37 | }
38 |
39 | @Override
40 | public Resource getOntology()
41 | {
42 | return ontology;
43 | }
44 |
45 | @Override
46 | public Service getService()
47 | {
48 | return service;
49 | }
50 |
51 | }
52 |
--------------------------------------------------------------------------------
/src/main/java/com/atomgraph/processor/model/impl/ParameterImpl.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 Martynas Jusevičius .
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.atomgraph.processor.model.impl;
17 |
18 | import org.apache.jena.enhanced.EnhGraph;
19 | import org.apache.jena.enhanced.EnhNode;
20 | import org.apache.jena.enhanced.Implementation;
21 | import org.apache.jena.graph.Node;
22 | import org.apache.jena.ontology.ConversionException;
23 | import org.apache.jena.vocabulary.RDF;
24 | import com.atomgraph.processor.vocabulary.LDT;
25 | import org.slf4j.Logger;
26 | import org.slf4j.LoggerFactory;
27 | import com.atomgraph.processor.model.Parameter;
28 |
29 | /**
30 | *
31 | * @author Martynas Jusevičius {@literal }
32 | */
33 | public class ParameterImpl extends com.atomgraph.spinrdf.model.impl.ArgumentImpl implements Parameter
34 | {
35 |
36 | private static final Logger log = LoggerFactory.getLogger(ParameterImpl.class);
37 |
38 | public static Implementation factory = new Implementation()
39 | {
40 |
41 | @Override
42 | public EnhNode wrap(Node node, EnhGraph enhGraph)
43 | {
44 | if (canWrap(node, enhGraph))
45 | {
46 | return new ParameterImpl(node, enhGraph);
47 | }
48 | else
49 | {
50 | throw new ConversionException( "Cannot convert node " + node.toString() + " to Parameter: it does not have rdf:type ldt:Parameter or equivalent");
51 | }
52 | }
53 |
54 | @Override
55 | public boolean canWrap(Node node, EnhGraph eg)
56 | {
57 | if (eg == null) throw new IllegalArgumentException("EnhGraph cannot be null");
58 |
59 | return eg.asGraph().contains(node, RDF.type.asNode(), LDT.Parameter.asNode());
60 | }
61 | };
62 |
63 | public ParameterImpl(Node node, EnhGraph enhGraph)
64 | {
65 | super(node, enhGraph);
66 | }
67 |
68 | @Override
69 | public String toString()
70 | {
71 | StringBuilder sb = new StringBuilder();
72 | sb.append("[<").
73 | append(getPredicate().getURI()).
74 | append(">");
75 | if (getDefaultValue() != null)
76 | sb.append(", ").
77 | append(getDefaultValue());
78 | if (getValueType() != null)
79 | sb.append(", ").
80 | append(getValueType());
81 | sb.append("]");
82 | return sb.toString();
83 | }
84 |
85 | }
86 |
--------------------------------------------------------------------------------
/src/main/java/com/atomgraph/processor/server/filter/response/ResponseHeaderFilter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2021 Martynas Jusevičius .
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.atomgraph.processor.server.filter.response;
18 |
19 | import com.atomgraph.core.util.Link;
20 | import com.atomgraph.processor.model.TemplateCall;
21 | import com.atomgraph.processor.vocabulary.LDT;
22 | import java.io.IOException;
23 | import java.net.URI;
24 | import java.util.Optional;
25 | import jakarta.inject.Inject;
26 | import jakarta.ws.rs.container.ContainerRequestContext;
27 | import jakarta.ws.rs.container.ContainerResponseContext;
28 | import jakarta.ws.rs.container.ContainerResponseFilter;
29 | import jakarta.ws.rs.core.Context;
30 | import jakarta.ws.rs.core.HttpHeaders;
31 | import jakarta.ws.rs.core.UriInfo;
32 | import org.apache.jena.ontology.Ontology;
33 |
34 | /**
35 | *
36 | * @author {@literal Martynas Jusevičius }
37 | */
38 | public class ResponseHeaderFilter implements ContainerResponseFilter
39 | {
40 |
41 | @Inject jakarta.inject.Provider<Optional<Ontology>> ontology;
42 | @Inject jakarta.inject.Provider<Optional<TemplateCall>> templateCall;
43 |
44 | @Context UriInfo uriInfo;
45 |
46 | @Override
47 | public void filter(ContainerRequestContext request, ContainerResponseContext response) throws IOException
48 | {
49 | response.getHeaders().add(HttpHeaders.LINK, new Link(getUriInfo().getBaseUri(), LDT.base.getURI(), null));
50 |
51 | if (getOntology().isPresent()) // if it's not present, Link headers might be forwarded by ProxyResourceBase
52 | response.getHeaders().add(HttpHeaders.LINK, new Link(URI.create(getOntology().get().getURI()), LDT.ontology.getURI(), null));
53 | if (getTemplateCall().isPresent())
54 | response.getHeaders().add(HttpHeaders.LINK, new Link(URI.create(getTemplateCall().get().getTemplate().getURI()), LDT.template.getURI(), null));
55 | }
56 |
57 | public Optional<Ontology> getOntology()
58 | {
59 | return ontology.get();
60 | }
61 |
62 | public Optional<TemplateCall> getTemplateCall()
63 | {
64 | return templateCall.get();
65 | }
66 |
67 | public UriInfo getUriInfo()
68 | {
69 | return uriInfo;
70 | }
71 |
72 | }
73 |
--------------------------------------------------------------------------------
/src/main/java/com/atomgraph/processor/server/io/SkolemizingDatasetProvider.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Martynas Jusevičius .
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.atomgraph.processor.server.io;
17 |
18 | import com.atomgraph.processor.util.Skolemizer;
19 | import com.atomgraph.server.exception.SkolemizationException;
20 | import com.atomgraph.server.io.ValidatingDatasetProvider;
21 | import java.util.Iterator;
22 | import jakarta.ws.rs.core.Context;
23 | import jakarta.ws.rs.core.Request;
24 | import jakarta.ws.rs.core.UriBuilder;
25 | import jakarta.ws.rs.core.UriInfo;
26 | import org.apache.jena.ontology.Ontology;
27 | import org.apache.jena.query.Dataset;
28 | import org.apache.jena.rdf.model.Model;
29 | import org.apache.jena.rdf.model.ResIterator;
30 | import org.apache.jena.rdf.model.Resource;
31 | import org.slf4j.Logger;
32 | import org.slf4j.LoggerFactory;
33 |
34 | /**
35 | * Dataset provider that skolemizes read triples in each graph against class URI templates in an ontology.
36 | *
37 | * @author Martynas Jusevičius {@literal }
38 | */
39 | public class SkolemizingDatasetProvider extends ValidatingDatasetProvider
40 | {
41 |
42 | private static final Logger log = LoggerFactory.getLogger(SkolemizingDatasetProvider.class);
43 |
44 | @Context private Request request;
45 | @Context UriInfo uriInfo;
46 |
47 | @Override
48 | public Dataset process(Dataset dataset)
49 | {
50 | dataset = super.process(dataset); // validation
51 |
52 | process(dataset.getDefaultModel());
53 |
54 | Iterator<String> it = dataset.listNames();
55 | while (it.hasNext())
56 | {
57 | String graphURI = it.next();
58 | process(dataset.getNamedModel(graphURI));
59 | }
60 |
61 | return dataset;
62 | }
63 |
64 | public Model process(Model model)
65 | {
66 | ResIterator it = model.listSubjects();
67 | try
68 | {
69 | while (it.hasNext())
70 | {
71 | Resource resource = it.next();
72 | process(resource);
73 | }
74 | }
75 | finally
76 | {
77 | it.close();
78 | }
79 |
80 | if (getOntology().isPresent()) return skolemize(getOntology().get(), getUriInfo().getBaseUriBuilder(), getUriInfo().getAbsolutePathBuilder(), model);
81 | else return model;
82 | }
83 |
84 | public Resource process(Resource resource)
85 | {
86 | return resource;
87 | }
88 |
89 | public Model skolemize(Ontology ontology, UriBuilder baseUriBuilder, UriBuilder absolutePathBuilder, Model model)
90 | {
91 | try
92 | {
93 | return new Skolemizer(ontology, baseUriBuilder, absolutePathBuilder).build(model); // not optimal to create Skolemizer for each Model
94 | }
95 | catch (IllegalArgumentException ex)
96 | {
97 | throw new SkolemizationException(ex, model);
98 | }
99 | }
100 |
101 | public Request getRequest()
102 | {
103 | return request;
104 | }
105 |
106 | public UriInfo getUriInfo()
107 | {
108 | return uriInfo;
109 | }
110 |
111 | }
112 |
--------------------------------------------------------------------------------
/src/main/java/com/atomgraph/processor/server/io/SkolemizingModelProvider.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2015 Martynas Jusevičius .
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.atomgraph.processor.server.io;
18 |
19 | import org.apache.jena.ontology.Ontology;
20 | import org.apache.jena.rdf.model.Model;
21 | import jakarta.ws.rs.core.Context;
22 | import jakarta.ws.rs.core.Request;
23 | import jakarta.ws.rs.core.UriBuilder;
24 | import com.atomgraph.server.exception.SkolemizationException;
25 | import com.atomgraph.processor.util.Skolemizer;
26 | import com.atomgraph.server.io.ValidatingModelProvider;
27 | import jakarta.ws.rs.HttpMethod;
28 | import org.apache.jena.rdf.model.ResIterator;
29 | import org.apache.jena.rdf.model.Resource;
30 | import org.slf4j.Logger;
31 | import org.slf4j.LoggerFactory;
32 |
33 | /**
34 | * Model provider that skolemizes read triples against class URI templates in an ontology.
35 | *
36 | * @author Martynas Jusevičius {@literal }
37 | */
38 | public class SkolemizingModelProvider extends ValidatingModelProvider
39 | {
40 | private static final Logger log = LoggerFactory.getLogger(SkolemizingModelProvider.class);
41 |
42 | @Context private Request request;
43 |
44 | @Override
45 | public Model processRead(Model model)
46 | {
47 | if (getRequest().getMethod().equalsIgnoreCase(HttpMethod.POST) || getRequest().getMethod().equalsIgnoreCase(HttpMethod.PUT))
48 | {
49 | ResIterator it = model.listSubjects();
50 | try
51 | {
52 | while (it.hasNext())
53 | {
54 | Resource resource = it.next();
55 | process(resource);
56 | }
57 | }
58 | finally
59 | {
60 | it.close();
61 | }
62 |
63 | if (getOntology().isPresent()) return skolemize(getOntology().get(), getUriInfo().getBaseUriBuilder(), getUriInfo().getAbsolutePathBuilder(), super.processRead(model));
64 | else return model;
65 | }
66 |
67 | return super.processRead(model);
68 | }
69 |
70 | public Resource process(Resource resource)
71 | {
72 | return resource;
73 | }
74 |
75 | public Model skolemize(Ontology ontology, UriBuilder baseUriBuilder, UriBuilder absolutePathBuilder, Model model)
76 | {
77 | try
78 | {
79 | return new Skolemizer(ontology, baseUriBuilder, absolutePathBuilder).build(model);
80 | }
81 | catch (IllegalArgumentException ex)
82 | {
83 | throw new SkolemizationException(ex, model);
84 | }
85 | }
86 |
87 | public Request getRequest()
88 | {
89 | return request;
90 | }
91 |
92 | }
93 |
--------------------------------------------------------------------------------
/src/main/java/com/atomgraph/processor/server/mapper/ParameterExceptionMapper.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 Martynas Jusevičius .
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.atomgraph.processor.server.mapper;
18 |
19 | import com.atomgraph.core.MediaTypes;
20 | import org.apache.jena.rdf.model.ResourceFactory;
21 | import jakarta.ws.rs.core.Response;
22 | import jakarta.ws.rs.ext.ExceptionMapper;
23 | import com.atomgraph.processor.exception.ParameterException;
24 | import com.atomgraph.server.mapper.ExceptionMapperBase;
25 | import jakarta.inject.Inject;
26 |
27 | /**
28 | *
29 | * @author Martynas Jusevičius {@literal }
30 | */
31 | public class ParameterExceptionMapper extends ExceptionMapperBase implements ExceptionMapper
32 | {
33 |
34 | @Inject
35 | public ParameterExceptionMapper(MediaTypes mediaTypes)
36 | {
37 | super(mediaTypes);
38 | }
39 |
40 | @Override
41 | public Response toResponse(ParameterException ex)
42 | {
43 | return getResponseBuilder(toResource(ex, Response.Status.BAD_REQUEST,
44 | ResourceFactory.createResource("http://www.w3.org/2011/http-statusCodes#BadRequest")).
45 | getModel()).
46 | status(Response.Status.BAD_REQUEST).
47 | build();
48 | }
49 |
50 | }
51 |
--------------------------------------------------------------------------------
/src/main/java/com/atomgraph/processor/server/model/QueriedResource.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014 Martynas Jusevičius .
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.atomgraph.processor.server.model;
18 |
19 | import org.apache.jena.update.UpdateRequest;
20 |
21 | /**
22 | * RDF resource, representation of which was queried from a SPARQL endpoint.
23 | *
24 | * @author Martynas Jusevičius {@literal }
25 | */
26 | public interface QueriedResource extends com.atomgraph.core.model.QueriedResource
27 | {
28 |
29 | /**
30 | * Returns the SPARQL update that is used to update the RDF description of this resource.
31 | *
32 | * @return update request
33 | */
34 | public UpdateRequest getUpdate();
35 |
36 | }
37 |
--------------------------------------------------------------------------------
/src/main/java/com/atomgraph/processor/server/model/Resource.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014 Martynas Jusevičius .
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.atomgraph.processor.server.model;
18 |
19 | import com.atomgraph.processor.model.Application;
20 | import com.atomgraph.processor.model.TemplateCall;
21 | import java.util.Optional;
22 | import org.apache.jena.ontology.Ontology;
23 |
24 | /**
25 | *
26 | * @author Martynas Jusevičius {@literal }
27 | */
28 | public interface Resource
29 | {
30 |
31 | Application getApplication();
32 |
33 | Ontology getOntology();
34 |
35 | Optional getTemplateCall();
36 |
37 | }
38 |
--------------------------------------------------------------------------------
/src/main/java/com/atomgraph/processor/server/resource/graph/Item.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014 Martynas Jusevičius .
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.atomgraph.processor.server.resource.graph;
18 |
19 | import com.atomgraph.core.MediaTypes;
20 | import org.apache.jena.ontology.Ontology;
21 | import org.apache.jena.rdf.model.Model;
22 | import jakarta.ws.rs.core.Context;
23 | import jakarta.ws.rs.core.HttpHeaders;
24 | import jakarta.ws.rs.core.Request;
25 | import jakarta.ws.rs.core.Response;
26 | import jakarta.ws.rs.core.UriInfo;
27 | import com.atomgraph.core.model.Service;
28 | import com.atomgraph.processor.server.model.impl.ResourceBase;
29 | import com.atomgraph.processor.model.TemplateCall;
30 | import java.util.Optional;
31 | import jakarta.inject.Inject;
32 | import jakarta.ws.rs.NotFoundException;
33 | import jakarta.ws.rs.container.ResourceContext;
34 | import org.slf4j.Logger;
35 | import org.slf4j.LoggerFactory;
36 |
37 | /**
38 | * Named graph resource.
39 | * Implements direct graph identification of the SPARQL Graph Store Protocol.
40 | *
41 | * @author Martynas Jusevičius {@literal }
42 | * @see com.atomgraph.core.model.GraphStore
43 | * @see 4.1 Direct Graph Identification
44 | */
45 | public class Item extends ResourceBase
46 | {
47 |
48 | private static final Logger log = LoggerFactory.getLogger(Item.class);
49 |
50 | @Inject
51 | public Item(@Context UriInfo uriInfo, @Context Request request, @Context MediaTypes mediaTypes,
52 | Service service, com.atomgraph.processor.model.Application application, Optional<Ontology> ontology, Optional<TemplateCall> templateCall,
53 | @Context HttpHeaders httpHeaders, @Context ResourceContext resourceContext)
54 | {
55 | super(uriInfo, request, mediaTypes,
56 | service, application, ontology, templateCall,
57 | httpHeaders, resourceContext);
58 | if (log.isDebugEnabled()) log.debug("Constructing {} as direct indication of GRAPH {}", getClass(), uriInfo.getAbsolutePath());
59 | }
60 |
61 | @Override
62 | public Response get()
63 | {
64 | if (!getService().getDatasetAccessor().containsModel(getURI().toString()))
65 | {
66 | if (log.isDebugEnabled()) log.debug("GET Graph Store named graph with URI: {} not found", getURI());
67 | throw new NotFoundException("Named graph not found");
68 | }
69 |
70 | Model model = getService().getDatasetAccessor().getModel(getURI().toString());
71 | if (log.isDebugEnabled()) log.debug("GET Graph Store named graph with URI: {} found, returning Model of size(): {}", getURI(), model.size());
72 | return getResponse(model);
73 | }
74 |
75 | @Override
76 | public Response post(Model model)
77 | {
78 | boolean existingGraph = getService().getDatasetAccessor().containsModel(getURI().toString());
79 |
80 | // is this implemented correctly? The specification is not very clear.
81 | if (log.isDebugEnabled()) log.debug("POST Model to named graph with URI: {} Did it already exist? {}", getURI(), existingGraph);
82 | getService().getDatasetAccessor().add(getURI().toString(), model);
83 |
84 | if (existingGraph) return Response.ok().build();
85 | else return Response.created(getURI()).build();
86 | }
87 |
88 | @Override
89 | public Response put(Model model)
90 | {
91 | boolean existingGraph = getService().getDatasetAccessor().containsModel(getURI().toString());
92 |
93 | if (log.isDebugEnabled()) log.debug("PUT Model to named graph with URI: {} Did it already exist? {}", getURI(), existingGraph);
94 | getService().getDatasetAccessor().putModel(getURI().toString(), model);
95 |
96 | if (existingGraph) return Response.ok().build();
97 | else return Response.created(getURI()).build();
98 | }
99 |
100 | @Override
101 | public Response delete()
102 | {
103 | if (!getService().getDatasetAccessor().containsModel(getURI().toString()))
104 | {
105 | if (log.isDebugEnabled()) log.debug("DELETE named graph with URI {}: not found", getURI());
106 | throw new NotFoundException("Named graph not found");
107 | }
108 | else
109 | {
110 | if (log.isDebugEnabled()) log.debug("DELETE named graph with URI: {}", getURI());
111 | getService().getDatasetAccessor().deleteModel(getURI().toString());
112 | return Response.noContent().build(); // TO-DO: NoContentException?
113 | }
114 | }
115 |
116 | }
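
A client-side sketch of the direct graph identification that Item implements, using the standard JAX-RS client API. The base URL and Turtle payload are placeholders; the expected status codes follow the get/post/put/delete logic shown above.

    import jakarta.ws.rs.client.Client;
    import jakarta.ws.rs.client.ClientBuilder;
    import jakarta.ws.rs.client.Entity;
    import jakarta.ws.rs.core.Response;

    public class DirectGraphClientExample
    {
        public static void main(String[] args)
        {
            Client client = ClientBuilder.newClient();
            String graph = "http://localhost:8080/graphs/name/"; // placeholder graph document URI

            // PUT replaces (or creates) the graph identified directly by the request URI:
            // 201 Created if it did not exist, 200 OK if it did
            try (Response put = client.target(graph).request().
                    put(Entity.entity("<http://localhost:8080/named-subject> <http://purl.org/dc/terms/title> \"named object\" .",
                            "text/turtle")))
            {
                System.out.println(put.getStatus());
            }

            // GET returns the graph, or 404 Not Found if it does not exist
            try (Response get = client.target(graph).request("text/turtle").get())
            {
                System.out.println(get.getStatus());
            }

            client.close();
        }
    }
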
--------------------------------------------------------------------------------
/src/main/java/com/atomgraph/processor/util/InsertDataBuilder.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2021 Martynas Jusevičius .
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.atomgraph.processor.util;
18 |
19 | import java.io.ByteArrayOutputStream;
20 | import java.io.IOException;
21 | import java.io.UnsupportedEncodingException;
22 | import java.nio.charset.StandardCharsets;
23 | import org.apache.jena.rdf.model.Model;
24 | import org.apache.jena.riot.Lang;
25 | import org.apache.jena.update.UpdateFactory;
26 | import org.apache.jena.update.UpdateRequest;
27 |
28 | /**
29 | *
30 | * @author {@literal Martynas Jusevičius }
31 | */
32 | public class InsertDataBuilder
33 | {
34 |
35 | private final Model model;
36 | private String baseURI, graphURI;
37 |
38 | private InsertDataBuilder(Model model)
39 | {
40 | this.model = model;
41 | }
42 |
43 | public static InsertDataBuilder fromModel(Model model)
44 | {
45 | return new InsertDataBuilder(model);
46 | }
47 |
48 | public InsertDataBuilder base(String baseURI)
49 | {
50 | this.baseURI = baseURI;
51 | return this;
52 | }
53 |
54 | public InsertDataBuilder graph(String graphURI)
55 | {
56 | this.graphURI = graphURI;
57 | return this;
58 | }
59 |
60 | public UpdateRequest build() throws UnsupportedEncodingException, IOException
61 | {
62 | try (ByteArrayOutputStream baos = new ByteArrayOutputStream())
63 | {
64 | model.write(baos, Lang.NTRIPLES.getName());
65 | String body = "INSERT DATA {\n";
66 |
67 | if (getGraph() != null) body += "GRAPH <" + getGraph() + "> {\n";
68 | body += baos.toString(StandardCharsets.UTF_8.name()) + "\n";
69 | if (getGraph() != null) body += "}\n";
70 |
71 | body += "}";
72 |
73 | return UpdateFactory.create(body, getBase());
74 | }
75 | }
76 |
77 | private String getBase()
78 | {
79 | return baseURI;
80 | }
81 |
82 | private String getGraph()
83 | {
84 | return graphURI;
85 | }
86 |
87 | }
88 |
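
A usage sketch for the builder above (subject, predicate and graph URIs are illustrative): it serializes the model as N-Triples and wraps it in INSERT DATA, adding a GRAPH block when a graph URI is set.

    import com.atomgraph.processor.util.InsertDataBuilder;
    import java.io.IOException;
    import org.apache.jena.rdf.model.Model;
    import org.apache.jena.rdf.model.ModelFactory;
    import org.apache.jena.rdf.model.ResourceFactory;
    import org.apache.jena.update.UpdateRequest;

    public class InsertDataExample
    {
        public static void main(String[] args) throws IOException
        {
            Model model = ModelFactory.createDefaultModel();
            model.add(model.createResource("http://localhost/named-subject"),
                    ResourceFactory.createProperty("http://purl.org/dc/terms/title"),
                    "named object");

            UpdateRequest update = InsertDataBuilder.fromModel(model).
                    base("http://localhost/").
                    graph("http://localhost/graphs/name/").
                    build(); // INSERT DATA { GRAPH <http://localhost/graphs/name/> { ... } }

            System.out.println(update);
        }
    }
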
--------------------------------------------------------------------------------
/src/main/java/com/atomgraph/processor/util/OntologyLoader.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2014 Martynas Jusevičius
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | *
16 | */
17 | package com.atomgraph.processor.util;
18 |
19 | import com.atomgraph.core.util.jena.DataManager;
20 | import org.apache.jena.ontology.OntDocumentManager;
21 | import org.apache.jena.ontology.OntModel;
22 | import org.apache.jena.ontology.OntModelSpec;
23 | import org.apache.jena.ontology.OntResource;
24 | import org.apache.jena.ontology.Ontology;
25 | import org.apache.jena.util.iterator.ExtendedIterator;
26 | import java.util.HashMap;
27 | import java.util.Map;
28 | import org.apache.jena.rdf.model.ModelFactory;
29 | import com.atomgraph.server.exception.OntologyException;
30 | import org.slf4j.Logger;
31 | import org.slf4j.LoggerFactory;
32 |
33 | /**
34 | * Application ontology provider.
35 | *
36 | * @see org.apache.jena.ontology.Ontology
37 | * @author Martynas Jusevičius {@literal }
38 | */
39 | public class OntologyLoader
40 | {
41 | private static final Logger log = LoggerFactory.getLogger(OntologyLoader.class);
42 |
43 | private final OntDocumentManager ontDocumentManager;
44 | private final String ontologyURI;
45 |
46 | public OntologyLoader(final OntDocumentManager ontDocumentManager, final String ontologyURI,
47 | final OntModelSpec materializationSpec, final boolean materialize)
48 | {
49 | if (ontDocumentManager == null) throw new IllegalArgumentException("OntDocumentManager cannot be null");
50 | if (ontologyURI == null) throw new IllegalArgumentException("URI cannot be null");
51 | if (materializationSpec == null) throw new IllegalArgumentException("OntModelSpec cannot be null");
52 |
53 | this.ontDocumentManager = ontDocumentManager;
54 | this.ontologyURI = ontologyURI;
55 |
56 | // materialize OntModel inferences to avoid invoking rules engine on every request
57 | if (!ontDocumentManager.getFileManager().hasCachedModel(ontologyURI))
58 | {
59 | OntModel ontModel = ontDocumentManager.getOntology(ontologyURI, materializationSpec);
60 | Ontology ontology = ontModel.getOntology(ontologyURI);
61 | OntModel materializedModel = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM); // no inference
62 | materializedModel.add(ontModel);
63 | ontDocumentManager.addModel(ontologyURI, new OntModelReadOnly(materializedModel), true); // make immutable
64 |
65 | ImportCycleChecker checker = new ImportCycleChecker();
66 | checker.check(ontology);
67 | if (checker.getCycleOntology() != null)
68 | {
69 | if (log.isErrorEnabled()) log.error("Sitemap contains an ontology which forms an import cycle: {}", checker.getCycleOntology());
70 | throw new OntologyException("Sitemap contains an ontology which forms an import cycle: " + checker.getCycleOntology().getURI());
71 | }
72 | }
73 | }
74 |
75 | public class ImportCycleChecker
76 | {
77 | private final Map<Ontology, Boolean> marked = new HashMap<>(), onStack = new HashMap<>();
78 | private Ontology cycleOntology = null;
79 |
80 | public void check(Ontology ontology)
81 | {
82 | if (ontology == null) throw new IllegalArgumentException("Ontology cannot be null");
83 |
84 | marked.put(ontology, Boolean.TRUE);
85 | onStack.put(ontology, Boolean.TRUE);
86 |
87 | ExtendedIterator<OntResource> it = ontology.listImports();
88 | try
89 | {
90 | while (it.hasNext())
91 | {
92 | OntResource importRes = it.next();
93 | if (importRes.canAs(Ontology.class))
94 | {
95 | Ontology imported = importRes.asOntology();
96 | if (marked.get(imported) == null)
97 | check(imported);
98 | else if (onStack.get(imported))
99 | {
100 | cycleOntology = imported;
101 | return;
102 | }
103 | }
104 | }
105 |
106 | onStack.put(ontology, Boolean.FALSE);
107 | }
108 | finally
109 | {
110 | it.close();
111 | }
112 | }
113 |
114 | public Ontology getCycleOntology()
115 | {
116 | return cycleOntology;
117 | }
118 |
119 | }
120 |
121 | public Ontology getOntology()
122 | {
123 | OntModelSpec loadSpec = new OntModelSpec(OntModelSpec.OWL_MEM);
124 |
125 | // attempt to use DataManager to retrieve owl:import Models
126 | if (getOntDocumentManager().getFileManager() instanceof DataManager dataManager)
127 | loadSpec.setImportModelGetter(dataManager);
128 |
129 | return getOntDocumentManager().getOntology(getOntologyURI(), loadSpec).getOntology(getOntologyURI());
130 | }
131 |
132 | public OntDocumentManager getOntDocumentManager()
133 | {
134 | return ontDocumentManager;
135 | }
136 |
137 | public String getOntologyURI()
138 | {
139 | return ontologyURI;
140 | }
141 |
142 | }
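
A wiring sketch for the loader above; the ontology URI and file mapping are placeholders. The constructor materializes inferences once and caches a read-only copy, and getOntology() then re-reads it with a plain OWL_MEM spec.

    import com.atomgraph.processor.util.OntologyLoader;
    import org.apache.jena.ontology.OntDocumentManager;
    import org.apache.jena.ontology.OntModelSpec;
    import org.apache.jena.ontology.Ontology;

    public class OntologyLoaderExample
    {
        public static void main(String[] args)
        {
            OntDocumentManager odm = OntDocumentManager.getInstance();
            // map the ontology URI to a local file so it is not fetched over the network (placeholder)
            odm.addAltEntry("https://localhost/ns#", "file:ontology.ttl");

            OntologyLoader loader = new OntologyLoader(odm, "https://localhost/ns#",
                    OntModelSpec.OWL_MEM_RDFS_INF, true);

            Ontology ontology = loader.getOntology();
            System.out.println(ontology.getURI());
        }
    }
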
--------------------------------------------------------------------------------
/src/main/java/com/atomgraph/processor/util/RDFNodeFactory.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 Martynas Jusevičius .
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.atomgraph.processor.util;
17 |
18 | import org.apache.jena.datatypes.RDFDatatype;
19 | import org.apache.jena.datatypes.xsd.XSDDatatype;
20 | import org.apache.jena.graph.NodeFactory;
21 | import org.apache.jena.rdf.model.RDFNode;
22 | import org.apache.jena.rdf.model.Resource;
23 | import org.apache.jena.rdf.model.ResourceFactory;
24 | import org.apache.jena.vocabulary.XSD;
25 |
26 | /**
27 | *
28 | * @author Martynas Jusevičius {@literal }
29 | */
30 | public class RDFNodeFactory
31 | {
32 |
33 | public static final RDFNode createTyped(String value, Resource valueType)
34 | {
35 | if (value == null) throw new IllegalArgumentException("Param value cannot be null");
36 |
37 | // without value type, return default xsd:string value
38 | if (valueType == null) return ResourceFactory.createTypedLiteral(value, XSDDatatype.XSDstring);
39 |
40 | // if value type is from XSD namespace, value is treated as typed literal with XSD datatype
41 | if (valueType.getNameSpace().equals(XSD.getURI()))
42 | {
43 | RDFDatatype dataType = NodeFactory.getType(valueType.getURI());
44 | return ResourceFactory.createTypedLiteral(value, dataType);
45 | }
46 | // otherwise, value is treated as URI resource
47 | else
48 | return ResourceFactory.createResource(value);
49 | }
50 |
51 | }
52 |
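
A quick sketch of the branches in createTyped above: an XSD value type produces a typed literal, no value type defaults to xsd:string, and any other value type turns the value into a resource URI (URIs here are illustrative).

    import com.atomgraph.processor.util.RDFNodeFactory;
    import org.apache.jena.rdf.model.RDFNode;
    import org.apache.jena.rdf.model.ResourceFactory;
    import org.apache.jena.vocabulary.XSD;

    public class RDFNodeFactoryExample
    {
        public static void main(String[] args)
        {
            RDFNode count = RDFNodeFactory.createTyped("42", XSD.integer);    // "42"^^xsd:integer
            RDFNode label = RDFNodeFactory.createTyped("named object", null); // "named object"^^xsd:string
            RDFNode thing = RDFNodeFactory.createTyped("http://localhost/named-object",
                    ResourceFactory.createResource("http://localhost/SomeClass")); // URI resource

            System.out.println(count.asLiteral().getDatatypeURI());
            System.out.println(label.asLiteral().getLexicalForm());
            System.out.println(thing.isURIResource());
        }
    }
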
--------------------------------------------------------------------------------
/src/main/java/com/atomgraph/processor/util/RulePrinter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2015 Martynas Jusevičius