diff --git a/app.py b/app.py
index f2e898a..7327d92 100644
--- a/app.py
+++ b/app.py
@@ -10,6 +10,7 @@
from fastapi.responses import JSONResponse
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel, ConfigDict, Field
+from typing import Union
import urllib
# import other py's from this repository
@@ -215,15 +216,19 @@ class QuerySolution(BaseModel):
class Bindings(BaseModel):
bindings: list[QuerySolution]
-class SPARQLResultResponse(BaseModel):
+class SPARQLSelectResponse(BaseModel):
head: Vars
results: Bindings
+class SPARQLAskResponse(BaseModel):
+ head: dict
+ boolean: bool
+
class Gaps(BaseModel):
pattern: str
gaps: list[list[str]]
-class SPARQLResultWithGapsResponse(BaseModel):
+class SPARQLSelectWithGapsResponse(BaseModel):
head: Vars
results: Bindings
knowledge_gaps: list[Gaps]
@@ -289,6 +294,7 @@ async def root():
# see the docs for examples how to use this route
@app.get('/query/',
tags=["SPARQL query execution"],
+         response_model=Union[SPARQLSelectResponse, SPARQLAskResponse],
description="""
This GET operation implements the GET query operation defined by the [SPARQL 1.1 Protocol](https://www.w3.org/TR/sparql11-protocol/#query-operation):
@@ -302,7 +308,7 @@ async def root():
""",
openapi_extra = OPENAPI_EXTRA_GET_REQUEST
)
-async def get(request: Request) -> SPARQLResultResponse:
+async def get(request: Request):
# get the body from the request, which should be empty
body = await request.body()
body = body.decode()
@@ -342,7 +348,7 @@ async def get(request: Request) -> SPARQLResultResponse:
""",
openapi_extra = OPENAPI_EXTRA_POST_REQUEST
)
-async def post(request: Request) -> SPARQLResultResponse:
+async def post(request: Request) -> Union[SPARQLSelectResponse, SPARQLAskResponse]:
# get byte query out of request with await
query = await request.body()
@@ -375,7 +381,7 @@ async def post(request: Request) -> SPARQLResultResponse:
""",
openapi_extra = OPENAPI_EXTRA_POST_REQUEST_FOR_GAPS
)
-async def post(request: Request) -> SPARQLResultWithGapsResponse:
+async def post(request: Request) -> SPARQLSelectWithGapsResponse:
# get byte query out of request with await
query = await request.body()
# then get the requester_id and query string
@@ -411,8 +417,6 @@ async def post(request: Request):
return handle_update(requester_id, update, False)
-
-
####################
# HELPER FUNCTIONS #
####################
@@ -545,9 +549,9 @@ def handle_query(requester_id: str, query: str, gaps_enabled) -> dict:
try:
result = local_query_executor.executeQuery(graph, query)
if gaps_enabled:
- result['knowledge_gaps'] = knowledge_gaps
- if knowledge_gaps: #bindings should be empty
- result['results']['bindings'] = [{}]
+ result['knowledge_gaps'] = knowledge_gaps
+ if knowledge_gaps: #bindings should be empty
+ result['results']['bindings'] = [{}]
except Exception as e:
logger.debug(f"Query could not be executed on the local graph: {e}")
raise HTTPException(status_code=500,
diff --git a/docker-compose.yml b/docker-compose.yml
index dee66bf..7a883cc 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -9,7 +9,7 @@ services:
- TOKEN_ENABLED=${TOKEN_ENABLED}
- TOKENS_FILE_PATH=${TOKENS_FILE_PATH}
- SPARQL_ENDPOINT_NAME=${SPARQL_ENDPOINT_NAME}
- - LOG_LEVEL=INFO
+    - LOG_LEVEL=${LOG_LEVEL:-INFO}
ports:
- "${PORT}:8000"
volumes:
diff --git a/local_query_executor.py b/local_query_executor.py
index 68b6db1..f2cc87c 100644
--- a/local_query_executor.py
+++ b/local_query_executor.py
@@ -7,6 +7,7 @@
import rdflib
from rdflib.util import from_n3
from rdflib import RDF, Graph, Namespace, URIRef, Literal
+from rdflib.plugins.sparql.parser import parseQuery
# enable logging
logger = logging.getLogger(__name__)
@@ -19,11 +20,25 @@
def executeQuery(graph: Graph, query: str) -> dict:
# run the original query on the graph to get the results
- result = graph.query(query)
- logger.debug(f'Result of the query when executed on the local graph {result.bindings}')
- logger.debug(f'Variables used in the result of the query {result.vars}')
- # reformat the result into a SPARQL 1.1 JSON result structure
- json_result = reformatResultIntoSPARQLJson(result)
+ logger.info(f"Query to be executed on local graph is: {query}")
+ parsed_query = parseQuery(query)
+
+ if parsed_query[1].name == "SelectQuery":
+ result = graph.query(query)
+ # the result object should contain bindings and vars
+ logger.info(f'Result of the SELECT query when executed on the local graph is: {result.bindings}')
+ # reformat the result into a SPARQL 1.1 JSON result structure
+ json_result = reformatResultIntoSPARQLJson(result)
+
+    elif parsed_query[1].name == "AskQuery":
+ result = graph.query(query)
+ # the result object should contain an askAnswer field
+ logger.info(f"Result of the ASK query when executed on the local graph is: {result.askAnswer}")
+ json_result = {
+ "head" : {},
+ "boolean": result.askAnswer
+ }
+
return json_result
def reformatResultIntoSPARQLJson(result:dict) -> dict:
diff --git a/request_processor.py b/request_processor.py
index cd5aa0f..1c45f9e 100644
--- a/request_processor.py
+++ b/request_processor.py
@@ -66,13 +66,13 @@ def constructGraphFromKnowledgeNetwork(query: str, requester_id: str, gaps_enabl
except Exception as e:
# create a message that says that only SELECT queries are expected and raise that exception
replaceable_string = "Expected {SelectQuery | ConstructQuery | DescribeQuery | AskQuery}"
- message = str(e).replace(replaceable_string,"Expected SelectQuery")
+ message = str(e).replace(replaceable_string,"Expected SelectQuery or AskQuery")
raise Exception(message)
logger.info(f"Parsed query is: {parsed_query}")
# then determine whether the query is a SELECT query, because we only accept those!
- if not parsed_query[1].name == "SelectQuery":
- raise Exception(f"Only SELECT queries are supported!")
+ if not parsed_query[1].name == "SelectQuery" and not parsed_query[1].name == "AskQuery":
+ raise Exception(f"Only SELECT or ASK queries are supported!")
# now, create a prologue that contains the namespaces used in the query
prologue_new = PrologueNew()