Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 14 additions & 10 deletions app.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from fastapi.responses import JSONResponse
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel, ConfigDict, Field
from typing import Union
import urllib

# import other py's from this repository
Expand Down Expand Up @@ -215,15 +216,19 @@ class QuerySolution(BaseModel):
class Bindings(BaseModel):
    # Container for the "results" member of a SPARQL JSON result document:
    # each QuerySolution is one solution (row of variable bindings).
    bindings: list[QuerySolution]

class SPARQLResultResponse(BaseModel):
class SPARQLSelectResponse(BaseModel):
    # Response body for a SELECT query, following the SPARQL 1.1 Query
    # Results JSON Format: "head" lists the result variables, "results"
    # holds the solution bindings.
    head: Vars
    results: Bindings

class SPARQLAskResponse(BaseModel):
    # Response body for an ASK query, following the SPARQL 1.1 Query
    # Results JSON Format: a "head" object (empty for ASK — see the
    # executeQuery helper, which emits {}) plus the boolean answer.
    head: dict
    boolean: bool

class Gaps(BaseModel):
    # One detected knowledge gap for a query.
    # pattern: the triple/graph pattern the gap was found for.
    # gaps: lists of strings describing each gap — presumably the unmatched
    # pattern components; TODO confirm against the gap-detection code.
    pattern: str
    gaps: list[list[str]]

class SPARQLResultWithGapsResponse(BaseModel):
class SPARQLSelectWithGapsResponse(BaseModel):
    # SELECT response extended with the knowledge gaps detected while
    # answering the query (a non-standard extension of the SPARQL 1.1
    # JSON results format).
    head: Vars
    results: Bindings
    knowledge_gaps: list[Gaps]
Expand Down Expand Up @@ -289,6 +294,7 @@ async def root():
# see the docs for examples how to use this route
@app.get('/query/',
tags=["SPARQL query execution"],
response_model=Union[SPARQLSelectResponse,SPARQLAskResponse],
description="""
This GET operation implements the GET query operation defined by the [SPARQL 1.1 Protocol](https://www.w3.org/TR/sparql11-protocol/#query-operation):
<br><br>
Expand All @@ -302,7 +308,7 @@ async def root():
""",
openapi_extra = OPENAPI_EXTRA_GET_REQUEST
)
async def get(request: Request) -> SPARQLResultResponse:
async def get(request: Request):
# get the body from the request, which should be empty
body = await request.body()
body = body.decode()
Expand Down Expand Up @@ -342,7 +348,7 @@ async def get(request: Request) -> SPARQLResultResponse:
""",
openapi_extra = OPENAPI_EXTRA_POST_REQUEST
)
async def post(request: Request) -> SPARQLResultResponse:
async def post(request: Request) -> SPARQLSelectResponse:
# get byte query out of request with await
query = await request.body()

Expand Down Expand Up @@ -375,7 +381,7 @@ async def post(request: Request) -> SPARQLResultResponse:
""",
openapi_extra = OPENAPI_EXTRA_POST_REQUEST_FOR_GAPS
)
async def post(request: Request) -> SPARQLResultWithGapsResponse:
async def post(request: Request) -> SPARQLSelectWithGapsResponse:
# get byte query out of request with await
query = await request.body()
# then get the requester_id and query string
Expand Down Expand Up @@ -411,8 +417,6 @@ async def post(request: Request):
return handle_update(requester_id, update, False)




####################
# HELPER FUNCTIONS #
####################
Expand Down Expand Up @@ -545,9 +549,9 @@ def handle_query(requester_id: str, query: str, gaps_enabled) -> dict:
try:
result = local_query_executor.executeQuery(graph, query)
if gaps_enabled:
result['knowledge_gaps'] = knowledge_gaps
if knowledge_gaps: #bindings should be empty
result['results']['bindings'] = [{}]
result['knowledge_gaps'] = knowledge_gaps
if knowledge_gaps: #bindings should be empty
result['results']['bindings'] = [{}]
except Exception as e:
logger.debug(f"Query could not be executed on the local graph: {e}")
raise HTTPException(status_code=500,
Expand Down
2 changes: 1 addition & 1 deletion docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ services:
- TOKEN_ENABLED=${TOKEN_ENABLED}
- TOKENS_FILE_PATH=${TOKENS_FILE_PATH}
- SPARQL_ENDPOINT_NAME=${SPARQL_ENDPOINT_NAME}
- LOG_LEVEL=INFO
- LOG_LEVEL=DEBUG
ports:
- "${PORT}:8000"
volumes:
Expand Down
25 changes: 20 additions & 5 deletions local_query_executor.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
import rdflib
from rdflib.util import from_n3
from rdflib import RDF, Graph, Namespace, URIRef, Literal
from rdflib.plugins.sparql.parser import parseQuery

# enable logging
logger = logging.getLogger(__name__)
Expand All @@ -19,11 +20,25 @@

def executeQuery(graph: Graph, query: str) -> dict:
    """Execute a SPARQL SELECT or ASK query on the local graph.

    Parses the query to determine its form, runs it on `graph`, and returns
    the result as a SPARQL 1.1 JSON result structure: head/results for
    SELECT, head/boolean for ASK.

    Raises:
        ValueError: if the query is neither a SELECT nor an ASK query.
    """
    logger.info(f"Query to be executed on local graph is: {query}")
    # parseQuery returns a parse tree; element [1] names the query form
    parsed_query = parseQuery(query)
    query_form = parsed_query[1].name

    if query_form == "SelectQuery":
        result = graph.query(query)
        # the result object should contain bindings and vars
        logger.info(f'Result of the SELECT query when executed on the local graph is: {result.bindings}')
        # reformat the result into a SPARQL 1.1 JSON result structure
        json_result = reformatResultIntoSPARQLJson(result)
    elif query_form == "AskQuery":
        result = graph.query(query)
        # the result object should contain an askAnswer field
        logger.info(f"Result of the ASK query when executed on the local graph is: {result.askAnswer}")
        json_result = {
            "head" : {},
            "boolean": result.askAnswer
        }
    else:
        # Previously an unsupported query form fell through both branches and
        # crashed with UnboundLocalError on `return json_result`; fail with a
        # clear error instead (mirrors the check in request_processor.py).
        raise ValueError(f"Only SELECT or ASK queries are supported, got: {query_form}")

    return json_result

def reformatResultIntoSPARQLJson(result:dict) -> dict:
Expand Down
6 changes: 3 additions & 3 deletions request_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,13 +66,13 @@ def constructGraphFromKnowledgeNetwork(query: str, requester_id: str, gaps_enabl
except Exception as e:
# create a message that says that only SELECT queries are expected and raise that exception
replaceable_string = "Expected {SelectQuery | ConstructQuery | DescribeQuery | AskQuery}"
message = str(e).replace(replaceable_string,"Expected SelectQuery")
message = str(e).replace(replaceable_string,"Expected SelectQuery or AskQuery")
raise Exception(message)

logger.info(f"Parsed query is: {parsed_query}")
# then determine whether the query is a SELECT query, because we only accept those!
if not parsed_query[1].name == "SelectQuery":
raise Exception(f"Only SELECT queries are supported!")
if not parsed_query[1].name == "SelectQuery" and not parsed_query[1].name == "AskQuery":
raise Exception(f"Only SELECT or ASK queries are supported!")

# now, create a prologue that contains the namespaces used in the query
prologue_new = PrologueNew()
Expand Down