/
HttpTriplestoreConnector.scala
1097 lines (947 loc) · 41.9 KB
/
HttpTriplestoreConnector.scala
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
/*
* Copyright © 2021 - 2022 Swiss National Data and Service Center for the Humanities and/or DaSCH Service Platform contributors.
* SPDX-License-Identifier: Apache-2.0
*/
package org.knora.webapi.store.triplestore.http
import akka.actor.Actor
import akka.actor.ActorLogging
import akka.actor.ActorSystem
import akka.actor.Status
import akka.event.LoggingAdapter
import org.apache.commons.lang3.StringUtils
import org.apache.http.Consts
import org.apache.http.HttpEntity
import org.apache.http.HttpHost
import org.apache.http.HttpRequest
import org.apache.http.NameValuePair
import org.apache.http.auth.AuthScope
import org.apache.http.auth.UsernamePasswordCredentials
import org.apache.http.client.AuthCache
import org.apache.http.client.config.RequestConfig
import org.apache.http.client.entity.UrlEncodedFormEntity
import org.apache.http.client.methods.CloseableHttpResponse
import org.apache.http.client.methods.HttpGet
import org.apache.http.client.methods.HttpPost
import org.apache.http.client.methods.HttpPut
import org.apache.http.client.protocol.HttpClientContext
import org.apache.http.client.utils.URIBuilder
import org.apache.http.entity.ContentType
import org.apache.http.entity.FileEntity
import org.apache.http.entity.StringEntity
import org.apache.http.impl.auth.BasicScheme
import org.apache.http.impl.client.BasicAuthCache
import org.apache.http.impl.client.BasicCredentialsProvider
import org.apache.http.impl.client.CloseableHttpClient
import org.apache.http.impl.client.HttpClients
import org.apache.http.message.BasicNameValuePair
import org.apache.http.util.EntityUtils
import org.knora.webapi._
import org.knora.webapi.exceptions._
import org.knora.webapi.feature.FeatureFactoryConfig
import org.knora.webapi.instrumentation.InstrumentationSupport
import org.knora.webapi.messages.store.triplestoremessages.SparqlResultProtocol._
import org.knora.webapi.messages.store.triplestoremessages._
import org.knora.webapi.messages.util.FakeTriplestore
import org.knora.webapi.messages.util.rdf._
import org.knora.webapi.settings.KnoraDispatchers
import org.knora.webapi.settings.KnoraSettings
import org.knora.webapi.store.triplestore.RdfDataObjectFactory
import org.knora.webapi.util.ActorUtil._
import org.knora.webapi.util.FileUtil
import spray.json._
import java.io.BufferedInputStream
import java.net.URI
import java.nio.charset.StandardCharsets
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths
import java.nio.file.StandardCopyOption
import java.util
import scala.collection.mutable
import scala.concurrent.ExecutionContext
import scala.jdk.CollectionConverters._
import scala.util.Failure
import scala.util.Success
import scala.util.Try
/**
* Submits SPARQL queries and updates to a triplestore over HTTP. Supports different triplestores, which can be configured in
* `application.conf`.
*/
class HttpTriplestoreConnector extends Actor with ActorLogging with InstrumentationSupport {
// MIME type constants used for Accept / Content-Type headers when talking to the triplestore.
private val mimeTypeApplicationJson = "application/json"
private val mimeTypeApplicationSparqlResultsJson = "application/sparql-results+json"
private val mimeTypeTextTurtle = "text/turtle"
private val mimeTypeApplicationSparqlUpdate = "application/sparql-update"
private val mimeTypeApplicationNQuads = "application/n-quads"

private implicit val system: ActorSystem = context.system
private val settings = KnoraSettings(system)

// All triplestore calls are blocking HTTP requests, so they run on Knora's dedicated blocking dispatcher.
implicit val executionContext: ExecutionContext = system.dispatchers.lookup(KnoraDispatchers.KnoraBlockingDispatcher)

override val log: LoggingAdapter = akka.event.Logging(system, this.getClass.getName)

// Triplestore host and basic-auth credentials, taken from the application settings.
private val targetHost: HttpHost = new HttpHost(settings.triplestoreHost, settings.triplestorePort, "http")

private val credsProvider: BasicCredentialsProvider = new BasicCredentialsProvider
credsProvider.setCredentials(
  new AuthScope(targetHost.getHostName, targetHost.getPort),
  new UsernamePasswordCredentials(settings.triplestoreUsername, settings.triplestorePassword)
)

// Reading data should be quick, except when it is not ;-)
private val queryTimeoutMillis = settings.triplestoreQueryTimeout.toMillis.toInt

// Client for ordinary queries, using the query timeout for connect/request/socket.
private val queryRequestConfig = RequestConfig
  .custom()
  .setConnectTimeout(queryTimeoutMillis)
  .setConnectionRequestTimeout(queryTimeoutMillis)
  .setSocketTimeout(queryTimeoutMillis)
  .build

private val queryHttpClient: CloseableHttpClient = HttpClients.custom
  .setDefaultCredentialsProvider(credsProvider)
  .setDefaultRequestConfig(queryRequestConfig)
  .build

// Some updates could take a while.
private val updateTimeoutMillis = settings.triplestoreUpdateTimeout.toMillis.toInt

private val updateTimeoutConfig = RequestConfig
  .custom()
  .setConnectTimeout(updateTimeoutMillis)
  .setConnectionRequestTimeout(updateTimeoutMillis)
  .setSocketTimeout(updateTimeoutMillis)
  .build

private val updateHttpClient: CloseableHttpClient = HttpClients.custom
  .setDefaultCredentialsProvider(credsProvider)
  .setDefaultRequestConfig(updateTimeoutConfig)
  .build

// For updates that could take a very long time: ten times the update timeout.
private val longTimeoutMillis = settings.triplestoreUpdateTimeout.toMillis.toInt * 10

private val longRequestConfig = RequestConfig
  .custom()
  .setConnectTimeout(longTimeoutMillis)
  .setConnectionRequestTimeout(longTimeoutMillis)
  .setSocketTimeout(longTimeoutMillis)
  .build

private val longRequestClient: CloseableHttpClient = HttpClients.custom
  .setDefaultCredentialsProvider(credsProvider)
  .setDefaultRequestConfig(longRequestConfig)
  .build

// Triplestore endpoint paths, all relative to targetHost.
private val queryPath: String = s"/${settings.triplestoreDatabaseName}/query"
private val sparqlUpdatePath: String = s"/${settings.triplestoreDatabaseName}/update"
// Fuseki administration endpoint listing all datasets on the server.
private val checkRepositoryPath: String = "/$/server"
private val graphPath: String = s"/${settings.triplestoreDatabaseName}/get"
private val repositoryDownloadPath = s"/${settings.triplestoreDatabaseName}"
private val repositoryUploadPath = repositoryDownloadPath
// Separator line used to make multi-part log messages readable.
private val logDelimiter = "\n" + StringUtils.repeat('=', 80) + "\n"
private val dataInsertPath = s"/${settings.triplestoreDatabaseName}/data"
/**
* Receives a message requesting a SPARQL select or update, and returns an appropriate response message or
* [[Status.Failure]]. If a serious error occurs (i.e. an error that isn't the client's fault), this
* method first returns `Failure` to the sender, then throws an exception.
*/
def receive: PartialFunction[Any, Unit] = {
  // Queries returning results to the sender.
  case SparqlSelectRequest(sparql: String) => try2Message(sender(), sparqlHttpSelect(sparql), log)

  case sparqlConstructRequest: SparqlConstructRequest =>
    try2Message(sender(), sparqlHttpConstruct(sparqlConstructRequest), log)

  case sparqlExtendedConstructRequest: SparqlExtendedConstructRequest =>
    try2Message(sender(), sparqlHttpExtendedConstruct(sparqlExtendedConstructRequest), log)

  // Queries whose results are written directly to a file.
  case SparqlConstructFileRequest(
        sparql: String,
        graphIri: IRI,
        outputFile: Path,
        outputFormat: QuadFormat,
        featureFactoryConfig: FeatureFactoryConfig
      ) =>
    try2Message(
      sender(),
      sparqlHttpConstructFile(sparql, graphIri, outputFile, outputFormat, featureFactoryConfig),
      log
    )

  case NamedGraphFileRequest(
        graphIri: IRI,
        outputFile: Path,
        outputFormat: QuadFormat,
        featureFactoryConfig: FeatureFactoryConfig
      ) =>
    try2Message(sender(), sparqlHttpGraphFile(graphIri, outputFile, outputFormat, featureFactoryConfig), log)

  case NamedGraphDataRequest(graphIri: IRI) => try2Message(sender(), sparqlHttpGraphData(graphIri), log)

  // Updates.
  case SparqlUpdateRequest(sparql: String) => try2Message(sender(), sparqlHttpUpdate(sparql), log)

  case SparqlAskRequest(sparql: String) => try2Message(sender(), sparqlHttpAsk(sparql), log)

  // Repository maintenance.
  case ResetRepositoryContent(rdfDataObjects: Seq[RdfDataObject], prependDefaults: Boolean) =>
    try2Message(sender(), resetTripleStoreContent(rdfDataObjects, prependDefaults), log)

  case DropAllTRepositoryContent() => try2Message(sender(), dropAllTriplestoreContent(), log)

  case InsertRepositoryContent(rdfDataObjects: Seq[RdfDataObject]) =>
    try2Message(sender(), insertDataIntoTriplestore(rdfDataObjects), log)

  // Only answered when the message matches the configured triplestore type.
  case HelloTriplestore(msg: String) if msg == settings.triplestoreType => sender() ! HelloTriplestore(settings.triplestoreType)

  case CheckTriplestoreRequest() => try2Message(sender(), checkTriplestore(), log)

  // Acknowledged without contacting the triplestore.
  // NOTE(review): presumably the search index is maintained by the triplestore itself — confirm.
  case SearchIndexUpdateRequest(subjectIri: Option[String]) =>
    try2Message(sender(), Success(SparqlUpdateResponse()), log)

  case DownloadRepositoryRequest(outputFile: Path, featureFactoryConfig: FeatureFactoryConfig) =>
    try2Message(sender(), downloadRepository(outputFile, featureFactoryConfig), log)

  case UploadRepositoryRequest(inputFile: Path) => try2Message(sender(), uploadRepository(inputFile), log)

  case InsertGraphDataContentRequest(graphContent: String, graphName: String) =>
    try2Message(sender(), insertDataGraphRequest(graphContent, graphName), log)

  // Test support: provoke a simulated read timeout.
  case SimulateTimeoutRequest() => try2Message(sender(), doSimulateTimeout(), log)

  // Any other message is unexpected and answered with a failure.
  case other =>
    sender() ! Status.Failure(
      UnexpectedMessageException(s"Unexpected message $other of type ${other.getClass.getCanonicalName}")
    )
}
/**
* Simulates a read timeout.
*/
private def doSimulateTimeout(): Try[SparqlSelectResult] = {
  // A trivial query: the point is not the result but the simulated timeout on the request.
  val trivialQuery =
    """SELECT ?foo WHERE {
      | BIND("foo" AS ?foo)
      |}""".stripMargin

  sparqlHttpSelect(sparql = trivialQuery, simulateTimeout = true)
}
/**
* Given a SPARQL SELECT query string, runs the query, returning the result as a [[SparqlSelectResult]].
*
* @param sparql the SPARQL SELECT query string.
* @param simulateTimeout if `true`, simulate a read timeout.
* @return a [[SparqlSelectResult]].
*/
private def sparqlHttpSelect(sparql: String, simulateTimeout: Boolean = false): Try[SparqlSelectResult] = {

  // Parses the triplestore's SPARQL JSON results, distinguishing a mid-response timeout
  // (the triplestore can time out after already sending status code 200, leaving a marker
  // comment in the body) from a genuinely unparseable response.
  def parseJsonResponse(sparql: String, resultStr: String): Try[SparqlSelectResult] = {
    val parseTry = Try {
      resultStr.parseJson.convertTo[SparqlSelectResult]
    }

    parseTry match {
      case Success(parsed) => Success(parsed)
      case Failure(e) =>
        if (resultStr.contains("## Query cancelled due to timeout during execution")) {
          log.error(e, "Triplestore timed out while sending a response, after sending statuscode 200.")
          Failure(
            TriplestoreTimeoutException(
              "Triplestore timed out while sending a response, after sending statuscode 200.",
              e,
              log
            )
          )
        } else {
          log.error(
            e,
            s"Couldn't parse response from triplestore:$logDelimiter$resultStr${logDelimiter}in response to SPARQL query:$logDelimiter$sparql"
          )
          // Bug fix: this parser handles the JSON results of a SELECT query, not Turtle,
          // so the exception message must say JSON (it previously said "Turtle").
          Failure(TriplestoreResponseException("Couldn't parse JSON from triplestore", e, log))
        }
    }
  }

  for {
    // Are we using the fake triplestore?
    resultStr <-
      if (settings.useFakeTriplestore) {
        // Yes: get the response from it.
        Try(FakeTriplestore.data(sparql))
      } else {
        // No: get the response from the real triplestore over HTTP.
        getSparqlHttpResponse(sparql, isUpdate = false, simulateTimeout = simulateTimeout)
      }

    // Are we preparing a fake triplestore?
    _ = if (settings.prepareFakeTriplestore) {
      // Yes: record the query and its response so they can be replayed later.
      FakeTriplestore.add(sparql, resultStr, log)
    }

    // Parse the response as a JSON object and generate a response message.
    responseMessage <- parseJsonResponse(sparql, resultStr)
  } yield responseMessage
}
/**
* Given a SPARQL CONSTRUCT query string, runs the query, returning the result as a [[SparqlConstructResponse]].
*
* @param sparqlConstructRequest the request message.
* @return a [[SparqlConstructResponse]]
*/
private def sparqlHttpConstruct(sparqlConstructRequest: SparqlConstructRequest): Try[SparqlConstructResponse] = {
  val rdfFormatUtil: RdfFormatUtil = RdfFeatureFactory.getRdfFormatUtil(sparqlConstructRequest.featureFactoryConfig)

  // Parses the triplestore's Turtle response into a map of subject IRI -> (predicate, object) pairs,
  // distinguishing a mid-response timeout from a genuinely unparseable response.
  def parseTurtleResponse(
    sparql: String,
    turtleStr: String,
    rdfFormatUtil: RdfFormatUtil
  ): Try[SparqlConstructResponse] = {
    val parseTry = Try {
      val rdfModel: RdfModel = rdfFormatUtil.parseToRdfModel(rdfStr = turtleStr, rdfFormat = Turtle)
      val statementMap: mutable.Map[IRI, Seq[(IRI, String)]] = mutable.Map.empty

      // Group every statement under its subject, appending (predicate, object) pairs in model order.
      for (st: Statement <- rdfModel) {
        val subjectIri = st.subj.stringValue
        val predicateIri = st.pred.stringValue
        val objectIri = st.obj.stringValue
        val currentStatementsForSubject: Seq[(IRI, String)] =
          statementMap.getOrElse(subjectIri, Vector.empty[(IRI, String)])
        statementMap += (subjectIri -> (currentStatementsForSubject :+ (predicateIri, objectIri)))
      }

      SparqlConstructResponse(statementMap.toMap)
    }

    parseTry match {
      case Success(parsed) => Success(parsed)
      case Failure(e) =>
        // The triplestore can time out after already sending status code 200,
        // leaving this marker comment in the body.
        if (turtleStr.contains("## Query cancelled due to timeout during execution")) {
          log.error(e, "Triplestore timed out while sending a response, after sending statuscode 200.")
          Failure(
            TriplestoreTimeoutException(
              "Triplestore timed out while sending a response, after sending statuscode 200.",
              e,
              log
            )
          )
        } else {
          log.error(
            e,
            s"Couldn't parse response from triplestore:$logDelimiter$turtleStr${logDelimiter}in response to SPARQL query:$logDelimiter$sparql"
          )
          Failure(TriplestoreResponseException("Couldn't parse Turtle from triplestore", e, log))
        }
    }
  }

  for {
    // Run the CONSTRUCT query, asking for Turtle in the response.
    turtleStr <-
      getSparqlHttpResponse(sparqlConstructRequest.sparql, isUpdate = false, acceptMimeType = mimeTypeTextTurtle)

    response <- parseTurtleResponse(
      sparql = sparqlConstructRequest.sparql,
      turtleStr = turtleStr,
      rdfFormatUtil = rdfFormatUtil
    )
  } yield response
}
/**
 * Given a SPARQL CONSTRUCT query string, runs the query, saving the result in a file.
 *
 * @param sparql the SPARQL CONSTRUCT query string.
 * @param graphIri the named graph IRI to be used in the output file.
 * @param outputFile the output file.
 * @param outputFormat the output file format.
 * @param featureFactoryConfig the feature factory configuration.
 * @return a [[FileWrittenResponse]].
 */
private def sparqlHttpConstructFile(
  sparql: String,
  graphIri: IRI,
  outputFile: Path,
  outputFormat: QuadFormat,
  featureFactoryConfig: FeatureFactoryConfig
): Try[FileWrittenResponse] = {
  val rdfFormatUtil: RdfFormatUtil = RdfFeatureFactory.getRdfFormatUtil(featureFactoryConfig)

  // Run the CONSTRUCT query asking for Turtle, then convert the result to the
  // requested quad format and write it to the output file.
  getSparqlHttpResponse(sparql, isUpdate = false, acceptMimeType = mimeTypeTextTurtle).map { turtleStr =>
    rdfFormatUtil.turtleToQuadsFile(
      rdfSource = RdfStringSource(turtleStr),
      graphIri = graphIri,
      outputFile = outputFile,
      outputFormat = outputFormat
    )

    FileWrittenResponse()
  }
}
/**
* Given a SPARQL CONSTRUCT query string, runs the query, returns the result as a [[SparqlExtendedConstructResponse]].
*
* @param sparqlExtendedConstructRequest the request message.
* @return a [[SparqlExtendedConstructResponse]]
*/
private def sparqlHttpExtendedConstruct(
  sparqlExtendedConstructRequest: SparqlExtendedConstructRequest
): Try[SparqlExtendedConstructResponse] = {
  val rdfFormatUtil: RdfFormatUtil =
    RdfFeatureFactory.getRdfFormatUtil(sparqlExtendedConstructRequest.featureFactoryConfig)

  // Run the CONSTRUCT query asking for Turtle, then parse the response.
  val parseTry: Try[SparqlExtendedConstructResponse] =
    getSparqlHttpResponse(
      sparqlExtendedConstructRequest.sparql,
      isUpdate = false,
      acceptMimeType = mimeTypeTextTurtle
    ).flatMap { turtleStr =>
      SparqlExtendedConstructResponse.parseTurtleResponse(
        turtleStr = turtleStr,
        rdfFormatUtil = rdfFormatUtil,
        log = log
      )
    }

  // Let timeouts pass through unchanged; wrap every other failure in a TriplestoreResponseException.
  parseTry.recoverWith {
    case timeout: TriplestoreTimeoutException => Failure(timeout)
    case e =>
      Failure(
        TriplestoreResponseException(
          s"Couldn't parse Turtle from triplestore: ${sparqlExtendedConstructRequest}",
          e,
          log
        )
      )
  }
}
/**
* Performs a SPARQL update operation.
*
* @param sparqlUpdate the SPARQL update.
* @return a [[SparqlUpdateResponse]].
*/
private def sparqlHttpUpdate(sparqlUpdate: String): Try[SparqlUpdateResponse] =
  // Send the update to the triplestore and discard the response body.
  getSparqlHttpResponse(sparqlUpdate, isUpdate = true).map(_ => SparqlUpdateResponse())
/**
* Performs a SPARQL ASK query.
*
* @param sparql the SPARQL ASK query.
* @return a [[SparqlAskResponse]].
*/
def sparqlHttpAsk(sparql: String): Try[SparqlAskResponse] =
  getSparqlHttpResponse(sparql, isUpdate = false).map { resultString =>
    log.debug("sparqlHttpAsk - resultString: {}", resultString)

    // The SPARQL JSON results of an ASK query carry a single top-level "boolean" field.
    val askResult: Boolean = resultString.parseJson.asJsObject.getFields("boolean").head.convertTo[Boolean]
    SparqlAskResponse(askResult)
  }
/**
 * Empties the repository, then loads the given data.
 *
 * @param rdfDataObjects  the data to load after the reset.
 * @param prependDefaults if `true`, prepend the default data objects from `application.conf`.
 * @return a [[ResetRepositoryContentACK]] if both steps succeed.
 */
private def resetTripleStoreContent(
  rdfDataObjects: Seq[RdfDataObject],
  prependDefaults: Boolean = true
): Try[ResetRepositoryContentACK] = {
  log.debug("resetTripleStoreContent")

  // Drop the old content first; only if that succeeds, insert the new content and acknowledge.
  dropAllTriplestoreContent()
    .flatMap(_ => insertDataIntoTriplestore(rdfDataObjects, prependDefaults))
    .map(_ => ResetRepositoryContentACK())
}
/**
 * Drops all content from the repository via a SPARQL `DROP ALL` update.
 *
 * @return a [[DropAllRepositoryContentACK]] on success; any failure is wrapped
 *         in a [[TriplestoreResponseException]].
 */
private def dropAllTriplestoreContent(): Try[DropAllRepositoryContentACK] = {
  log.debug("==>> Drop All Data Start")

  val dropAllSparqlString =
    """
          DROP ALL
          """

  getSparqlHttpResponse(dropAllSparqlString, isUpdate = true)
    .map { result =>
      log.debug(s"==>> Drop All Data End, Result: $result")
      DropAllRepositoryContentACK()
    }
    .recoverWith { case t: Exception =>
      Failure(TriplestoreResponseException("Reset: Failed to execute DROP ALL", t, log))
    }
}
/**
* Inserts the data referenced inside the `rdfDataObjects` by appending it to a default set of `rdfDataObjects`
* based on the list defined in `application.conf` under the `app.triplestore.default-rdf-data` key.
*
* @param rdfDataObjects a sequence of paths and graph names referencing data that needs to be inserted.
* @param prependDefaults denotes if the rdfDataObjects list should be prepended with a default set. Default is `true`.
* @return [[InsertTriplestoreContentACK]]
*/
private def insertDataIntoTriplestore(
  rdfDataObjects: Seq[RdfDataObject],
  prependDefaults: Boolean = true
): Try[InsertTriplestoreContentACK] = {
  val httpContext: HttpClientContext = makeHttpContext

  try {
    log.debug("==>> Loading Data Start")

    // Read the default data objects (base ontologies etc.) from application.conf.
    val defaultRdfDataList = settings.tripleStoreConfig.getConfigList("default-rdf-data")
    val defaultRdfDataObjectList = defaultRdfDataList.asScala.map { config =>
      RdfDataObjectFactory(config)
    }

    val completeRdfDataObjectList = if (prependDefaults) {
      //prepend default data objects like those of knora-base, knora-admin, etc.
      defaultRdfDataObjectList ++ rdfDataObjects
    } else {
      rdfDataObjects
    }

    log.debug("insertDataIntoTriplestore - completeRdfDataObjectList: {}", completeRdfDataObjectList)

    // Iterate over the list of graphs and try inserting each one.
    for (elem <- completeRdfDataObjectList) {
      val graphName: String = elem.name

      // Loading into the (unnamed) default graph is explicitly rejected.
      if (graphName.toLowerCase == "default") {
        throw TriplestoreUnsupportedFeatureException("Requests to the default graph are not supported")
      }

      val uriBuilder: URIBuilder = new URIBuilder(dataInsertPath)
      uriBuilder.addParameter("graph", graphName) //Note: addParameter encodes the graphName URL

      val httpPost: HttpPost = new HttpPost(uriBuilder.build())

      // Add the input file to the body of the request.
      // here we need to tweak the base directory path from "webapi"
      // to the parent folder where the files can be found
      val inputFile = Paths.get("..", elem.path)

      if (!Files.exists(inputFile)) {
        throw BadRequestException(s"File ${inputFile} does not exist")
      }

      // POST the file as Turtle into the named graph.
      val fileEntity = new FileEntity(inputFile.toFile, ContentType.create(mimeTypeTextTurtle, "UTF-8"))
      httpPost.setEntity(fileEntity)

      val makeResponse: CloseableHttpResponse => InsertGraphDataContentResponse = returnInsertGraphDataResponse(
        graphName
      )

      // Do the post request for the graph. Uses the long-timeout client because loads can be slow.
      doHttpRequest(
        client = longRequestClient,
        request = httpPost,
        context = httpContext,
        processResponse = makeResponse
      )

      log.debug(s"added: $graphName")
    }

    log.debug("==>> Loading Data End")

    // Return success if all graphs are inserted successfully.
    Success(InsertTriplestoreContentACK())
  } catch {
    // Pass through the client error for unsupported graph names; wrap everything else.
    case e: TriplestoreUnsupportedFeatureException => Failure(e)
    case e: Exception =>
      Failure(TriplestoreResponseException("Reset: Failed to execute insert into triplestore", e, log))
  }
}
/**
* Checks connection to the triplestore.
*/
// Currently always delegates to the Fuseki-specific check.
private def checkTriplestore(): Try[CheckTriplestoreResponse] = checkFusekiTriplestore()
/**
 * Checks whether the Fuseki triplestore is available and configured correctly. If it is not
 * configured, tries to automatically configure (initialize) the required dataset.
 */
private def checkFusekiTriplestore(afterAutoInit: Boolean = false): Try[CheckTriplestoreResponse] = {
  import org.knora.webapi.messages.store.triplestoremessages.FusekiJsonProtocol._

  try {
    log.debug("checkFusekiRepository entered")

    // Call an endpoint that returns all datasets.
    val context: HttpClientContext = makeHttpContext

    val httpGet = new HttpGet(checkRepositoryPath)
    httpGet.addHeader("Accept", mimeTypeApplicationJson)

    // Execute the request and read the response body, always closing the response afterwards.
    val responseStr: String = {
      var maybeResponse: Option[CloseableHttpResponse] = None

      val responseTry: Try[String] = Try {
        maybeResponse = Some(queryHttpClient.execute(targetHost, httpGet, context))
        EntityUtils.toString(maybeResponse.get.getEntity, StandardCharsets.UTF_8)
      }

      maybeResponse.foreach(_.close())
      responseTry.get
    }

    val nameShouldBe = settings.triplestoreDatabaseName
    val fusekiServer: FusekiServer = JsonParser(responseStr).convertTo[FusekiServer]

    // The required dataset must exist and be in the active state.
    val neededDataset: Option[FusekiDataset] =
      fusekiServer.datasets.find(dataset => dataset.dsName == s"/$nameShouldBe" && dataset.dsState)

    if (neededDataset.nonEmpty) {
      // everything looks good
      Success(
        CheckTriplestoreResponse(
          triplestoreStatus = TriplestoreStatus.ServiceAvailable,
          msg = "Triplestore is available."
        )
      )
    } else {
      // none of the available datasets meet our requirements
      log.info(s"None of the active datasets meet our requirement of name: $nameShouldBe")

      if (settings.triplestoreAutoInit) {
        // try to auto-init if we haven't already tried it
        if (afterAutoInit) {
          // we already tried to auto-init, but it wasn't successful
          Success(
            CheckTriplestoreResponse(
              triplestoreStatus = TriplestoreStatus.NotInitialized,
              msg =
                s"Sorry, we tried to auto-initialize and still none of the active datasets meet our requirement of name: $nameShouldBe"
            )
          )
        } else {
          // try to auto-init
          log.info("Triplestore auto-init is set. Trying to auto-initialize.")
          initJenaFusekiTriplestore()
        }
      } else {
        Success(
          CheckTriplestoreResponse(
            triplestoreStatus = TriplestoreStatus.NotInitialized,
            msg = s"None of the active datasets meet our requirement of name: $nameShouldBe"
          )
        )
      }
    }
  } catch {
    // Deliberate best-effort: any failure (e.g. connection refused) is reported as
    // "service unavailable" rather than propagated.
    case e: Exception =>
      Success(
        CheckTriplestoreResponse(
          triplestoreStatus = TriplestoreStatus.ServiceUnavailable,
          msg = s"Triplestore not available: ${e.getMessage}"
        )
      )
  }
}
/**
 * Initialize the Jena Fuseki triplestore. Currently only works for
 * 'knora-test' and 'knora-test-unit' repository names. To be used, the
 * API needs to be started with 'KNORA_WEBAPI_TRIPLESTORE_AUTOINIT' set
 * to 'true' (settings.triplestoreAutoInit). This is set to `true` for tests
 * (`test/resources/test.conf`). Usage is only recommended for automated
 * testing and not for production use.
 */
private def initJenaFusekiTriplestore(): Try[CheckTriplestoreResponse] = {
  // TODO: Needs https://github.com/scalameta/metals/issues/3623 to be resolved
  // val configFileName = s"webapi/scripts/fuseki-repository-config.ttl.template"
  val configFileName = s"fuseki-repository-config.ttl.template"

  val triplestoreConfig: String =
    try {
      // take config from the classpath and write to triplestore
      FileUtil.readTextResource(configFileName).replace("@REPOSITORY@", settings.triplestoreDatabaseName)
    } catch {
      case _: NotFoundException =>
        log.error(s"Cannot initialize repository. Config $configFileName not found.")
        // NOTE(review): when the template is missing, an empty config string is still POSTed below — confirm intended.
        ""
    }

  val httpContext: HttpClientContext = makeHttpContext

  // POST the dataset configuration to Fuseki's dataset-administration endpoint.
  val httpPost: HttpPost = new HttpPost("/$/datasets")
  val stringEntity = new StringEntity(triplestoreConfig, ContentType.create(mimeTypeTextTurtle))
  httpPost.setEntity(stringEntity)

  doHttpRequest(
    client = updateHttpClient,
    request = httpPost,
    context = httpContext,
    processResponse = returnUploadResponse
  )

  // do the check again
  checkFusekiTriplestore(true)
}
/**
* Makes a triplestore URI for downloading a named graph.
*
* @param graphIri the IRI of the named graph.
* @return a triplestore-specific URI for downloading the named graph.
*/
private def makeNamedGraphDownloadUri(graphIri: IRI): URI =
  // URIBuilder takes care of encoding the graph IRI as a query parameter.
  new URIBuilder(graphPath).setParameter("graph", s"$graphIri").build()
/**
* Requests the contents of a named graph, saving the response in a file.
*
* @param graphIri the IRI of the named graph.
* @param outputFile the file to be written.
* @param outputFormat the output file format.
* @param featureFactoryConfig the feature factory configuration.
* @return a string containing the contents of the graph in N-Quads format.
*/
private def sparqlHttpGraphFile(
  graphIri: IRI,
  outputFile: Path,
  outputFormat: QuadFormat,
  featureFactoryConfig: FeatureFactoryConfig
): Try[FileWrittenResponse] = {
  val context: HttpClientContext = makeHttpContext

  // Ask the triplestore for the graph's contents as Turtle.
  val request = new HttpGet(makeNamedGraphDownloadUri(graphIri))
  request.addHeader("Accept", mimeTypeTextTurtle)

  // Stream the response into the output file, converting it to the requested quad format.
  doHttpRequest(
    client = queryHttpClient,
    request = request,
    context = context,
    processResponse = writeResponseFile(
      outputFile = outputFile,
      featureFactoryConfig = featureFactoryConfig,
      maybeGraphIriAndFormat = Some(GraphIriAndFormat(graphIri = graphIri, quadFormat = outputFormat))
    )
  )
}
/**
* Requests the contents of a named graph, returning the response as Turtle.
*
* @param graphIri the IRI of the named graph.
* @return a string containing the contents of the graph in Turtle format.
*/
private def sparqlHttpGraphData(graphIri: IRI): Try[NamedGraphDataResponse] = {
  val context: HttpClientContext = makeHttpContext

  // Ask the triplestore for the graph's contents as Turtle.
  val request = new HttpGet(makeNamedGraphDownloadUri(graphIri))
  request.addHeader("Accept", mimeTypeTextTurtle)

  doHttpRequest(
    client = queryHttpClient,
    request = request,
    context = context,
    processResponse = returnGraphDataAsTurtle(graphIri)
  )
}
/**
* Submits a SPARQL request to the triplestore and returns the response as a string.
*
* @param sparql the SPARQL request to be submitted.
* @param isUpdate `true` if this is an update request.
* @param acceptMimeType the MIME type to be provided in the HTTP Accept header.
* @param simulateTimeout if `true`, simulate a read timeout.
* @return the triplestore's response.
*/
private def getSparqlHttpResponse(
  sparql: String,
  isUpdate: Boolean,
  acceptMimeType: String = mimeTypeApplicationSparqlResultsJson,
  simulateTimeout: Boolean = false
): Try[String] = {
  val httpContext: HttpClientContext = makeHttpContext

  // Updates and queries use different endpoints, content types, and clients (with different timeouts).
  val (httpClient: CloseableHttpClient, httpPost: HttpPost) = if (isUpdate) {
    // Send updates as application/sparql-update (as per SPARQL 1.1 Protocol §3.2.2, "UPDATE using POST directly").
    val requestEntity = new StringEntity(sparql, ContentType.create(mimeTypeApplicationSparqlUpdate, "UTF-8"))
    val updateHttpPost = new HttpPost(sparqlUpdatePath)
    updateHttpPost.setEntity(requestEntity)
    (updateHttpClient, updateHttpPost)
  } else {
    // Send queries as application/x-www-form-urlencoded (as per SPARQL 1.1 Protocol §2.1.2,
    // "query via POST with URL-encoded parameters"), so we can include the "infer" parameter when using GraphDB.
    val formParams = new util.ArrayList[NameValuePair]()
    formParams.add(new BasicNameValuePair("query", sparql))
    val requestEntity: UrlEncodedFormEntity = new UrlEncodedFormEntity(formParams, Consts.UTF_8)
    val queryHttpPost: HttpPost = new HttpPost(queryPath)
    queryHttpPost.setEntity(requestEntity)
    // The caller chooses the response format (JSON results by default, Turtle for CONSTRUCT).
    queryHttpPost.addHeader("Accept", acceptMimeType)
    (queryHttpClient, queryHttpPost)
  }

  doHttpRequest(
    client = httpClient,
    request = httpPost,
    context = httpContext,
    processResponse = returnResponseAsString,
    simulateTimeout = simulateTimeout
  )
}
/**
* Dumps the whole repository in N-Quads format, saving the response in a file.
*
* @param outputFile the output file.
* @param featureFactoryConfig the feature factory configuration.
* @return a string containing the contents of the graph in N-Quads format.
*/
private def downloadRepository(
  outputFile: Path,
  featureFactoryConfig: FeatureFactoryConfig
): Try[FileWrittenResponse] = {
  val httpContext: HttpClientContext = makeHttpContext

  val httpGet = new HttpGet(new URIBuilder(repositoryDownloadPath).build())
  httpGet.addHeader("Accept", mimeTypeApplicationNQuads)

  // A whole-repository dump can take much longer than a normal query, so allow 10x the
  // configured query timeout. Multiply as Long *before* narrowing to Int, so a large
  // configured timeout doesn't overflow during the multiplication.
  val queryTimeoutMillis: Int = (settings.triplestoreQueryTimeout.toMillis * 10).toInt

  val queryRequestConfig = RequestConfig
    .custom()
    .setConnectTimeout(queryTimeoutMillis)
    .setConnectionRequestTimeout(queryTimeoutMillis)
    .setSocketTimeout(queryTimeoutMillis)
    .build

  // A dedicated client is built per download because it needs the extended timeout.
  val queryHttpClient: CloseableHttpClient = HttpClients.custom
    .setDefaultCredentialsProvider(credsProvider)
    .setDefaultRequestConfig(queryRequestConfig)
    .build

  // doHttpRequest fully consumes and closes the response before returning, so the
  // per-call client can be closed here; without this, each download leaked the
  // client's underlying connection pool.
  try {
    doHttpRequest(
      client = queryHttpClient,
      request = httpGet,
      context = httpContext,
      processResponse = writeResponseFile(
        outputFile = outputFile,
        featureFactoryConfig = featureFactoryConfig
      )
    )
  } finally {
    queryHttpClient.close()
  }
}
/**
* Uploads repository content from an N-Quads file.
*
* @param inputFile an N-Quads file containing the content to be uploaded to the repository.
*/
private def uploadRepository(inputFile: Path): Try[RepositoryUploadedResponse] = {
  val context: HttpClientContext = makeHttpContext
  // POST the N-Quads file directly as the request body.
  val uploadPost = new HttpPost(repositoryUploadPath)
  uploadPost.setEntity(new FileEntity(inputFile.toFile, ContentType.create(mimeTypeApplicationNQuads, "UTF-8")))
  doHttpRequest(
    client = longRequestClient,
    request = uploadPost,
    context = context,
    processResponse = returnUploadResponse
  )
}
/**
* Puts a data graph into the repository.
*
* @param graphContent a data graph in Turtle format to be inserted into the repository.
* @param graphName the name of the graph.
*/
private def insertDataGraphRequest(graphContent: String, graphName: String): Try[InsertGraphDataContentResponse] = {
  val context: HttpClientContext = makeHttpContext
  // The "graph" query parameter selects the named graph to write to.
  val requestUri = new URIBuilder(dataInsertPath).addParameter("graph", graphName).build()
  val httpPut: HttpPut = new HttpPut(requestUri)
  httpPut.setEntity(new StringEntity(graphContent, ContentType.create(mimeTypeTextTurtle, "UTF-8")))
  doHttpRequest(
    client = updateHttpClient,
    request = httpPut,
    context = context,
    processResponse = returnInsertGraphDataResponse(graphName)
  )
}
/**
* Formulate HTTP context.
*
* @return httpContext with credentials and authorization
*/
private def makeHttpContext: HttpClientContext = {
  // Seed the auth cache with a Basic scheme for the target host so credentials
  // are sent without waiting for a 401 challenge (preemptive authentication).
  val authCache: AuthCache = new BasicAuthCache
  authCache.put(targetHost, new BasicScheme)

  val context: HttpClientContext = HttpClientContext.create
  context.setCredentialsProvider(credsProvider)
  context.setAuthCache(authCache)
  context
}
/**
* Makes an HTTP connection to the triplestore, and delegates processing of the response
* to a function.
*
* @param client the HTTP client to be used for the request.
* @param request the request to be sent.
* @param context the request context to be used.
* @param processResponse a function that processes the HTTP response.
* @param simulateTimeout if `true`, simulate a read timeout.
* @tparam T the return type of `processResponse`.
* @return the return value of `processResponse`.
*/
private def doHttpRequest[T](
  client: CloseableHttpClient,
  request: HttpRequest,
  context: HttpClientContext,
  processResponse: CloseableHttpResponse => T,
  simulateTimeout: Boolean = false
): Try[T] = {
  // Make an Option wrapper for the response, so we can close it if we get one,
  // even if an error occurs.
  var maybeResponse: Option[CloseableHttpResponse] = None
  val triplestoreResponseTry = Try {
    if (simulateTimeout) {
      // Test hook: behave exactly as if the socket read timed out.
      throw new java.net.SocketTimeoutException("Simulated read timeout")
    }
    val start = System.currentTimeMillis()
    val response = client.execute(targetHost, request, context)
    maybeResponse = Some(response)
    val statusCode: Int = response.getStatusLine.getStatusCode
    if (statusCode == 404) {
      throw NotFoundException("The requested data was not found")
    } else {
      // Any other non-2xx status is treated as an error; include the response body
      // in the exception message when one is present.
      val statusCategory: Int = statusCode / 100
      if (statusCategory != 2) {
        Option(response.getEntity)
          .map(responseEntity => EntityUtils.toString(responseEntity, StandardCharsets.UTF_8)) match {
          case Some(responseEntityStr) =>
            val msg = s"Triplestore responded with HTTP code $statusCode: $responseEntityStr"
            log.error(msg)
            // A 503 whose body contains "Query timed out" is treated as a query timeout
            // rather than a generic error response.
            if (statusCode == 503 && responseEntityStr.contains("Query timed out"))
              throw TriplestoreTimeoutException(msg)
            else throw TriplestoreResponseException(msg)
          case None =>
            log.error(s"Triplestore responded with HTTP code $statusCode")
            throw TriplestoreResponseException(s"Triplestore responded with HTTP code $statusCode")
        }
      }
    }
    val took = System.currentTimeMillis() - start
    metricsLogger.info(s"[$statusCode] Triplestore query took: ${took}ms")
    // processResponse must fully consume the entity here: the response is closed
    // immediately after this Try completes.
    processResponse(response)
  }
  // The Try above has completed (successfully or not) by this point, so the response
  // can safely be closed whether or not processing succeeded.
  maybeResponse.foreach(_.close)
  // TODO: Can we make Fuseki abandon the query if it takes too long?
  // Translate low-level failures into domain exceptions. The first/middle cases that
  // rethrow unchanged exist so those types are NOT swallowed by the final catch-all.
  triplestoreResponseTry.recover {
    case tre: TriplestoreResponseException => throw tre
    case socketTimeoutException: java.net.SocketTimeoutException =>
      val message =
        "The triplestore took too long to process a request. This can happen because the triplestore needed too much time to search through the data that is currently in the triplestore. Query optimisation may help."
      log.error(socketTimeoutException, message)
      throw TriplestoreTimeoutException(message = message, e = socketTimeoutException, log = log)
    case timeout: TriplestoreTimeoutException => throw timeout
    case notFound: NotFoundException => throw notFound
    case e: Exception =>
      val message = "Failed to connect to triplestore"
      log.error(e, message)
      throw TriplestoreConnectionException(message = message, e = e, log = log)
  }
}
/**
 * Reads an HTTP response entity as a UTF-8 string, returning the empty string
 * if the response has no entity.
 */
def returnResponseAsString(response: CloseableHttpResponse): String =
  Option(response.getEntity).fold("")(entity => EntityUtils.toString(entity, StandardCharsets.UTF_8))
def returnGraphDataAsTurtle(graphIri: IRI)(response: CloseableHttpResponse): NamedGraphDataResponse =
Option(response.getEntity) match {
case None =>
log.error(s"Triplestore returned no content for graph $graphIri")