-
Notifications
You must be signed in to change notification settings - Fork 0
/
nlapi.go
1814 lines (1436 loc) · 57.9 KB
/
nlapi.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
// Package nlapi provides primitives to interact with the openapi HTTP API.
//
// Code generated by github.com/deepmap/oapi-codegen version v1.11.0 DO NOT EDIT.
package nlapi
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/url"
"strings"
"github.com/deepmap/oapi-codegen/pkg/runtime"
)
const (
	// BearerAuthScopes names the context key under which the generated
	// handlers/clients store bearer-auth scopes for a request.
	BearerAuthScopes = "bearerAuth.Scopes"
)
// AnalysisRequest is the JSON request body sent to the analysis endpoints.
type AnalysisRequest struct {
	// Document
	Document *Document `json:"document,omitempty"`
}

// AnalyzeDocument carries the analysis data produced for a document.
type AnalyzeDocument struct {
	// Analyzed text
	Content *string `json:"content,omitempty"`
	// Entities
	Entities *[]Entity `json:"entities,omitempty"`
	// Knowledge Graph syncons data
	Knowledge *[]KnowledgeEntry `json:"knowledge,omitempty"`
	// Text language
	Language *string `json:"language,omitempty"`
	// Main lemmas
	MainLemmas *[]MainLemma `json:"mainLemmas,omitempty"`
	// Main phrases
	MainPhrases *[]MainPhrase `json:"mainPhrases,omitempty"`
	// Main sentences
	MainSentences *[]MainSentence `json:"mainSentences,omitempty"`
	// Main syncons
	MainSyncons *[]MainSyncon `json:"mainSyncons,omitempty"`
	// Paragraphs
	Paragraphs *[]Paragraph `json:"paragraphs,omitempty"`
	// Phrases
	Phrases *[]Phrase `json:"phrases,omitempty"`
	// Verb-mediated relations
	Relations *[]Relation `json:"relations,omitempty"`
	// Sentences
	Sentences *[]Sentence `json:"sentences,omitempty"`
	// Sentiment (anonymous struct mirrors the spec's inline schema)
	Sentiment *struct {
		// Items expressing sentiment
		Items *[]SentimentItem `json:"items,omitempty"`
		// Text negativity
		Negativity *float32 `json:"negativity,omitempty"`
		// Overall text sentiment score
		Overall *float32 `json:"overall,omitempty"`
		// Text positivity
		Positivity *float32 `json:"positivity,omitempty"`
	} `json:"sentiment,omitempty"`
	// Tokens
	Tokens *[]Token `json:"tokens,omitempty"`
	// Knowledge Graph topics
	Topics *[]DocumentTopic `json:"topics,omitempty"`
	// Service version
	Version *string `json:"version,omitempty"`
}

// AnalyzeResponse is the analysis resource response envelope.
type AnalyzeResponse struct {
	// Analysis data
	Data *AnalyzeDocument `json:"data,omitempty"`
	// Service errors
	Errors *[]ServiceError `json:"errors,omitempty"`
	// Operation completed successfully (true/false)
	Success *bool `json:"success,omitempty"`
}

// Atom describes a sub-token atom.
type Atom struct {
	// Zero-based position of the first character after the atom
	End *int64 `json:"end,omitempty"`
	// Lemma
	Lemma *string `json:"lemma,omitempty"`
	// Zero-based position of the first character of the atom
	Start *int64 `json:"start,omitempty"`
	// Expert.ai type
	Type *string `json:"type,omitempty"`
}
// CategorizeDocument carries the categorization data for a document.
type CategorizeDocument struct {
	// Categories
	Categories *[]Category `json:"categories,omitempty"`
	// Text language
	Language *string `json:"language,omitempty"`
	// Service version
	Version *string `json:"version,omitempty"`
}

// CategorizeResponse is the classification resource response envelope.
type CategorizeResponse struct {
	// Categorization data
	Data *CategorizeDocument `json:"data,omitempty"`
	// Service errors
	Errors *[]ServiceError `json:"errors,omitempty"`
	// Operation completed successfully (true/false)
	Success *bool `json:"success,omitempty"`
}

// Category is one category assigned to the analyzed text.
type Category struct {
	// Score expressed as a percentage of the sum of the scores of all the candidate categories, winners and not (see the **score** property)
	Frequency *float32 `json:"frequency,omitempty"`
	// Hierarchical path
	Hierarchy *[]string `json:"hierarchy,omitempty"`
	// Category ID
	Id *string `json:"id,omitempty"`
	// Category label
	Label *string `json:"label,omitempty"`
	// Name of the software package containing the reference taxonomy
	Namespace *string `json:"namespace,omitempty"`
	// Positions of the portions of text that contributed to the selection of the category
	Positions *[]DocumentPosition `json:"positions,omitempty"`
	// Score assigned to the category to represent its relevance
	Score *int32 `json:"score,omitempty"`
	// True if the category is deemed particularly relevant
	Winner *bool `json:"winner,omitempty"`
}

// ContextInfo holds information about a context.
type ContextInfo struct {
	// Context description
	Description *string `json:"description,omitempty"`
	// Information about supported languages and available analyses
	Languages *[]ContextLanguageInfo `json:"languages,omitempty"`
	// Context name
	Name *string `json:"name,omitempty"`
}

// ContextLanguageInfo holds information about a supported language and its available analyses.
type ContextLanguageInfo struct {
	// Available analyses
	Analyses *[]string `json:"analyses,omitempty"`
	// Language code
	Code *string `json:"code,omitempty"`
	// Language name
	Name *string `json:"name,omitempty"`
}

// ContextsResponse lists the available contexts.
type ContextsResponse struct {
	// List of contexts' information
	Contexts *[]ContextInfo `json:"contexts,omitempty"`
}
// Dependency describes a token's syntactic dependency link.
type Dependency struct {
	// Number of the head token
	Head *int64 `json:"head,omitempty"`
	// Zero-based cardinal number of the token
	Id *int64 `json:"id,omitempty"`
	// <a href='https://universaldependencies.org/u/dep/#universal-dependency-relations' target='_blank'>Dependency relation</a> between the token and the head token
	Label *string `json:"label,omitempty"`
}

// DetectorInfo summarizes one detector.
type DetectorInfo struct {
	// OpenAPI document describing the use of the detector and its output
	Contract *string `json:"contract,omitempty"`
	// Detector description
	Description *string `json:"description,omitempty"`
	// Supported languages
	Languages *[]LanguageInfo `json:"languages,omitempty"`
	// Detector name
	Name *string `json:"name,omitempty"`
}

// DetectorsResponse lists the available detectors.
type DetectorsResponse struct {
	// List of detectors' information
	Detectors *[]DetectorInfo `json:"detectors,omitempty"`
}

// Document is the input document to analyze.
type Document struct {
	// The document's text
	Text *string `json:"text,omitempty"`
}

// DocumentPosition is a half-open character range inside the document.
type DocumentPosition struct {
	// Zero-based position of the character after the last
	End *int64 `json:"end,omitempty"`
	// Zero-based position of the first character
	Start *int64 `json:"start,omitempty"`
}

// DocumentTopic is a Knowledge Graph topic detected in the document.
type DocumentTopic struct {
	// Language-independent topic ID
	Id *int64 `json:"id,omitempty"`
	// Topic name
	Label *string `json:"label,omitempty"`
	// Score assigned to the topic to represent its relevance
	Score *float32 `json:"score,omitempty"`
	// True if the topic is deemed particularly relevant
	Winner *bool `json:"winner,omitempty"`
}
// Entity describes an entity recognized in the text.
type Entity struct {
	// Entity attributes inferred from the context or from the Knowledge Graph
	Attributes *[]InferredAttribute `json:"attributes,omitempty"`
	// Base form (lemma) of the entity name
	Lemma *string `json:"lemma,omitempty"`
	// Positions of the entity's mentions
	Positions *[]DocumentPosition `json:"positions,omitempty"`
	// Entity relevance
	Relevance *int64 `json:"relevance,omitempty"`
	// ID used to look up Knowledge Graph data in the `knowledge` array
	Syncon *int64 `json:"syncon,omitempty"`
	// Entity type
	Type *string `json:"type,omitempty"`
}

// InferredAttribute is an attribute inferred for an entity; attributes may nest.
type InferredAttribute struct {
	// Attribute name
	Attribute *string `json:"attribute,omitempty"`
	// Attribute's attributes
	Attributes *[]InferredAttribute `json:"attributes,omitempty"`
	// Lemma
	Lemma *string `json:"lemma,omitempty"`
	// ID used to look up Knowledge Graph data in the `knowledge` array
	Syncon *int64 `json:"syncon,omitempty"`
	// Attribute type
	Type *string `json:"type,omitempty"`
}

// KnowledgeEntry holds Knowledge Graph data for one syncon.
type KnowledgeEntry struct {
	// Textual rendering of the general conceptual category for the token in the Knowledge Graph
	Label *string `json:"label,omitempty"`
	// Syncon extended properties
	Properties *[]Property `json:"properties,omitempty"`
	// Syncon ID
	Syncon *int64 `json:"syncon,omitempty"`
}

// LanguageInfo holds information about a supported language.
type LanguageInfo struct {
	// Language code
	Code *string `json:"code,omitempty"`
	// Language name
	Name *string `json:"name,omitempty"`
}

// MainLemma is a relevant lemma of the document.
type MainLemma struct {
	// Positions of lemma occurrences
	Positions *[]DocumentPosition `json:"positions,omitempty"`
	// Score assigned to the lemma to represent its relevance
	Score *float32 `json:"score,omitempty"`
	// Lemma
	Value *string `json:"value,omitempty"`
}
// MainPhrase is a relevant phrase of the document.
type MainPhrase struct {
	// Positions of phrase occurrences
	Positions *[]DocumentPosition `json:"positions,omitempty"`
	// Score assigned to the phrase to represent its relevance
	Score *float32 `json:"score,omitempty"`
	// Phrase text
	Value *string `json:"value,omitempty"`
}

// MainSentence is a relevant sentence of the document.
type MainSentence struct {
	// Zero-based position of the first character after the sentence
	End *int64 `json:"end,omitempty"`
	// Score assigned to the sentence to represent its relevance
	Score *float32 `json:"score,omitempty"`
	// Zero-based position of the first character of the sentence
	Start *int64 `json:"start,omitempty"`
	// Sentence text
	Value *string `json:"value,omitempty"`
}

// MainSyncon is a relevant syncon of the document.
type MainSyncon struct {
	// Lemma
	Lemma *string `json:"lemma,omitempty"`
	// Positions of the expressions of the syncon
	Positions *[]DocumentPosition `json:"positions,omitempty"`
	// Score assigned to the syncon to represent its relevance
	Score *float32 `json:"score,omitempty"`
	// ID used to look up Knowledge Graph data in the `knowledge` array
	Syncon *int64 `json:"syncon,omitempty"`
}

// Paragraph describes a paragraph of the analyzed text.
type Paragraph struct {
	// Zero-based position of the first character after the paragraph
	End *int64 `json:"end,omitempty"`
	// Indexes (in the `sentences` array) of the sentences that make up the paragraph
	Sentences *[]int32 `json:"sentences,omitempty"`
	// Zero-based position of the first character of the paragraph
	Start *int64 `json:"start,omitempty"`
}

// Phrase describes a phrase of the analyzed text.
type Phrase struct {
	// Zero-based position of the first character after the phrase
	End *int64 `json:"end,omitempty"`
	// Zero-based position of the first character of the phrase
	Start *int64 `json:"start,omitempty"`
	// Indexes (in the `tokens` array) of the tokens that make up the phrase
	Tokens *[]int32 `json:"tokens,omitempty"`
	// Phrase type
	Type *string `json:"type,omitempty"`
}

// Property is a syncon extended property (type/value pair).
type Property struct {
	// Property type
	Type *string `json:"type,omitempty"`
	// Property value
	Value *string `json:"value,omitempty"`
}
// RelatedItem is, in a relation, a term that's directly or indirectly related to the verb.
type RelatedItem struct {
	// Lemma
	Lemma *string `json:"lemma,omitempty"`
	// Phrase index in the `phrases` array
	Phrase *int64 `json:"phrase,omitempty"`
	// Related terms
	Related *[]RelatedItem `json:"related,omitempty"`
	// Verb-item relation
	Relation *string `json:"relation,omitempty"`
	// Relevance
	Relevance *int64 `json:"relevance,omitempty"`
	// ID used to look up Knowledge Graph data in the `knowledge` array
	Syncon *int64 `json:"syncon,omitempty"`
	// Item text
	Text *string `json:"text,omitempty"`
	// Type
	Type *string `json:"type,omitempty"`
	// A concept that does not exist in the Knowledge Graph but heuristics recognized as a type of a known parent concept.
	Vsyn *VirtualSyncon `json:"vsyn,omitempty"`
}

// Relation is a verb-mediated relation between terms of the text.
type Relation struct {
	// Related items
	Related *[]RelatedItem `json:"related,omitempty"`
	// Verb of a relation
	Verb *RelationVerb `json:"verb,omitempty"`
}

// RelationVerb is the verb of a relation.
type RelationVerb struct {
	// Lemma
	Lemma *string `json:"lemma,omitempty"`
	// Phrase index in the `phrases` array
	Phrase *int64 `json:"phrase,omitempty"`
	// Relevance
	Relevance *int64 `json:"relevance,omitempty"`
	// ID used to look up Knowledge Graph data in the `knowledge` array
	Syncon *int64 `json:"syncon,omitempty"`
	// Verb text
	Text *string `json:"text,omitempty"`
	// Type
	Type *string `json:"type,omitempty"`
}

// Sentence describes a sentence of the analyzed text.
type Sentence struct {
	// Zero-based position of the first character after the sentence
	End *int64 `json:"end,omitempty"`
	// Indexes (in the `phrases` array) of the phrases that make up the sentence
	Phrases *[]int32 `json:"phrases,omitempty"`
	// Zero-based position of the first character of the sentence
	Start *int64 `json:"start,omitempty"`
}

// SentimentItem is an item or chain of items expressing sentiment.
type SentimentItem struct {
	// Items expressing sentiment
	Items *[]SentimentItem `json:"items,omitempty"`
	// Lemma
	Lemma *string `json:"lemma,omitempty"`
	// Item/chain sentiment score
	Sentiment *float32 `json:"sentiment,omitempty"`
	// ID used to look up Knowledge Graph data in the `knowledge` array
	Syncon *int64 `json:"syncon,omitempty"`
}

// ServiceError carries error information returned by the service.
type ServiceError struct {
	// Error code
	Code *string `json:"code,omitempty"`
	// Error message
	Message *string `json:"message,omitempty"`
}
// TaxonomiesResponse lists the available taxonomies.
type TaxonomiesResponse struct {
	// List of taxonomies' information.
	// NOTE(review): "Texonomies"/"texonomies" looks like a misspelling of
	// "taxonomies" inherited from the upstream OpenAPI spec. The JSON tag
	// must keep matching the wire format, and renaming the field would break
	// callers, so it is intentionally left as-is.
	Texonomies *[]TaxonomyInfo `json:"texonomies,omitempty"`
}

// Taxonomy is a taxonomy's categories tree.
type Taxonomy struct {
	// Name of the software package containing the reference taxonomy
	Namespace *string `json:"namespace,omitempty"`
	// Categories tree
	Taxonomy *[]TaxonomyCategory `json:"taxonomy,omitempty"`
}

// TaxonomyCategory is one node of a taxonomy categories tree.
type TaxonomyCategory struct {
	// Sub-categories
	Categories *[]TaxonomyCategory `json:"categories,omitempty"`
	// Category ID
	Id *string `json:"id,omitempty"`
	// Category label
	Label *string `json:"label,omitempty"`
}

// TaxonomyInfo summarizes one taxonomy.
type TaxonomyInfo struct {
	// OpenAPI document describing extra ways to use the taxonomy and their outputs
	Contract *string `json:"contract,omitempty"`
	// Taxonomy description
	Description *string `json:"description,omitempty"`
	// Supported languages
	Languages *[]LanguageInfo `json:"languages,omitempty"`
	// Taxonomy name
	Name *string `json:"name,omitempty"`
}

// TaxonomyResponse is the taxonomy-categories response envelope.
type TaxonomyResponse struct {
	Data *[]Taxonomy `json:"data,omitempty"`
	// Service errors
	Errors *[]ServiceError `json:"errors,omitempty"`
	// Operation completed successfully (true/false)
	Success *bool `json:"success,omitempty"`
}
// Token describes a token of the analyzed text.
type Token struct {
	// Atoms that make up the token
	Atoms *[]Atom `json:"atoms,omitempty"`
	// Dependency info
	Dependency *Dependency `json:"dependency,omitempty"`
	// Zero-based position of the first character after the token
	End *int64 `json:"end,omitempty"`
	// Lemma
	Lemma *string `json:"lemma,omitempty"`
	// A semicolon separated list of <a href='https://universaldependencies.org/format.html#morphological-annotation'>CoNLL-U format</a> morphological features
	Morphology *string `json:"morphology,omitempty"`
	// Paragraph index in the `paragraphs` array
	Paragraph *int64 `json:"paragraph,omitempty"`
	// Phrase index in the `phrases` array
	Phrase *int64 `json:"phrase,omitempty"`
	// <a href='https://universaldependencies.org/u/pos/'>Universal Dependencies part-of-speech tag</a>
	Pos *string `json:"pos,omitempty"`
	// Sentence index in the `sentences` array
	Sentence *int64 `json:"sentence,omitempty"`
	// Zero-based position of the first character of the token
	Start *int64 `json:"start,omitempty"`
	// ID used to look up Knowledge Graph data in the `knowledge` array
	Syncon *int64 `json:"syncon,omitempty"`
	// Expert.ai type
	Type *string `json:"type,omitempty"`
	// A concept that does not exist in the Knowledge Graph but heuristics recognized as a type of a known parent concept.
	Vsyn *VirtualSyncon `json:"vsyn,omitempty"`
}

// VirtualSyncon is a concept that does not exist in the Knowledge Graph but heuristics recognized as a type of a known parent concept.
type VirtualSyncon struct {
	// ID used to mark all the occurrences of the virtual concept in the text
	Id *int64 `json:"id,omitempty"`
	// Parent concept; ID is used to look up Knowledge Graph data in the `knowledge` array
	Parent *int64 `json:"parent,omitempty"`
}
// N200Contexts is the 200-response alias for the available contexts.
type N200Contexts = ContextsResponse

// N200Detectors is the 200-response alias for the available detectors.
type N200Detectors = DetectorsResponse

// N200Taxonomies is the 200-response alias for the available taxonomies.
type N200Taxonomies = TaxonomiesResponse

// N200Taxonomy is the 200-response alias for the taxonomy categories.
type N200Taxonomy = TaxonomyResponse

// PostAnalyzeContextLanguageJSONBody defines parameters for PostAnalyzeContextLanguage.
type PostAnalyzeContextLanguageJSONBody = AnalysisRequest

// PostAnalyzeContextLanguageParamsLanguage defines parameters for PostAnalyzeContextLanguage.
type PostAnalyzeContextLanguageParamsLanguage string

// PostAnalyzeContextLanguageAnalysisJSONBody defines parameters for PostAnalyzeContextLanguageAnalysis.
type PostAnalyzeContextLanguageAnalysisJSONBody = AnalysisRequest

// PostAnalyzeContextLanguageAnalysisParamsLanguage defines parameters for PostAnalyzeContextLanguageAnalysis.
type PostAnalyzeContextLanguageAnalysisParamsLanguage string

// PostAnalyzeContextLanguageAnalysisParamsAnalysis defines parameters for PostAnalyzeContextLanguageAnalysis.
type PostAnalyzeContextLanguageAnalysisParamsAnalysis string

// PostCategorizeTaxonomyLanguageJSONBody defines parameters for PostCategorizeTaxonomyLanguage.
type PostCategorizeTaxonomyLanguageJSONBody = AnalysisRequest

// PostCategorizeTaxonomyLanguageParamsLanguage defines parameters for PostCategorizeTaxonomyLanguage.
type PostCategorizeTaxonomyLanguageParamsLanguage string

// PostDetectDetectorLanguageJSONBody defines parameters for PostDetectDetectorLanguage.
type PostDetectDetectorLanguageJSONBody = AnalysisRequest

// PostDetectDetectorLanguageParamsLanguage defines parameters for PostDetectDetectorLanguage.
type PostDetectDetectorLanguageParamsLanguage string

// GetTaxonomiesTaxonomyLanguageParamsLanguage defines parameters for GetTaxonomiesTaxonomyLanguage.
type GetTaxonomiesTaxonomyLanguageParamsLanguage string

// PostAnalyzeContextLanguageJSONRequestBody defines body for PostAnalyzeContextLanguage for application/json ContentType.
type PostAnalyzeContextLanguageJSONRequestBody = PostAnalyzeContextLanguageJSONBody

// PostAnalyzeContextLanguageAnalysisJSONRequestBody defines body for PostAnalyzeContextLanguageAnalysis for application/json ContentType.
type PostAnalyzeContextLanguageAnalysisJSONRequestBody = PostAnalyzeContextLanguageAnalysisJSONBody

// PostCategorizeTaxonomyLanguageJSONRequestBody defines body for PostCategorizeTaxonomyLanguage for application/json ContentType.
type PostCategorizeTaxonomyLanguageJSONRequestBody = PostCategorizeTaxonomyLanguageJSONBody

// PostDetectDetectorLanguageJSONRequestBody defines body for PostDetectDetectorLanguage for application/json ContentType.
type PostDetectDetectorLanguageJSONRequestBody = PostDetectDetectorLanguageJSONBody
// RequestEditorFn is the function signature for the RequestEditor callback function.
type RequestEditorFn func(ctx context.Context, req *http.Request) error

// HttpRequestDoer performs HTTP requests.
//
// The standard http.Client implements this interface.
type HttpRequestDoer interface {
	Do(req *http.Request) (*http.Response, error)
}
// Client conforms to the OpenAPI3 specification for this service.
type Client struct {
	// The endpoint of the server conforming to this interface, with scheme,
	// https://api.deepmap.com for example. This can contain a path relative
	// to the server, such as https://api.deepmap.com/dev-test, and all the
	// paths in the swagger spec will be appended to the server.
	Server string
	// Doer for performing requests, typically a *http.Client with any
	// customized settings, such as certificate chains.
	Client HttpRequestDoer
	// A list of callbacks for modifying requests which are generated before sending over
	// the network.
	RequestEditors []RequestEditorFn
}

// ClientOption allows setting custom parameters during construction.
type ClientOption func(*Client) error
// NewClient creates a new Client for the given server base URL with
// reasonable defaults, after applying any supplied options.
func NewClient(server string, opts ...ClientOption) (*Client, error) {
	c := &Client{Server: server}
	// Let each option mutate the client; abort on the first failure.
	for _, opt := range opts {
		if err := opt(c); err != nil {
			return nil, err
		}
	}
	// Operation paths are resolved relative to Server, so it must end in "/".
	if !strings.HasSuffix(c.Server, "/") {
		c.Server += "/"
	}
	// Fall back to a plain http.Client when no custom Doer was supplied.
	if c.Client == nil {
		c.Client = &http.Client{}
	}
	return c, nil
}
// WithHTTPClient overrides the default Doer used to execute requests.
// It is automatically created using http.Client otherwise; overriding is
// useful for tests.
func WithHTTPClient(doer HttpRequestDoer) ClientOption {
	return func(client *Client) error {
		client.Client = doer
		return nil
	}
}
// WithRequestEditorFn registers a callback that is invoked right before a
// request is sent; it can be used to mutate the request (e.g. add headers).
func WithRequestEditorFn(fn RequestEditorFn) ClientOption {
	return func(client *Client) error {
		client.RequestEditors = append(client.RequestEditors, fn)
		return nil
	}
}
// ClientInterface is the interface specification for the client above.
type ClientInterface interface {
	// PostAnalyzeContextLanguage request with any body
	PostAnalyzeContextLanguageWithBody(ctx context.Context, context string, language PostAnalyzeContextLanguageParamsLanguage, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error)

	PostAnalyzeContextLanguage(ctx context.Context, context string, language PostAnalyzeContextLanguageParamsLanguage, body PostAnalyzeContextLanguageJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error)

	// PostAnalyzeContextLanguageAnalysis request with any body
	PostAnalyzeContextLanguageAnalysisWithBody(ctx context.Context, context string, language PostAnalyzeContextLanguageAnalysisParamsLanguage, analysis PostAnalyzeContextLanguageAnalysisParamsAnalysis, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error)

	PostAnalyzeContextLanguageAnalysis(ctx context.Context, context string, language PostAnalyzeContextLanguageAnalysisParamsLanguage, analysis PostAnalyzeContextLanguageAnalysisParamsAnalysis, body PostAnalyzeContextLanguageAnalysisJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error)

	// PostCategorizeTaxonomyLanguage request with any body
	PostCategorizeTaxonomyLanguageWithBody(ctx context.Context, taxonomy string, language PostCategorizeTaxonomyLanguageParamsLanguage, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error)

	PostCategorizeTaxonomyLanguage(ctx context.Context, taxonomy string, language PostCategorizeTaxonomyLanguageParamsLanguage, body PostCategorizeTaxonomyLanguageJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error)

	// GetContexts request
	GetContexts(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error)

	// PostDetectDetectorLanguage request with any body
	PostDetectDetectorLanguageWithBody(ctx context.Context, detector string, language PostDetectDetectorLanguageParamsLanguage, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error)

	PostDetectDetectorLanguage(ctx context.Context, detector string, language PostDetectDetectorLanguageParamsLanguage, body PostDetectDetectorLanguageJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error)

	// GetDetectors request
	GetDetectors(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error)

	// GetTaxonomies request
	GetTaxonomies(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error)

	// GetTaxonomiesTaxonomyLanguage request
	GetTaxonomiesTaxonomyLanguage(ctx context.Context, taxonomy string, language GetTaxonomiesTaxonomyLanguageParamsLanguage, reqEditors ...RequestEditorFn) (*http.Response, error)
}
// PostAnalyzeContextLanguageWithBody builds and sends the
// PostAnalyzeContextLanguage request using a caller-supplied body and
// content type, applying any request editors before dispatch.
func (c *Client) PostAnalyzeContextLanguageWithBody(ctx context.Context, context string, language PostAnalyzeContextLanguageParamsLanguage, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) {
	req, err := NewPostAnalyzeContextLanguageRequestWithBody(c.Server, context, language, contentType, body)
	if err != nil {
		return nil, err
	}
	req = req.WithContext(ctx)
	if err := c.applyEditors(ctx, req, reqEditors); err != nil {
		return nil, err
	}
	return c.Client.Do(req)
}

// PostAnalyzeContextLanguage builds and sends the PostAnalyzeContextLanguage
// request with a JSON-encoded body.
func (c *Client) PostAnalyzeContextLanguage(ctx context.Context, context string, language PostAnalyzeContextLanguageParamsLanguage, body PostAnalyzeContextLanguageJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) {
	req, err := NewPostAnalyzeContextLanguageRequest(c.Server, context, language, body)
	if err != nil {
		return nil, err
	}
	req = req.WithContext(ctx)
	if err := c.applyEditors(ctx, req, reqEditors); err != nil {
		return nil, err
	}
	return c.Client.Do(req)
}

// PostAnalyzeContextLanguageAnalysisWithBody builds and sends the
// PostAnalyzeContextLanguageAnalysis request using a caller-supplied body
// and content type.
func (c *Client) PostAnalyzeContextLanguageAnalysisWithBody(ctx context.Context, context string, language PostAnalyzeContextLanguageAnalysisParamsLanguage, analysis PostAnalyzeContextLanguageAnalysisParamsAnalysis, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) {
	req, err := NewPostAnalyzeContextLanguageAnalysisRequestWithBody(c.Server, context, language, analysis, contentType, body)
	if err != nil {
		return nil, err
	}
	req = req.WithContext(ctx)
	if err := c.applyEditors(ctx, req, reqEditors); err != nil {
		return nil, err
	}
	return c.Client.Do(req)
}

// PostAnalyzeContextLanguageAnalysis builds and sends the
// PostAnalyzeContextLanguageAnalysis request with a JSON-encoded body.
func (c *Client) PostAnalyzeContextLanguageAnalysis(ctx context.Context, context string, language PostAnalyzeContextLanguageAnalysisParamsLanguage, analysis PostAnalyzeContextLanguageAnalysisParamsAnalysis, body PostAnalyzeContextLanguageAnalysisJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) {
	req, err := NewPostAnalyzeContextLanguageAnalysisRequest(c.Server, context, language, analysis, body)
	if err != nil {
		return nil, err
	}
	req = req.WithContext(ctx)
	if err := c.applyEditors(ctx, req, reqEditors); err != nil {
		return nil, err
	}
	return c.Client.Do(req)
}
// PostCategorizeTaxonomyLanguageWithBody builds and sends the
// PostCategorizeTaxonomyLanguage request using a caller-supplied body and
// content type.
func (c *Client) PostCategorizeTaxonomyLanguageWithBody(ctx context.Context, taxonomy string, language PostCategorizeTaxonomyLanguageParamsLanguage, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) {
	req, err := NewPostCategorizeTaxonomyLanguageRequestWithBody(c.Server, taxonomy, language, contentType, body)
	if err != nil {
		return nil, err
	}
	req = req.WithContext(ctx)
	if err := c.applyEditors(ctx, req, reqEditors); err != nil {
		return nil, err
	}
	return c.Client.Do(req)
}

// PostCategorizeTaxonomyLanguage builds and sends the
// PostCategorizeTaxonomyLanguage request with a JSON-encoded body.
func (c *Client) PostCategorizeTaxonomyLanguage(ctx context.Context, taxonomy string, language PostCategorizeTaxonomyLanguageParamsLanguage, body PostCategorizeTaxonomyLanguageJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) {
	req, err := NewPostCategorizeTaxonomyLanguageRequest(c.Server, taxonomy, language, body)
	if err != nil {
		return nil, err
	}
	req = req.WithContext(ctx)
	if err := c.applyEditors(ctx, req, reqEditors); err != nil {
		return nil, err
	}
	return c.Client.Do(req)
}

// GetContexts builds and sends the GetContexts request.
func (c *Client) GetContexts(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) {
	req, err := NewGetContextsRequest(c.Server)
	if err != nil {
		return nil, err
	}
	req = req.WithContext(ctx)
	if err := c.applyEditors(ctx, req, reqEditors); err != nil {
		return nil, err
	}
	return c.Client.Do(req)
}
// PostDetectDetectorLanguageWithBody builds and sends the
// PostDetectDetectorLanguage request using a caller-supplied body and
// content type.
func (c *Client) PostDetectDetectorLanguageWithBody(ctx context.Context, detector string, language PostDetectDetectorLanguageParamsLanguage, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) {
	req, err := NewPostDetectDetectorLanguageRequestWithBody(c.Server, detector, language, contentType, body)
	if err != nil {
		return nil, err
	}
	req = req.WithContext(ctx)
	if err := c.applyEditors(ctx, req, reqEditors); err != nil {
		return nil, err
	}
	return c.Client.Do(req)
}

// PostDetectDetectorLanguage builds and sends the PostDetectDetectorLanguage
// request with a JSON-encoded body.
func (c *Client) PostDetectDetectorLanguage(ctx context.Context, detector string, language PostDetectDetectorLanguageParamsLanguage, body PostDetectDetectorLanguageJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) {
	req, err := NewPostDetectDetectorLanguageRequest(c.Server, detector, language, body)
	if err != nil {
		return nil, err
	}
	req = req.WithContext(ctx)
	if err := c.applyEditors(ctx, req, reqEditors); err != nil {
		return nil, err
	}
	return c.Client.Do(req)
}

// GetDetectors builds and sends the GetDetectors request.
func (c *Client) GetDetectors(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) {
	req, err := NewGetDetectorsRequest(c.Server)
	if err != nil {
		return nil, err
	}
	req = req.WithContext(ctx)
	if err := c.applyEditors(ctx, req, reqEditors); err != nil {
		return nil, err
	}
	return c.Client.Do(req)
}
// GetTaxonomies builds and sends the GetTaxonomies request.
func (c *Client) GetTaxonomies(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) {
	req, err := NewGetTaxonomiesRequest(c.Server)
	if err != nil {
		return nil, err
	}
	req = req.WithContext(ctx)
	if err := c.applyEditors(ctx, req, reqEditors); err != nil {
		return nil, err
	}
	return c.Client.Do(req)
}

// GetTaxonomiesTaxonomyLanguage builds and sends the
// GetTaxonomiesTaxonomyLanguage request for the given taxonomy and language.
func (c *Client) GetTaxonomiesTaxonomyLanguage(ctx context.Context, taxonomy string, language GetTaxonomiesTaxonomyLanguageParamsLanguage, reqEditors ...RequestEditorFn) (*http.Response, error) {
	req, err := NewGetTaxonomiesTaxonomyLanguageRequest(c.Server, taxonomy, language)
	if err != nil {
		return nil, err
	}
	req = req.WithContext(ctx)
	if err := c.applyEditors(ctx, req, reqEditors); err != nil {
		return nil, err
	}
	return c.Client.Do(req)
}
// NewPostAnalyzeContextLanguageRequest calls the generic
// PostAnalyzeContextLanguage builder with an application/json body.
func NewPostAnalyzeContextLanguageRequest(server string, context string, language PostAnalyzeContextLanguageParamsLanguage, body PostAnalyzeContextLanguageJSONRequestBody) (*http.Request, error) {
	// Serialize the request body, then delegate to the generic builder.
	payload, err := json.Marshal(body)
	if err != nil {
		return nil, err
	}
	return NewPostAnalyzeContextLanguageRequestWithBody(server, context, language, "application/json", bytes.NewReader(payload))
}
// NewPostAnalyzeContextLanguageRequestWithBody generates a request for
// PostAnalyzeContextLanguage with any type of body and content type.
func NewPostAnalyzeContextLanguageRequestWithBody(server string, context string, language PostAnalyzeContextLanguageParamsLanguage, contentType string, body io.Reader) (*http.Request, error) {
	// Render both path parameters using OpenAPI "simple" style.
	ctxSegment, err := runtime.StyleParamWithLocation("simple", false, "context", runtime.ParamLocationPath, context)
	if err != nil {
		return nil, err
	}
	langSegment, err := runtime.StyleParamWithLocation("simple", false, "language", runtime.ParamLocationPath, language)
	if err != nil {
		return nil, err
	}
	base, err := url.Parse(server)
	if err != nil {
		return nil, err
	}
	// Make the operation path relative ("./analyze/...") so url.Parse appends
	// it to any path component already present in the server URL.
	opPath := fmt.Sprintf("/analyze/%s/%s", ctxSegment, langSegment)
	if opPath[0] == '/' {
		opPath = "." + opPath
	}
	queryURL, err := base.Parse(opPath)
	if err != nil {
		return nil, err
	}
	req, err := http.NewRequest("POST", queryURL.String(), body)
	if err != nil {
		return nil, err
	}
	req.Header.Add("Content-Type", contentType)
	return req, nil
}
// NewPostAnalyzeContextLanguageAnalysisRequest calls the generic
// PostAnalyzeContextLanguageAnalysis builder with an application/json body.
func NewPostAnalyzeContextLanguageAnalysisRequest(server string, context string, language PostAnalyzeContextLanguageAnalysisParamsLanguage, analysis PostAnalyzeContextLanguageAnalysisParamsAnalysis, body PostAnalyzeContextLanguageAnalysisJSONRequestBody) (*http.Request, error) {
	// Serialize the request body, then delegate to the generic builder.
	payload, err := json.Marshal(body)
	if err != nil {
		return nil, err
	}
	return NewPostAnalyzeContextLanguageAnalysisRequestWithBody(server, context, language, analysis, "application/json", bytes.NewReader(payload))
}
// NewPostAnalyzeContextLanguageAnalysisRequestWithBody generates requests for PostAnalyzeContextLanguageAnalysis with any type of body
func NewPostAnalyzeContextLanguageAnalysisRequestWithBody(server string, context string, language PostAnalyzeContextLanguageAnalysisParamsLanguage, analysis PostAnalyzeContextLanguageAnalysisParamsAnalysis, contentType string, body io.Reader) (*http.Request, error) {
var err error
var pathParam0 string
pathParam0, err = runtime.StyleParamWithLocation("simple", false, "context", runtime.ParamLocationPath, context)
if err != nil {
return nil, err
}
var pathParam1 string
pathParam1, err = runtime.StyleParamWithLocation("simple", false, "language", runtime.ParamLocationPath, language)
if err != nil {
return nil, err
}