diff --git a/connectors/elasticsearch/config.go b/connectors/elasticsearch/config.go new file mode 100755 index 0000000..4d125ce --- /dev/null +++ b/connectors/elasticsearch/config.go @@ -0,0 +1,164 @@ +package elasticsearch + +import ( + "crypto/tls" + "crypto/x509" + _ "embed" + "fmt" + "gopkg.in/yaml.v3" + "net/http" + "strings" + + "github.com/elastic/go-elasticsearch/v8" + "github.com/pkg/errors" +) + +//go:embed readme.md +var docString string + +type Config struct { + Hosts []string `yaml:"hosts"` // List of Elasticsearch nodes (e.g., ["http://localhost:9200"]) + Username string `yaml:"user"` // Elasticsearch username (if authentication is enabled) + Password string `yaml:"password"` // Elasticsearch password + EnableTLS bool `yaml:"enableTLS"` // Enable TLS/SSL connection + CertFile string `yaml:"certFile"` + IsReadonly bool `yaml:"is_readonly"` +} + +func (c Config) Readonly() bool { + return c.IsReadonly +} + +func (c Config) ExtraPrompt() []string { + return []string{ + "Database: Elasticsearch (NoSQL search engine).", + "Queries must use Elasticsearch Query DSL in JSON format.", + "Paginate with 'from' and 'size' instead of 'offset' and 'limit'.", + "Elasticsearch Query DSL with Mustache templating syntax.", + "Elasticsearch queries are written as JSON objects and sent to the _search/template endpoint.", + "For Elasticsearch, queries must be written using Mustache syntax.", + "Use double curly braces {{param}} for dynamic variables.", + "Hierarchical data should use 'nested' fields or parent-child relationships.", + "The final output must contain *only valid single JSON* with no additional commentary, explanations, or markdown formatting!", + } +} + +// TLSConfig generates TLS settings +func (c Config) TLSConfig() (*tls.Config, error) { + if !c.EnableTLS { + return nil, nil + } + + rootCertPool := x509.NewCertPool() + if len(c.CertFile) > 0 { + ok := rootCertPool.AppendCertsFromPEM([]byte(c.CertFile)) + if !ok { + return nil, errors.New("unable to add 
TLS certificate to cert pool") + } + } + + return &tls.Config{ + RootCAs: rootCertPool, + InsecureSkipVerify: len(c.CertFile) == 0, + }, nil +} + +// UnmarshalYAML allows parsing either a direct connection string or a full configuration object. +func (c *Config) UnmarshalYAML(value *yaml.Node) error { + // Attempt to parse as a single connection string (e.g., "http://user:pass@localhost:9200") + var connString string + if err := value.Decode(&connString); err == nil { + // If it's a valid connection string, extract components + esConfig, err := parseElasticsearchConnString(connString) + if err != nil { + return err + } + + c.Hosts = esConfig.Addresses + c.Username = esConfig.Username + c.Password = esConfig.Password + return nil + } + + // If not a string, attempt to parse as a full configuration object + type configAlias Config // Use alias to avoid infinite recursion + var alias configAlias + if err := value.Decode(&alias); err != nil { + return errors.Wrap(err, "failed to unmarshal YAML into Config") + } + + // Copy parsed fields into the original struct + *c = Config(alias) + return nil +} + +// MakeConfig constructs an Elasticsearch configuration +func (c Config) MakeConfig() (*elasticsearch.Config, error) { + if len(c.Hosts) == 0 { + return nil, errors.New("no Elasticsearch hosts provided") + } + + // Ensure correct protocol handling + addresses := make([]string, len(c.Hosts)) + for i, host := range c.Hosts { + if !strings.HasPrefix(host, "http://") && !strings.HasPrefix(host, "https://") { + protocol := "http" + if c.EnableTLS { + protocol = "https" + } + host = fmt.Sprintf("%s://%s", protocol, host) + } + addresses[i] = host + } + + tlsConfig, err := c.TLSConfig() + if err != nil { + return nil, err + } + + return &elasticsearch.Config{ + Addresses: addresses, + Username: c.Username, + Password: c.Password, + Transport: &http.Transport{ + TLSClientConfig: tlsConfig, + }, + }, nil +} + +func (c Config) Type() string { + return "elasticsearch" +} + +func (c 
Config) Doc() string { + return docString +} + +// parseElasticsearchConnString parses the connection string into an Elasticsearch Config +func parseElasticsearchConnString(connString string) (*elasticsearch.Config, error) { + if !strings.HasPrefix(connString, "http://") && !strings.HasPrefix(connString, "https://") { + return nil, errors.New("invalid Elasticsearch connection string format") + } + + var username, password, host string + host = connString + + if strings.Contains(connString, "@") { + parts := strings.SplitN(connString, "@", 2) + authParts := strings.SplitN(parts[0], "://", 2) + if len(authParts) == 2 { + creds := strings.SplitN(authParts[1], ":", 2) + if len(creds) == 2 { + username = creds[0] + password = creds[1] + } + } + host = authParts[0] + "://" + parts[1] + } + + return &elasticsearch.Config{ + Addresses: []string{host}, + Username: username, + Password: password, + }, nil +} diff --git a/connectors/elasticsearch/connector.go b/connectors/elasticsearch/connector.go new file mode 100755 index 0000000..10f6f51 --- /dev/null +++ b/connectors/elasticsearch/connector.go @@ -0,0 +1,424 @@ +package elasticsearch + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "github.com/centralmind/gateway/castx" + "github.com/centralmind/gateway/connectors" + "github.com/centralmind/gateway/model" + "github.com/elastic/go-elasticsearch/v8" + "golang.org/x/xerrors" + "io" + "strings" +) + +const limitOfDocuments = 5 + +func init() { + connectors.Register(func(cfg Config) (connectors.Connector, error) { + config, err := cfg.MakeConfig() + if err != nil { + return nil, xerrors.Errorf("unable to prepare Elasticsearch config: %w", err) + } + client, err := elasticsearch.NewClient(*config) + if err != nil { + return nil, xerrors.Errorf("unable to create Elasticsearch client: %w", err) + } + return &Connector{ + config: cfg, + client: client, + }, nil + }) +} + +var _ connectors.Connector = (*Connector)(nil) + +// Connector implements the connectors.Connector interface for
Elasticsearch +type Connector struct { + config Config + client *elasticsearch.Client +} + +func (c *Connector) Config() connectors.Config { + return &c.config +} + +// Ping checks if Elasticsearch is reachable +func (c *Connector) Ping(ctx context.Context) error { + res, err := c.client.Ping(c.client.Ping.WithContext(ctx)) + if err != nil { + return fmt.Errorf("failed to ping Elasticsearch: %w", err) + } + defer res.Body.Close() + return nil +} + +// Query executes a search query in Elasticsearch +func (c *Connector) Query(ctx context.Context, endpoint model.Endpoint, params map[string]any) ([]map[string]any, error) { + processed, err := castx.ParamsE(endpoint, params) + if err != nil { + return nil, xerrors.Errorf("unable to process params: %w", err) + } + + finalQuery := map[string]interface{}{ + "source": endpoint.Query, + "params": processed, + } + + var buf bytes.Buffer + err = json.NewEncoder(&buf).Encode(finalQuery) + if err != nil { + return nil, xerrors.Errorf("unable to encode query: %w", err) + } + + res, err := c.client.API.SearchTemplate( + &buf, + c.client.SearchTemplate.WithContext(ctx), + ) + if err != nil { + return nil, xerrors.Errorf("failed to execute search query: %w", err) + } + + defer res.Body.Close() + + // Read response body + body, err := io.ReadAll(res.Body) + if err != nil { + return nil, xerrors.Errorf("failed to read Elasticsearch response: %w", err) + } + + // Check for errors + if res.IsError() { + return nil, xerrors.Errorf("Elasticsearch returned an error: %s", body) + } + + // Parse JSON response + var result map[string]interface{} + err = json.Unmarshal(body, &result) + if err != nil { + return nil, xerrors.Errorf("failed to parse Elasticsearch response: %w", err) + } + + var hits []interface{} + if hitsMap, ok := result["hits"].(map[string]interface{}); ok { + if hitsList, ok := hitsMap["hits"].([]interface{}); ok { + hits = hitsList + } else { + return nil, xerrors.Errorf("'hits' key is missing or not a list") + } + } else { 
+ return nil, xerrors.Errorf("'hits' key is missing or not a map") + } + // Process the results + results := make([]map[string]interface{}, 0) + for _, hit := range hits { + hitMap, ok := hit.(map[string]interface{}) + if !ok { + continue + } + + source, ok := hitMap["_source"].(map[string]interface{}) + if !ok { + continue + } + + results = append(results, source) + } + + return results, nil +} + +// Discovery retrieves available indices in Elasticsearch +func (c *Connector) Discovery(ctx context.Context) ([]model.Table, error) { + // Get a list of indices, but limit processing for large indices + res, err := c.client.Cat.Indices( + c.client.Cat.Indices.WithContext(ctx), + c.client.Cat.Indices.WithFormat("json"), + ) + if err != nil { + return nil, xerrors.Errorf("failed to get indices: %w", err) + } + defer res.Body.Close() + + // Read response body + body, err := io.ReadAll(res.Body) + if err != nil { + return nil, xerrors.Errorf("failed to read indices response: %w", err) + } + + // Parse JSON response + var indices []map[string]interface{} + if err := json.Unmarshal(body, &indices); err != nil { + return nil, xerrors.Errorf("failed to parse indices response: %w", err) + } + // Process only a subset of indices (if necessary) + var tables []model.Table + for _, index := range indices { + indexName, ok := index["index"].(string) + if !ok { + continue + } + + // Instead of fetching all mappings, **sample documents** to infer fields + columns, err := c.sampleIndexFields(ctx, indexName) + if err != nil { + return nil, xerrors.Errorf("failed to infer schema for index %s: %w", indexName, err) + } + + // Get document count efficiently + rowCount, err := c.getDocumentCount(ctx, indexName) + if err != nil { + return nil, xerrors.Errorf("failed to get row count for index %s: %w", indexName, err) + } + + tables = append(tables, model.Table{ + Name: indexName, + Columns: columns, + RowCount: rowCount, + }) + } + + return tables, nil +} + +func (c *Connector) 
getDocumentCount(ctx context.Context, indexName string) (int, error) { + // Execute _count API request + res, err := c.client.Count( + c.client.Count.WithContext(ctx), + c.client.Count.WithIndex(indexName), + ) + if err != nil { + return 0, xerrors.Errorf("failed to execute _count API: %w", err) + } + defer res.Body.Close() + + // Read response body + body, err := io.ReadAll(res.Body) + if err != nil { + return 0, xerrors.Errorf("failed to read _count response: %w", err) + } + + // Parse JSON response + var countResponse map[string]interface{} + if err := json.Unmarshal(body, &countResponse); err != nil { + return 0, xerrors.Errorf("failed to parse _count response: %w", err) + } + + // Extract document count + count, ok := countResponse["count"].(float64) + if !ok { + return 0, xerrors.Errorf("unexpected _count response format") + } + + return int(count), nil +} + +// Sample retrieves a few sample documents from an index +func (c *Connector) Sample(ctx context.Context, table model.Table) ([]map[string]any, error) { + query := map[string]interface{}{ + "size": limitOfDocuments, + "query": map[string]interface{}{ + "match_all": map[string]interface{}{}, + }, + } + + // Convert query to JSON + queryBytes, err := json.Marshal(query) + if err != nil { + return nil, xerrors.Errorf("failed to marshal sample query: %w", err) + } + + // Execute the search request, scoped to the requested index + res, err := c.client.Search( + c.client.Search.WithContext(ctx), + c.client.Search.WithIndex(table.Name), c.client.Search.WithBody(bytes.NewReader(queryBytes)), + ) + if err != nil { + return nil, xerrors.Errorf("failed to fetch sample documents: %w", err) + } + defer res.Body.Close() + + // Read response body + body, err := io.ReadAll(res.Body) + if err != nil { + return nil, xerrors.Errorf("failed to read sample response: %w", err) + } + + // Parse JSON response + var result map[string]interface{} + err = json.Unmarshal(body, &result) + if err != nil { + return nil, xerrors.Errorf("failed to parse sample response: %w", err) + } + + // Extract search
hits + var hits []interface{} + if hitsMap, ok := result["hits"].(map[string]interface{}); ok { + if hitsList, ok := hitsMap["hits"].([]interface{}); ok { + hits = hitsList + } else { + return nil, xerrors.Errorf("'hits' key is missing or not a list") + } + } else { + return nil, xerrors.Errorf("'hits' key is missing or not a map") + } + + // Process the results + results := make([]map[string]interface{}, len(hits)) + for i, hit := range hits { + hitMap := hit.(map[string]interface{}) + results[i] = hitMap["_source"].(map[string]interface{}) + } + + return results, nil +} + +func (c *Connector) InferQuery(ctx context.Context, query string) ([]model.ColumnSchema, error) { + // Query multiple documents for better inference + // Ensure the query is properly formatted JSON + var esQuery map[string]interface{} + err := json.Unmarshal([]byte(query), &esQuery) + if err != nil { + return nil, xerrors.Errorf("invalid query format: %w", err) + } + + // Execute the query in Elasticsearch + res, err := c.client.Search( + c.client.Search.WithContext(ctx), + c.client.Search.WithBody(strings.NewReader(query)), + ) + if err != nil { + return nil, xerrors.Errorf("failed to execute query: %w", err) + } + defer res.Body.Close() + + // Read response body + body, err := io.ReadAll(res.Body) + if err != nil { + return nil, xerrors.Errorf("failed to read Elasticsearch response: %w", err) + } + + // Parse JSON response + var result map[string]interface{} + err = json.Unmarshal(body, &result) + if err != nil { + return nil, xerrors.Errorf("failed to parse Elasticsearch response: %w", err) + } + + // Extract "hits" (documents) + var hits []interface{} + if hitsMap, ok := result["hits"].(map[string]interface{}); ok { + if hitsList, ok := hitsMap["hits"].([]interface{}); ok { + hits = hitsList + } else { + return nil, xerrors.Errorf("'hits' key is missing or not a list") + } + } else { + return nil, xerrors.Errorf("'hits' key is missing or not a map") + } + // Use the helper function + return 
c.extractColumnsFromHits(hits) +} + +func (c *Connector) GuessColumnType(sqlType string) model.ColumnType { + switch strings.ToLower(sqlType) { + case "text", "keyword", "string": + return model.TypeString + case "long", "integer", "short", "byte", "int", "int64": + return model.TypeInteger + case "float", "double", "half_float", "scaled_float", "float64", "float32": + return model.TypeNumber + case "boolean", "bool": + return model.TypeBoolean + case "date": + return model.TypeDatetime + case "object", "nested", "map[string]interface {}": + return model.TypeObject + case "array", "[]interface {}": + return model.TypeArray + default: + return model.TypeString // Default fallback + } +} + +func (c *Connector) sampleIndexFields(ctx context.Context, indexName string) ([]model.ColumnSchema, error) { + N := 100 // Sample up to 100 documents + query := map[string]interface{}{ + "size": N, + "query": map[string]interface{}{ + "match_all": map[string]interface{}{}, + }, + } + + // Convert query to JSON + queryBytes, err := json.Marshal(query) + if err != nil { + return nil, xerrors.Errorf("failed to marshal sample query: %w", err) + } + + // Execute search request + res, err := c.client.Search( + c.client.Search.WithContext(ctx), + c.client.Search.WithIndex(indexName), + c.client.Search.WithBody(bytes.NewReader(queryBytes)), + ) + if err != nil { + return nil, xerrors.Errorf("failed to execute sample query: %w", err) + } + defer res.Body.Close() + + // Read response body + body, err := io.ReadAll(res.Body) + if err != nil { + return nil, xerrors.Errorf("failed to read search response: %w", err) + } + + // Parse JSON response + var result map[string]interface{} + if err := json.Unmarshal(body, &result); err != nil { + return nil, xerrors.Errorf("failed to parse search response: %w", err) + } + + // Extract search hits (sample documents) + hits, ok := result["hits"].(map[string]interface{})["hits"].([]interface{}) + if !ok { + return nil, xerrors.Errorf("unexpected response structure from Elasticsearch") + } + + // Use the helper function + return
c.extractColumnsFromHits(hits) +} + +func (c *Connector) extractColumnsFromHits(hits []interface{}) ([]model.ColumnSchema, error) { + // Track field types across multiple documents + fieldTypeMap := make(map[string]string) + + // Iterate through sample documents + for _, hit := range hits { + doc, ok := hit.(map[string]interface{})["_source"].(map[string]interface{}) + if !ok { + continue + } + + for field, value := range doc { + fieldType := fmt.Sprintf("%T", value) // Get Go type as a string + if _, exists := fieldTypeMap[field]; !exists { + fieldTypeMap[field] = fieldType + } + } + } + + // Convert detected types to ColumnSchema + var columns []model.ColumnSchema + for field, detectedType := range fieldTypeMap { + columnType := c.GuessColumnType(detectedType) // Convert to SQL-like type + columns = append(columns, model.ColumnSchema{ + Name: field, + Type: columnType, + }) + } + + return columns, nil +} diff --git a/connectors/elasticsearch/connector_test.go b/connectors/elasticsearch/connector_test.go new file mode 100644 index 0000000..c53231a --- /dev/null +++ b/connectors/elasticsearch/connector_test.go @@ -0,0 +1,150 @@ +package elasticsearch + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "github.com/centralmind/gateway/model" + "github.com/docker/go-connections/nat" + "github.com/sirupsen/logrus" + "strconv" + "testing" + "time" + + "github.com/centralmind/gateway/connectors" + es "github.com/elastic/go-elasticsearch/v8" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/testcontainers/testcontainers-go/modules/elasticsearch" +) + +func TestElasticsearchConnectorWithAuth(t *testing.T) { + ctx := context.Background() + esPassword := "test" + esUserName := "elastic" + + esContainer, err := elasticsearch.Run(ctx, + "docker.elastic.co/elasticsearch/elasticsearch:8.6.2", + elasticsearch.WithPassword(esPassword), + ) + require.NoError(t, err) + defer func() { + require.NoError(t, 
esContainer.Terminate(ctx)) + }() + esPort, _ := esContainer.MappedPort(ctx, nat.Port("9200")) + esURL := fmt.Sprintf("http://localhost:%s", esPort.Port()) + logrus.Info("Elasticsearch URL:", esURL) + + cfg := Config{ + Hosts: []string{ + esContainer.Settings.Address, + }, + Username: "elastic", + Password: esContainer.Settings.Password, + EnableTLS: true, + CertFile: string(esContainer.Settings.CACert), + } + + connector, err := connectors.New("elasticsearch", cfg) + assert.NoError(t, err) + assert.NotNil(t, connector) + + // Create test index and insert sample data + t.Run("Setup Test Data", func(t *testing.T) { + esClient, err := es.NewClient(es.Config{ + Addresses: []string{ + esContainer.Settings.Address, + }, + Username: esUserName, + Password: esContainer.Settings.Password, + CACert: esContainer.Settings.CACert, + }) + require.NoError(t, err) + + // Create index with mapping + mapping := `{ + "mappings": { + "properties": { + "name": {"type": "text"}, + "age": {"type": "integer"}, + "city": {"type": "text"}, + "job": {"type": "text"}, + "created_at": {"type": "date"} + } + } + }` + _, err = esClient.Indices.Create("test_users", esClient.Indices.Create.WithBody(bytes.NewReader([]byte(mapping)))) + require.NoError(t, err) + + // Insert sample documents + sampleDocs := []map[string]interface{}{ + {"name": "Alice", "age": 28, "city": "New York", "job": "Engineer", "created_at": "2024-03-12T12:00:00Z"}, + {"name": "Bob", "age": 35, "city": "San Francisco", "job": "Designer", "created_at": "2024-03-11T10:30:00Z"}, + } + for i, doc := range sampleDocs { + docJSON, _ := json.Marshal(doc) + docID := strconv.Itoa(i + 1) // ✅ Convert i+1 to string + _, err := esClient.Index("test_users", bytes.NewReader(docJSON), esClient.Index.WithDocumentID(docID)) + require.NoError(t, err) + } + }) + + // Test: Ping Elasticsearch + t.Run("Ping Elasticsearch", func(t *testing.T) { + err := connector.Ping(ctx) + assert.NoError(t, err) + }) + + // Test: Discover Indices + 
t.Run("Discover Indices", func(t *testing.T) { + indices, err := connector.Discovery(ctx) + assert.NoError(t, err) + assert.NotEmpty(t, indices) + + found := false + for _, index := range indices { + if index.Name == "test_users" { + found = true + break + } + } + assert.True(t, found, "Expected index 'test_users' to exist") + }) + + // Test: Query Elasticsearch + t.Run("Query Documents", func(t *testing.T) { + // When you insert documents, they are not immediately searchable. + // Elasticsearch requires a short period to index the data. + time.Sleep(2.0 * time.Second) + endpoint := model.Endpoint{ + Query: `{ + "query": { + "match": { + "job": "{{job}}" + } + } + }`, + Params: []model.EndpointParams{ + {Name: "job", Type: "string", Required: true}, + }, + } + + params := map[string]any{ + "job": "Engineer", + } + + rows, err := connector.Query(ctx, endpoint, params) + assert.NoError(t, err) + assert.Len(t, rows, 1) + + expected := map[string]any{ + "name": "Alice", + "age": float64(28), // JSON numbers default to float64 in Go + "city": "New York", + "job": "Engineer", + "created_at": "2024-03-12T12:00:00Z", + } + assert.Equal(t, expected, rows[0]) + }) +} diff --git a/connectors/elasticsearch/readme.md b/connectors/elasticsearch/readme.md new file mode 100755 index 0000000..e69de29 diff --git a/go.mod b/go.mod index 3110530..f7d941f 100644 --- a/go.mod +++ b/go.mod @@ -11,6 +11,7 @@ require ( github.com/charmbracelet/glamour v0.8.0 github.com/danielgtaylor/huma/v2 v2.30.0 github.com/docker/go-connections v0.5.0 + github.com/elastic/go-elasticsearch/v8 v8.17.1 github.com/gin-gonic/gin v1.10.0 github.com/go-sql-driver/mysql v1.7.1 github.com/hashicorp/golang-lru/v2 v2.0.7 @@ -29,6 +30,7 @@ require ( github.com/stretchr/testify v1.10.0 github.com/testcontainers/testcontainers-go v0.35.0 github.com/testcontainers/testcontainers-go/modules/clickhouse v0.35.0 + github.com/testcontainers/testcontainers-go/modules/elasticsearch v0.35.0 
github.com/testcontainers/testcontainers-go/modules/gcloud v0.35.0 github.com/testcontainers/testcontainers-go/modules/postgres v0.35.0 github.com/yuin/gopher-lua v1.1.1 @@ -66,6 +68,7 @@ require ( github.com/charmbracelet/lipgloss v0.12.1 // indirect github.com/charmbracelet/x/ansi v0.1.4 // indirect github.com/dlclark/regexp2 v1.11.0 // indirect + github.com/elastic/elastic-transport-go/v8 v8.6.1 // indirect github.com/go-viper/mapstructure/v2 v2.2.1 // indirect github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect github.com/golang-sql/sqlexp v0.1.0 // indirect diff --git a/go.sum b/go.sum index d5374b8..aedf5e7 100644 --- a/go.sum +++ b/go.sum @@ -834,6 +834,10 @@ github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3 github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dvsekhvalnov/jose2go v1.6.0 h1:Y9gnSnP4qEI0+/uQkHvFXeD2PLPJeXEL+ySMEA2EjTY= github.com/dvsekhvalnov/jose2go v1.6.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU= +github.com/elastic/elastic-transport-go/v8 v8.6.1 h1:h2jQRqH6eLGiBSN4eZbQnJLtL4bC5b4lfVFRjw2R4e4= +github.com/elastic/elastic-transport-go/v8 v8.6.1/go.mod h1:YLHer5cj0csTzNFXoNQ8qhtGY1GTvSqPnKWKaqQE3Hk= +github.com/elastic/go-elasticsearch/v8 v8.17.1 h1:bOXChDoCMB4TIwwGqKd031U8OXssmWLT3UrAr9EGs3Q= +github.com/elastic/go-elasticsearch/v8 v8.17.1/go.mod h1:MVJCtL+gJJ7x5jFeUmA20O7rvipX8GcQmo5iBcmaJn4= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= @@ -1330,6 +1334,8 @@ github.com/testcontainers/testcontainers-go v0.35.0 h1:uADsZpTKFAtp8SLK+hMwSaa+X github.com/testcontainers/testcontainers-go v0.35.0/go.mod h1:oEVBj5zrfJTrgjwONs1SsRbnBtH9OKl+IGl3UMcr2B4= 
github.com/testcontainers/testcontainers-go/modules/clickhouse v0.35.0 h1:A4NESGwof4RK+i/pjL0lAVu0JxNZIvLR35ZFB9DKgYQ= github.com/testcontainers/testcontainers-go/modules/clickhouse v0.35.0/go.mod h1:nT0LQ4rqTljX5Ub0Q3GdFrVXRYSrjK6p7RuJPpUE4wg= +github.com/testcontainers/testcontainers-go/modules/elasticsearch v0.35.0 h1:rDmyDK7URBMIJCK66fG7B+yhxBSlIWCw+/5sX4b0cHs= +github.com/testcontainers/testcontainers-go/modules/elasticsearch v0.35.0/go.mod h1:KEfm2TF2HBh2ysNyXYzjPCm6mAJtIqoxttXic8Pvtl8= github.com/testcontainers/testcontainers-go/modules/gcloud v0.35.0 h1:8tdSCu4ey2ye3pXjo4I0GPcnYZb66dJN3qN6Ebqpjcg= github.com/testcontainers/testcontainers-go/modules/gcloud v0.35.0/go.mod h1:jtvudSR4XxV/NTRHfhD5/wf2xlF5OPuiQmNSB0PM6XM= github.com/testcontainers/testcontainers-go/modules/mysql v0.35.0 h1:9voGAf+1KxC0ck/XtrC/AUrkr74SSGpQRBp0O851B3Y= diff --git a/main.go b/main.go index 5dd243e..f16befb 100644 --- a/main.go +++ b/main.go @@ -8,6 +8,7 @@ import ( "github.com/centralmind/gateway/cli" _ "github.com/centralmind/gateway/connectors/bigquery" _ "github.com/centralmind/gateway/connectors/clickhouse" + _ "github.com/centralmind/gateway/connectors/elasticsearch" _ "github.com/centralmind/gateway/connectors/mssql" _ "github.com/centralmind/gateway/connectors/mysql" _ "github.com/centralmind/gateway/connectors/oracle"