From cfd486a7a225ff38d73ab6319664b266beb04492 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Fri, 8 Nov 2024 10:34:00 -0500 Subject: [PATCH 01/55] ssapi mvp --- .gitignore | 1 + factories/receivers.go | 2 + go.mod | 2 + receiver/splunksearchapireceiver/README.md | 1 + receiver/splunksearchapireceiver/api.go | 121 +++++++++++++++ receiver/splunksearchapireceiver/config.go | 80 ++++++++++ receiver/splunksearchapireceiver/factory.go | 36 +++++ receiver/splunksearchapireceiver/go.mod | 30 ++++ receiver/splunksearchapireceiver/go.sum | 98 +++++++++++++ receiver/splunksearchapireceiver/model.go | 39 +++++ receiver/splunksearchapireceiver/receiver.go | 147 +++++++++++++++++++ 11 files changed, 557 insertions(+) create mode 100644 receiver/splunksearchapireceiver/README.md create mode 100644 receiver/splunksearchapireceiver/api.go create mode 100644 receiver/splunksearchapireceiver/config.go create mode 100644 receiver/splunksearchapireceiver/factory.go create mode 100644 receiver/splunksearchapireceiver/go.mod create mode 100644 receiver/splunksearchapireceiver/go.sum create mode 100644 receiver/splunksearchapireceiver/model.go create mode 100644 receiver/splunksearchapireceiver/receiver.go diff --git a/.gitignore b/.gitignore index 5221bfd9c..174981ba4 100644 --- a/.gitignore +++ b/.gitignore @@ -21,6 +21,7 @@ opentelemetry-java-contrib-jmx-metrics.jar VERSION.txt release_deps /tmp +/local # OpAmp Files collector*.yaml diff --git a/factories/receivers.go b/factories/receivers.go index ca3f3e282..ceb5a4317 100644 --- a/factories/receivers.go +++ b/factories/receivers.go @@ -23,6 +23,7 @@ import ( "github.com/observiq/bindplane-agent/receiver/pluginreceiver" "github.com/observiq/bindplane-agent/receiver/routereceiver" "github.com/observiq/bindplane-agent/receiver/sapnetweaverreceiver" + "github.com/observiq/bindplane-agent/receiver/splunksearchapireceiver" "github.com/observiq/bindplane-agent/receiver/telemetrygeneratorreceiver" 
"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/activedirectorydsreceiver" "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/aerospikereceiver" @@ -157,6 +158,7 @@ var defaultReceivers = []receiver.Factory{ sapnetweaverreceiver.NewFactory(), simpleprometheusreceiver.NewFactory(), snmpreceiver.NewFactory(), + splunksearchapireceiver.NewFactory(), splunkhecreceiver.NewFactory(), sqlqueryreceiver.NewFactory(), sqlserverreceiver.NewFactory(), diff --git a/go.mod b/go.mod index bf1b6bf37..82d9470eb 100644 --- a/go.mod +++ b/go.mod @@ -878,6 +878,8 @@ replace github.com/observiq/bindplane-agent/internal/report => ./internal/report replace github.com/observiq/bindplane-agent/internal/measurements => ./internal/measurements +replace github.com/observiq/bindplane-agent/receiver/splunksearchapireceiver => ./receiver/splunksearchapireceiver + // Does not build with windows and only used in configschema executable // Relevant issue https://github.com/mattn/go-ieproxy/issues/45 replace github.com/mattn/go-ieproxy => github.com/mattn/go-ieproxy v0.0.1 diff --git a/receiver/splunksearchapireceiver/README.md b/receiver/splunksearchapireceiver/README.md new file mode 100644 index 000000000..becbe410b --- /dev/null +++ b/receiver/splunksearchapireceiver/README.md @@ -0,0 +1 @@ +# Splunk Search API Receiver \ No newline at end of file diff --git a/receiver/splunksearchapireceiver/api.go b/receiver/splunksearchapireceiver/api.go new file mode 100644 index 000000000..fcf7fa83c --- /dev/null +++ b/receiver/splunksearchapireceiver/api.go @@ -0,0 +1,121 @@ +package splunksearchapireceiver + +import ( + "bytes" + "crypto/tls" + "encoding/json" + "encoding/xml" + "fmt" + "io" + "net/http" +) + +func createHttpClient() *http.Client { + // TODO: Add functionality to configure TLS settings using config options + tr := &http.Transport{ + TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, // Disables TLS verification + } + return &http.Client{Transport: 
tr} +} + +func createSearchJob(config *Config, search string) (CreateJobResponse, error) { + // fmt.Println("Creating search job for search: ", search) + endpoint := fmt.Sprintf("https://%s/services/search/jobs", config.Server) + + reqBody := fmt.Sprintf(`search=%s`, search) + req, err := http.NewRequest("POST", endpoint, bytes.NewBuffer([]byte(reqBody))) + if err != nil { + return CreateJobResponse{}, err + } + req.SetBasicAuth(config.Username, config.Password) + + client := createHttpClient() + resp, err := client.Do(req) + if err != nil { + return CreateJobResponse{}, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusCreated { + return CreateJobResponse{}, fmt.Errorf("failed to create search job: %d", resp.StatusCode) + } + + var jobResponse CreateJobResponse + body, err := io.ReadAll(resp.Body) + if err != nil { + return CreateJobResponse{}, fmt.Errorf("failed to read search job status response: %v", err) + } + + err = xml.Unmarshal(body, &jobResponse) + if err != nil { + return CreateJobResponse{}, fmt.Errorf("failed to unmarshal search job response: %v", err) + } + return jobResponse, nil +} + +func getJobStatus(config *Config, sid string) (JobStatusResponse, error) { + // fmt.Println("Getting job status") + endpoint := fmt.Sprintf("https://%s/services/search/v2/jobs/%s", config.Server, sid) + + req, err := http.NewRequest("GET", endpoint, nil) + if err != nil { + return JobStatusResponse{}, err + } + req.SetBasicAuth(config.Username, config.Password) + + client := createHttpClient() + resp, err := client.Do(req) + if err != nil { + return JobStatusResponse{}, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return JobStatusResponse{}, fmt.Errorf("failed to get search job status: %d", resp.StatusCode) + } + + body, err := io.ReadAll(resp.Body) + if err != nil { + return JobStatusResponse{}, fmt.Errorf("failed to read search job status response: %v", err) + } + var jobStatusResponse JobStatusResponse + err = 
xml.Unmarshal(body, &jobStatusResponse) + if err != nil { + return JobStatusResponse{}, fmt.Errorf("failed to unmarshal search job response: %v", err) + } + + return jobStatusResponse, nil +} + +func getSearchResults(config *Config, sid string) (SearchResults, error) { + endpoint := fmt.Sprintf("https://%s/services/search/v2/jobs/%s/results?output_mode=json", config.Server, sid) + req, err := http.NewRequest("GET", endpoint, nil) + if err != nil { + return SearchResults{}, err + } + req.SetBasicAuth(config.Username, config.Password) + + client := createHttpClient() + resp, err := client.Do(req) + if err != nil { + return SearchResults{}, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return SearchResults{}, fmt.Errorf("failed to get search job results: %d", resp.StatusCode) + } + + var searchResults SearchResults + body, err := io.ReadAll(resp.Body) + if err != nil { + return SearchResults{}, fmt.Errorf("failed to read search job results response: %v", err) + } + // fmt.Println("Body: ", string(body)) + err = json.Unmarshal(body, &searchResults) + if err != nil { + return SearchResults{}, fmt.Errorf("failed to unmarshal search job results: %v", err) + } + + return searchResults, nil +} diff --git a/receiver/splunksearchapireceiver/config.go b/receiver/splunksearchapireceiver/config.go new file mode 100644 index 000000000..fbb149ad2 --- /dev/null +++ b/receiver/splunksearchapireceiver/config.go @@ -0,0 +1,80 @@ +package splunksearchapireceiver + +import ( + "errors" + "strings" + "time" +) + +type Config struct { + Server string `mapstructure:"splunk_server"` + Username string `mapstructure:"splunk_username"` + Password string `mapstructure:"splunk_password"` + Searches []Search `mapstructure:"searches"` +} + +type Search struct { + Query string `mapstructure:"query"` + EarliestTime string `mapstructure:"earliest_time"` + LatestTime string `mapstructure:"latest_time"` + Limit int `mapstructure:"limit"` +} + +func (cfg *Config) 
Validate() error { + if cfg.Server == "" { + return errors.New("missing Splunk server") + } + if cfg.Username == "" { + return errors.New("missing Splunk username") + } + if cfg.Password == "" { + return errors.New("missing Splunk password") + } + if len(cfg.Searches) == 0 { + return errors.New("at least one search must be provided") + } + + for _, search := range cfg.Searches { + if search.Query == "" { + return errors.New("missing query in search") + } + + // query implicitly starts with "search" command + if !strings.HasPrefix(search.Query, "search ") { + search.Query = "search " + search.Query + } + + if strings.Contains(search.Query, "|") { + return errors.New("command chaining is not supported for queries") + } + + if search.EarliestTime == "" { + return errors.New("missing earliest_time in search") + } + if search.LatestTime == "" { + return errors.New("missing latest_time in search") + } + + // parse time strings to time.Time + earliestTime, err := time.Parse(time.RFC3339, search.EarliestTime) + if err != nil { + return errors.New("earliest_time failed to be parsed as RFC3339") + } + + latestTime, err := time.Parse(time.RFC3339, search.LatestTime) + if err != nil { + return errors.New("latest_time failed to be parsed as RFC3339") + } + + if earliestTime.UTC().After(latestTime.UTC()) { + return errors.New("earliest_time must be earlier than latest_time") + } + if earliestTime.UTC().After(time.Now().UTC()) { + return errors.New("earliest_time must be earlier than current time") + } + if latestTime.UTC().After(time.Now().UTC()) { + return errors.New("latest_time must be earlier than current time") + } + } + return nil +} diff --git a/receiver/splunksearchapireceiver/factory.go b/receiver/splunksearchapireceiver/factory.go new file mode 100644 index 000000000..8a1c9aaa4 --- /dev/null +++ b/receiver/splunksearchapireceiver/factory.go @@ -0,0 +1,36 @@ +package splunksearchapireceiver + +import ( + "context" + + "go.opentelemetry.io/collector/component" + 
"go.opentelemetry.io/collector/consumer" + "go.opentelemetry.io/collector/receiver" +) + +var ( + typeStr = component.MustNewType("splunksearchapi") +) + +func createDefaultConfig() component.Config { + return &Config{} +} + +func createLogsReceiver(_ context.Context, + params receiver.Settings, + cfg component.Config, + consumer consumer.Logs, +) (receiver.Logs, error) { + logger := params.Logger + ssapirConfig := cfg.(*Config) + ssapir := &splunksearchapireceiver{ + logger: logger, + logsConsumer: consumer, + config: ssapirConfig, + } + return ssapir, nil +} + +func NewFactory() receiver.Factory { + return receiver.NewFactory(typeStr, createDefaultConfig, receiver.WithLogs(createLogsReceiver, component.StabilityLevelAlpha)) +} diff --git a/receiver/splunksearchapireceiver/go.mod b/receiver/splunksearchapireceiver/go.mod new file mode 100644 index 000000000..1da1f210c --- /dev/null +++ b/receiver/splunksearchapireceiver/go.mod @@ -0,0 +1,30 @@ +module github.com/open-telemetry/opentelemtry-collector-contrib/receiver/splunksearchapireceiver + +go 1.22.5 + +require ( + go.opentelemetry.io/collector/component v0.112.0 + go.opentelemetry.io/collector/consumer v0.112.0 + go.opentelemetry.io/collector/pdata v1.18.0 + go.opentelemetry.io/collector/receiver v0.112.0 + go.uber.org/zap v1.27.0 +) + +require ( + github.com/gogo/protobuf v1.3.2 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + go.opentelemetry.io/collector/config/configtelemetry v0.112.0 // indirect + go.opentelemetry.io/collector/pipeline v0.112.0 // indirect + go.opentelemetry.io/otel v1.31.0 // indirect + go.opentelemetry.io/otel/metric v1.31.0 // indirect + go.opentelemetry.io/otel/trace v1.31.0 // indirect + go.uber.org/multierr v1.11.0 // indirect + golang.org/x/net v0.28.0 // indirect + golang.org/x/sys v0.26.0 // indirect + golang.org/x/text v0.17.0 // 
indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20240822170219-fc7c04adadcd // indirect + google.golang.org/grpc v1.67.1 // indirect + google.golang.org/protobuf v1.35.1 // indirect +) diff --git a/receiver/splunksearchapireceiver/go.sum b/receiver/splunksearchapireceiver/go.sum new file mode 100644 index 000000000..b66d91c7b --- /dev/null +++ b/receiver/splunksearchapireceiver/go.sum @@ -0,0 +1,98 @@ +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd 
h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +go.opentelemetry.io/collector/component v0.112.0 h1:Hw125Tdb427yKkzFx3U/OsfPATYXsbURkc27dn19he8= +go.opentelemetry.io/collector/component v0.112.0/go.mod h1:hV9PEgkNlVAySX+Oo/g7+NcLe234L04kRXw6uGj3VEw= +go.opentelemetry.io/collector/config/configtelemetry v0.112.0 h1:MVBrWJUoqfKrORI38dY8OV0i5d1RRHR/ACIBu9TOcZ8= +go.opentelemetry.io/collector/config/configtelemetry v0.112.0/go.mod h1:R0MBUxjSMVMIhljuDHWIygzzJWQyZHXXWIgQNxcFwhc= +go.opentelemetry.io/collector/consumer v0.112.0 h1:tfO4FpuQ8MsD7AxgslC3tRNVYjd9Xkus34BOExsG4fM= +go.opentelemetry.io/collector/consumer v0.112.0/go.mod h1:ZKSeGvXvaofIlvPrWlARKQpONOmuw6R/yifgYCWHKRw= +go.opentelemetry.io/collector/consumer/consumerprofiles v0.112.0 h1:ym+QxemlbWwfMSUto1hRTfcZeYbj2q8FpMzjk8O+X60= +go.opentelemetry.io/collector/consumer/consumerprofiles v0.112.0/go.mod h1:4PjDUpURFh85R6NLEHrEf/uZjpk4LAYmmOrqu+iZsyE= +go.opentelemetry.io/collector/consumer/consumertest v0.112.0 
h1:pGvNH+H4rMygUOql6ynVQim6UFdimTiJ0HRfQL6v0GE= +go.opentelemetry.io/collector/consumer/consumertest v0.112.0/go.mod h1:rfVo0tYt/BaLWw3IaQKVQafjUlMsA5qTkvsSOfFrr9c= +go.opentelemetry.io/collector/pdata v1.18.0 h1:/yg2rO2dxqDM2p6GutsMCxXN6sKlXwyIz/ZYyUPONBg= +go.opentelemetry.io/collector/pdata v1.18.0/go.mod h1:Ox1YVLe87cZDB/TL30i4SUz1cA5s6AM6SpFMfY61ICs= +go.opentelemetry.io/collector/pdata/pprofile v0.112.0 h1:t+LYorcMqZ3sDz5/jp3xU2l5lIhIXuIOOGO4Ef9CG2c= +go.opentelemetry.io/collector/pdata/pprofile v0.112.0/go.mod h1:F2aTCoDzIaxEUK1g92LZvMwradySFMo3ZsAnBIpOdUg= +go.opentelemetry.io/collector/pipeline v0.112.0 h1:jqKDdb8k53OLPibvxzX6fmMec0ZHAtqe4p2+cuHclEI= +go.opentelemetry.io/collector/pipeline v0.112.0/go.mod h1:4vOvjVsoYTHVGTbfFwqfnQOSV2K3RKUHofh3jNRc2Mg= +go.opentelemetry.io/collector/receiver v0.112.0 h1:gdTBDOPGKMZlZghtN5A7ZLNlNwCHWYcoJQeIiXvyGEQ= +go.opentelemetry.io/collector/receiver v0.112.0/go.mod h1:3QmfSUiyFzRTnHUqF8fyEvQpU5q/xuwS43jGt8JXEEA= +go.opentelemetry.io/otel v1.31.0 h1:NsJcKPIW0D0H3NgzPDHmo0WW6SptzPdqg/L1zsIm2hY= +go.opentelemetry.io/otel v1.31.0/go.mod h1:O0C14Yl9FgkjqcCZAsE053C13OaddMYr/hz6clDkEJE= +go.opentelemetry.io/otel/metric v1.31.0 h1:FSErL0ATQAmYHUIzSezZibnyVlft1ybhy4ozRPcF2fE= +go.opentelemetry.io/otel/metric v1.31.0/go.mod h1:C3dEloVbLuYoX41KpmAhOqNriGbA+qqH6PQ5E5mUfnY= +go.opentelemetry.io/otel/trace v1.31.0 h1:ffjsj1aRouKewfr85U2aGagJ46+MvodynlQ1HYdmJys= +go.opentelemetry.io/otel/trace v1.31.0/go.mod h1:TXZkRk7SM2ZQLtR6eoAWQFIHPvzQ06FJAsO1tJg480A= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= +go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= +go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= +go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= 
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.28.0 h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE= +golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= +golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod 
h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc= +golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240822170219-fc7c04adadcd h1:6TEm2ZxXoQmFWFlt1vNxvVOa1Q0dXFQD1m/rYjXmS0E= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240822170219-fc7c04adadcd/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU= +google.golang.org/grpc v1.67.1 h1:zWnc1Vrcno+lHZCOofnIMvycFcc0QRGIzm9dhnDX68E= +google.golang.org/grpc v1.67.1/go.mod h1:1gLDyUQU7CTLJI90u3nXZ9ekeghjeM7pTDZlqFNg2AA= +google.golang.org/protobuf v1.35.1 h1:m3LfL6/Ca+fqnjnlqQXNpFPABW1UD7mjh8KO2mKFytA= +google.golang.org/protobuf v1.35.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/receiver/splunksearchapireceiver/model.go b/receiver/splunksearchapireceiver/model.go new file mode 100644 index 000000000..ab0066708 --- 
/dev/null +++ b/receiver/splunksearchapireceiver/model.go @@ -0,0 +1,39 @@ +package splunksearchapireceiver + +// response structs for Splunk API calls +type CreateJobResponse struct { + SID string `xml:"sid"` +} + +type JobStatusResponse struct { + Content struct { + Type string `xml:"type,attr"` + Dict Dict `xml:"dict"` + } `xml:"content"` +} + +// Dict struct to represent elements +type Dict struct { + Keys []Key `xml:"key"` +} + +// Key struct to represent elements +type Key struct { + Name string `xml:"name,attr"` + Value string `xml:",chardata"` + Dict *Dict `xml:"dict,omitempty"` + List *List `xml:"list,omitempty"` +} + +type List struct { + Items []struct { + Value string `xml:",chardata"` + } `xml:"item"` +} + +type SearchResults struct { + Results []struct { + Raw string `json:"_raw"` + Time string `json:"_time"` + } `json:"results"` +} diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go new file mode 100644 index 000000000..77f23aef6 --- /dev/null +++ b/receiver/splunksearchapireceiver/receiver.go @@ -0,0 +1,147 @@ +package splunksearchapireceiver + +import ( + "context" + "time" + + "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/consumer" + "go.opentelemetry.io/collector/pdata/pcommon" + "go.opentelemetry.io/collector/pdata/plog" + "go.uber.org/zap" +) + +type splunksearchapireceiver struct { + host component.Host + logger *zap.Logger + logsConsumer consumer.Logs + config *Config +} + +func (ssapir *splunksearchapireceiver) Start(ctx context.Context, host component.Host) error { + ssapir.host = host + go ssapir.runQueries(ctx) + return nil +} + +func (ssapir *splunksearchapireceiver) Shutdown(ctx context.Context) error { + return nil +} + +func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { + for _, search := range ssapir.config.Searches { + // create search in Splunk + searchID, err := createSplunkSearch(ssapir.config, search.Query) + if err != 
nil { + ssapir.logger.Error("error creating search", zap.Error(err)) + } + // fmt.Println("Search created successfully with ID: ", searchID) + + // wait for search to complete + for { + done, err := isSearchCompleted(ssapir.config, searchID) + if err != nil { + ssapir.logger.Error("error checking search status", zap.Error(err)) + } + if done { + break + } + time.Sleep(2 * time.Second) + } + // fmt.Println("Search completed successfully") + + // fetch search results + results, err := getSplunkSearchResults(ssapir.config, searchID) + if err != nil { + ssapir.logger.Error("error fetching search results", zap.Error(err)) + } + // fmt.Println("Search results: ", results) + + // parse time strings to time.Time + earliestTime, err := time.Parse(time.RFC3339, search.EarliestTime) + if err != nil { + // should be impossible to reach with config validation + ssapir.logger.Error("earliest_time failed to be parsed as RFC3339", zap.Error(err)) + } + + latestTime, err := time.Parse(time.RFC3339, search.LatestTime) + if err != nil { + // should be impossible to reach with config validation + ssapir.logger.Error("latest_time failed to be parsed as RFC3339", zap.Error(err)) + } + + logs := plog.NewLogs() + for idx, splunkLog := range results.Results { + if idx >= search.Limit && search.Limit != 0 { + break + } + // convert log timestamp to ISO8601 (UTC() makes RFC3339 into ISO8601) + logTimestamp, err := time.Parse(time.RFC3339, splunkLog.Time) + if err != nil { + ssapir.logger.Error("error parsing log timestamp", zap.Error(err)) + break + } + if logTimestamp.UTC().After(latestTime.UTC()) { + ssapir.logger.Info("skipping log entry - timestamp after latestTime", zap.Time("time", logTimestamp.UTC()), zap.Time("latestTime", latestTime.UTC())) + // logger.Info will only log up to 10 times for a given code block, known weird behavior + continue + } + if logTimestamp.UTC().Before(earliestTime) { + ssapir.logger.Info("skipping log entry - timestamp before earliestTime", zap.Time("time", 
logTimestamp.UTC()), zap.Time("earliestTime", earliestTime.UTC())) + continue + } + log := logs.ResourceLogs().AppendEmpty().ScopeLogs().AppendEmpty().LogRecords().AppendEmpty() + + // convert time to timestamp + timestamp := pcommon.NewTimestampFromTime(logTimestamp.UTC()) + log.SetTimestamp(timestamp) + log.Body().SetStr(splunkLog.Raw) + + } + if logs.ResourceLogs().Len() == 0 { + ssapir.logger.Info("search returned no logs within the given time range") + return nil + } + + // pass logs, wait for exporter to confirm successful export to GCP + err = ssapir.logsConsumer.ConsumeLogs(ctx, logs) + if err != nil { + // Error from down the pipeline, freak out + ssapir.logger.Error("error consuming logs", zap.Error(err)) + } + } + return nil +} + +func createSplunkSearch(config *Config, search string) (string, error) { + resp, err := createSearchJob(config, search) + if err != nil { + return "", err + } + return resp.SID, nil +} + +func isSearchCompleted(config *Config, sid string) (bool, error) { + resp, err := getJobStatus(config, sid) + if err != nil { + return false, err + } + + for _, key := range resp.Content.Dict.Keys { + if key.Name == "dispatchState" { + if key.Value == "DONE" { + return true, nil + } + break + } + } + return false, nil +} + +func getSplunkSearchResults(config *Config, sid string) (SearchResults, error) { + resp, err := getSearchResults(config, sid) + if err != nil { + return SearchResults{}, err + } + return resp, nil +} From 6079244504abb42fae4b1c367bce4c3b4e22e5a1 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Fri, 8 Nov 2024 11:07:16 -0500 Subject: [PATCH 02/55] lint --- receiver/splunksearchapireceiver/api.go | 25 ++++++++++++++++---- receiver/splunksearchapireceiver/config.go | 17 +++++++++++++ receiver/splunksearchapireceiver/factory.go | 15 ++++++++++++ receiver/splunksearchapireceiver/model.go | 19 ++++++++++++++- receiver/splunksearchapireceiver/receiver.go | 16 ++++++++++++- 5 files changed, 85 insertions(+), 7 deletions(-) diff 
--git a/receiver/splunksearchapireceiver/api.go b/receiver/splunksearchapireceiver/api.go index fcf7fa83c..a4ca93c9a 100644 --- a/receiver/splunksearchapireceiver/api.go +++ b/receiver/splunksearchapireceiver/api.go @@ -1,3 +1,18 @@ +// Copyright observIQ, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package splunksearchapireceiver provides a receiver that uses the Splunk API to migrate event data. package splunksearchapireceiver import ( @@ -10,8 +25,8 @@ import ( "net/http" ) -func createHttpClient() *http.Client { - // TODO: Add functionality to configure TLS settings using config options +func createHTTPClient() *http.Client { + // TODO: Add TLS tr := &http.Transport{ TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, // Disables TLS verification } @@ -29,7 +44,7 @@ func createSearchJob(config *Config, search string) (CreateJobResponse, error) { } req.SetBasicAuth(config.Username, config.Password) - client := createHttpClient() + client := createHTTPClient() resp, err := client.Do(req) if err != nil { return CreateJobResponse{}, err @@ -63,7 +78,7 @@ func getJobStatus(config *Config, sid string) (JobStatusResponse, error) { } req.SetBasicAuth(config.Username, config.Password) - client := createHttpClient() + client := createHTTPClient() resp, err := client.Do(req) if err != nil { return JobStatusResponse{}, err @@ -95,7 +110,7 @@ func getSearchResults(config *Config, sid string) (SearchResults, error) { } 
req.SetBasicAuth(config.Username, config.Password) - client := createHttpClient() + client := createHTTPClient() resp, err := client.Do(req) if err != nil { return SearchResults{}, err diff --git a/receiver/splunksearchapireceiver/config.go b/receiver/splunksearchapireceiver/config.go index fbb149ad2..d51492989 100644 --- a/receiver/splunksearchapireceiver/config.go +++ b/receiver/splunksearchapireceiver/config.go @@ -1,3 +1,17 @@ +// Copyright observIQ, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ package splunksearchapireceiver import ( @@ -6,6 +20,7 @@ import ( "time" ) +// Config struct to represent the configuration for the Splunk Search API receiver type Config struct { Server string `mapstructure:"splunk_server"` Username string `mapstructure:"splunk_username"` @@ -13,6 +28,7 @@ type Config struct { Searches []Search `mapstructure:"searches"` } +// Search struct to represent a Splunk search type Search struct { Query string `mapstructure:"query"` EarliestTime string `mapstructure:"earliest_time"` @@ -20,6 +36,7 @@ type Search struct { Limit int `mapstructure:"limit"` } +// Validate validates the Splunk Search API receiver configuration func (cfg *Config) Validate() error { if cfg.Server == "" { return errors.New("missing Splunk server") diff --git a/receiver/splunksearchapireceiver/factory.go b/receiver/splunksearchapireceiver/factory.go index 8a1c9aaa4..8012af3d1 100644 --- a/receiver/splunksearchapireceiver/factory.go +++ b/receiver/splunksearchapireceiver/factory.go @@ -1,3 +1,17 @@ +// Copyright observIQ, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ package splunksearchapireceiver import ( @@ -31,6 +45,7 @@ func createLogsReceiver(_ context.Context, return ssapir, nil } +// NewFactory creates a factory for Splunk Search API receiver func NewFactory() receiver.Factory { return receiver.NewFactory(typeStr, createDefaultConfig, receiver.WithLogs(createLogsReceiver, component.StabilityLevelAlpha)) } diff --git a/receiver/splunksearchapireceiver/model.go b/receiver/splunksearchapireceiver/model.go index ab0066708..94984f670 100644 --- a/receiver/splunksearchapireceiver/model.go +++ b/receiver/splunksearchapireceiver/model.go @@ -1,10 +1,25 @@ +// Copyright observIQ, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ package splunksearchapireceiver -// response structs for Splunk API calls +// CreateJobResponse struct to represent the XML response from Splunk create job endpoint type CreateJobResponse struct { SID string `xml:"sid"` } +// JobStatusResponse struct to represent the XML response from Splunk job status endpoint type JobStatusResponse struct { Content struct { Type string `xml:"type,attr"` @@ -25,12 +40,14 @@ type Key struct { List *List `xml:"list,omitempty"` } +// List struct to represent elements type List struct { Items []struct { Value string `xml:",chardata"` } `xml:"item"` } +// SearchResults struct to represent the JSON response from Splunk search results endpoint type SearchResults struct { Results []struct { Raw string `json:"_raw"` diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index 77f23aef6..c8105fe78 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -1,3 +1,17 @@ +// Copyright observIQ, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ package splunksearchapireceiver import ( @@ -24,7 +38,7 @@ func (ssapir *splunksearchapireceiver) Start(ctx context.Context, host component return nil } -func (ssapir *splunksearchapireceiver) Shutdown(ctx context.Context) error { +func (ssapir *splunksearchapireceiver) Shutdown(_ context.Context) error { return nil } From 8eafe6393d7563e6637d9a752ccf256d2b7e807a Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Fri, 8 Nov 2024 14:48:35 -0500 Subject: [PATCH 03/55] tls --- receiver/splunksearchapireceiver/api.go | 30 +++------- receiver/splunksearchapireceiver/config.go | 14 +++-- receiver/splunksearchapireceiver/factory.go | 9 ++- receiver/splunksearchapireceiver/go.mod | 33 ++++++++-- receiver/splunksearchapireceiver/go.sum | 63 ++++++++++++++++---- receiver/splunksearchapireceiver/receiver.go | 26 +++++--- 6 files changed, 118 insertions(+), 57 deletions(-) diff --git a/receiver/splunksearchapireceiver/api.go b/receiver/splunksearchapireceiver/api.go index a4ca93c9a..64a0285ae 100644 --- a/receiver/splunksearchapireceiver/api.go +++ b/receiver/splunksearchapireceiver/api.go @@ -17,7 +17,6 @@ package splunksearchapireceiver import ( "bytes" - "crypto/tls" "encoding/json" "encoding/xml" "fmt" @@ -25,17 +24,9 @@ import ( "net/http" ) -func createHTTPClient() *http.Client { - // TODO: Add TLS - tr := &http.Transport{ - TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, // Disables TLS verification - } - return &http.Client{Transport: tr} -} - -func createSearchJob(config *Config, search string) (CreateJobResponse, error) { +func (ssapir *splunksearchapireceiver) createSearchJob(config *Config, search string) (CreateJobResponse, error) { // fmt.Println("Creating search job for search: ", search) - endpoint := fmt.Sprintf("https://%s/services/search/jobs", config.Server) + endpoint := fmt.Sprintf("%s/services/search/jobs", config.Endpoint) reqBody := fmt.Sprintf(`search=%s`, search) req, err := http.NewRequest("POST", endpoint, 
bytes.NewBuffer([]byte(reqBody))) @@ -44,8 +35,7 @@ func createSearchJob(config *Config, search string) (CreateJobResponse, error) { } req.SetBasicAuth(config.Username, config.Password) - client := createHTTPClient() - resp, err := client.Do(req) + resp, err := ssapir.client.Do(req) if err != nil { return CreateJobResponse{}, err } @@ -68,9 +58,9 @@ func createSearchJob(config *Config, search string) (CreateJobResponse, error) { return jobResponse, nil } -func getJobStatus(config *Config, sid string) (JobStatusResponse, error) { +func (ssapir *splunksearchapireceiver) getJobStatus(config *Config, sid string) (JobStatusResponse, error) { // fmt.Println("Getting job status") - endpoint := fmt.Sprintf("https://%s/services/search/v2/jobs/%s", config.Server, sid) + endpoint := fmt.Sprintf("%s/services/search/v2/jobs/%s", config.Endpoint, sid) req, err := http.NewRequest("GET", endpoint, nil) if err != nil { @@ -78,8 +68,7 @@ func getJobStatus(config *Config, sid string) (JobStatusResponse, error) { } req.SetBasicAuth(config.Username, config.Password) - client := createHTTPClient() - resp, err := client.Do(req) + resp, err := ssapir.client.Do(req) if err != nil { return JobStatusResponse{}, err } @@ -102,16 +91,15 @@ func getJobStatus(config *Config, sid string) (JobStatusResponse, error) { return jobStatusResponse, nil } -func getSearchResults(config *Config, sid string) (SearchResults, error) { - endpoint := fmt.Sprintf("https://%s/services/search/v2/jobs/%s/results?output_mode=json", config.Server, sid) +func (ssapir *splunksearchapireceiver) getSearchResults(config *Config, sid string) (SearchResults, error) { + endpoint := fmt.Sprintf("%s/services/search/v2/jobs/%s/results?output_mode=json", config.Endpoint, sid) req, err := http.NewRequest("GET", endpoint, nil) if err != nil { return SearchResults{}, err } req.SetBasicAuth(config.Username, config.Password) - client := createHTTPClient() - resp, err := client.Do(req) + resp, err := ssapir.client.Do(req) if err != 
nil { return SearchResults{}, err } diff --git a/receiver/splunksearchapireceiver/config.go b/receiver/splunksearchapireceiver/config.go index d51492989..629ecc7dc 100644 --- a/receiver/splunksearchapireceiver/config.go +++ b/receiver/splunksearchapireceiver/config.go @@ -18,14 +18,16 @@ import ( "errors" "strings" "time" + + "go.opentelemetry.io/collector/config/confighttp" ) // Config struct to represent the configuration for the Splunk Search API receiver type Config struct { - Server string `mapstructure:"splunk_server"` - Username string `mapstructure:"splunk_username"` - Password string `mapstructure:"splunk_password"` - Searches []Search `mapstructure:"searches"` + confighttp.ClientConfig `mapstructure:",squash"` + Username string `mapstructure:"splunk_username"` + Password string `mapstructure:"splunk_password"` + Searches []Search `mapstructure:"searches"` } // Search struct to represent a Splunk search @@ -38,8 +40,8 @@ type Search struct { // Validate validates the Splunk Search API receiver configuration func (cfg *Config) Validate() error { - if cfg.Server == "" { - return errors.New("missing Splunk server") + if cfg.Endpoint == "" { + return errors.New("missing Splunk server endpoint") } if cfg.Username == "" { return errors.New("missing Splunk username") diff --git a/receiver/splunksearchapireceiver/factory.go b/receiver/splunksearchapireceiver/factory.go index 8012af3d1..64ce850fc 100644 --- a/receiver/splunksearchapireceiver/factory.go +++ b/receiver/splunksearchapireceiver/factory.go @@ -18,6 +18,7 @@ import ( "context" "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/config/confighttp" "go.opentelemetry.io/collector/consumer" "go.opentelemetry.io/collector/receiver" ) @@ -27,7 +28,9 @@ var ( ) func createDefaultConfig() component.Config { - return &Config{} + return &Config{ + ClientConfig: confighttp.NewDefaultClientConfig(), + } } func createLogsReceiver(_ context.Context, @@ -35,12 +38,12 @@ func createLogsReceiver(_ 
context.Context, cfg component.Config, consumer consumer.Logs, ) (receiver.Logs, error) { - logger := params.Logger ssapirConfig := cfg.(*Config) ssapir := &splunksearchapireceiver{ - logger: logger, + logger: params.Logger, logsConsumer: consumer, config: ssapirConfig, + settings: params.TelemetrySettings, } return ssapir, nil } diff --git a/receiver/splunksearchapireceiver/go.mod b/receiver/splunksearchapireceiver/go.mod index 1da1f210c..15fc44dfc 100644 --- a/receiver/splunksearchapireceiver/go.mod +++ b/receiver/splunksearchapireceiver/go.mod @@ -3,27 +3,48 @@ module github.com/open-telemetry/opentelemtry-collector-contrib/receiver/splunks go 1.22.5 require ( - go.opentelemetry.io/collector/component v0.112.0 - go.opentelemetry.io/collector/consumer v0.112.0 - go.opentelemetry.io/collector/pdata v1.18.0 + go.opentelemetry.io/collector/component v0.113.0 + go.opentelemetry.io/collector/consumer v0.113.0 + go.opentelemetry.io/collector/pdata v1.19.0 go.opentelemetry.io/collector/receiver v0.112.0 go.uber.org/zap v1.27.0 ) +require ( + github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/fsnotify/fsnotify v1.8.0 // indirect + github.com/go-logr/logr v1.4.2 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/golang/snappy v0.0.4 // indirect + github.com/klauspost/compress v1.17.11 // indirect + github.com/pierrec/lz4/v4 v4.1.21 // indirect + github.com/rs/cors v1.11.1 // indirect + go.opentelemetry.io/collector/client v1.19.0 // indirect + go.opentelemetry.io/collector/config/configauth v0.113.0 // indirect + go.opentelemetry.io/collector/config/configcompression v1.19.0 // indirect + go.opentelemetry.io/collector/config/configopaque v1.19.0 // indirect + go.opentelemetry.io/collector/config/configtls v1.19.0 // indirect + go.opentelemetry.io/collector/config/internal v0.113.0 // indirect + go.opentelemetry.io/collector/extension v0.113.0 // indirect + go.opentelemetry.io/collector/extension/auth v0.113.0 // indirect + 
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.56.0 // indirect +) + require ( github.com/gogo/protobuf v1.3.2 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect - go.opentelemetry.io/collector/config/configtelemetry v0.112.0 // indirect + go.opentelemetry.io/collector/config/confighttp v0.113.0 + go.opentelemetry.io/collector/config/configtelemetry v0.113.0 // indirect go.opentelemetry.io/collector/pipeline v0.112.0 // indirect go.opentelemetry.io/otel v1.31.0 // indirect go.opentelemetry.io/otel/metric v1.31.0 // indirect go.opentelemetry.io/otel/trace v1.31.0 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/net v0.28.0 // indirect + golang.org/x/net v0.30.0 // indirect golang.org/x/sys v0.26.0 // indirect - golang.org/x/text v0.17.0 // indirect + golang.org/x/text v0.19.0 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20240822170219-fc7c04adadcd // indirect google.golang.org/grpc v1.67.1 // indirect google.golang.org/protobuf v1.35.1 // indirect diff --git a/receiver/splunksearchapireceiver/go.sum b/receiver/splunksearchapireceiver/go.sum index b66d91c7b..d15e6729a 100644 --- a/receiver/splunksearchapireceiver/go.sum +++ b/receiver/splunksearchapireceiver/go.sum @@ -1,54 +1,93 @@ github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M= +github.com/fsnotify/fsnotify v1.8.0/go.mod 
h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc= +github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= 
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ= +github.com/pierrec/lz4/v4 v4.1.21/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rs/cors v1.11.1 h1:eU3gRzXLRK57F5rKMGMZURNdIG4EoAmX8k94r9wXWHA= +github.com/rs/cors v1.11.1/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -go.opentelemetry.io/collector/component v0.112.0 h1:Hw125Tdb427yKkzFx3U/OsfPATYXsbURkc27dn19he8= -go.opentelemetry.io/collector/component v0.112.0/go.mod h1:hV9PEgkNlVAySX+Oo/g7+NcLe234L04kRXw6uGj3VEw= -go.opentelemetry.io/collector/config/configtelemetry v0.112.0 h1:MVBrWJUoqfKrORI38dY8OV0i5d1RRHR/ACIBu9TOcZ8= -go.opentelemetry.io/collector/config/configtelemetry v0.112.0/go.mod h1:R0MBUxjSMVMIhljuDHWIygzzJWQyZHXXWIgQNxcFwhc= -go.opentelemetry.io/collector/consumer v0.112.0 h1:tfO4FpuQ8MsD7AxgslC3tRNVYjd9Xkus34BOExsG4fM= -go.opentelemetry.io/collector/consumer v0.112.0/go.mod h1:ZKSeGvXvaofIlvPrWlARKQpONOmuw6R/yifgYCWHKRw= 
+go.opentelemetry.io/collector/client v1.19.0 h1:TUal8WV1agTrZStgE7BJ8ZC0IHLGtrfgO9ogU9t1mv8= +go.opentelemetry.io/collector/client v1.19.0/go.mod h1:jgiXMEM6l8L2QEyf2I/M47Zd8+G7e4z+6H8q5SkHOlQ= +go.opentelemetry.io/collector/component v0.113.0 h1:/nx+RvZgxUEXP+YcTj69rEtuSEGkfaCyp/ad5zQGLjU= +go.opentelemetry.io/collector/component v0.113.0/go.mod h1:2T779hIGHU9i7xbXbV3q1/JnRw2FyzUYXW2vq47A6EU= +go.opentelemetry.io/collector/config/configauth v0.113.0 h1:CBz43fGpN41MwLdwe3mw/XVSIDvGRMT8aaaPuqKukTU= +go.opentelemetry.io/collector/config/configauth v0.113.0/go.mod h1:Q8SlxrIvL3FJO51hXa4n9ARvox04lK8mmpjf4b3UNAU= +go.opentelemetry.io/collector/config/configcompression v1.19.0 h1:bTSjTLhnPXX1NSFM6GzguEM/NBe8QUPsXHc9kMOAJzE= +go.opentelemetry.io/collector/config/configcompression v1.19.0/go.mod h1:pnxkFCLUZLKWzYJvfSwZnPrnm0twX14CYj2ADth5xiU= +go.opentelemetry.io/collector/config/confighttp v0.113.0 h1:a6iO0y1ZM5CPDvwbryzU+GpqAtAQ3eSfNseoAUogw7c= +go.opentelemetry.io/collector/config/confighttp v0.113.0/go.mod h1:JZ9EwoiWMIrXt5v+d/q54TeUhPdAoLDimSEqTtddW6E= +go.opentelemetry.io/collector/config/configopaque v1.19.0 h1:7uvntQeAAtqCaeiS2dDGrT1wLPhWvDlEsD3SliA/koQ= +go.opentelemetry.io/collector/config/configopaque v1.19.0/go.mod h1:6zlLIyOoRpJJ+0bEKrlZOZon3rOp5Jrz9fMdR4twOS4= +go.opentelemetry.io/collector/config/configtelemetry v0.113.0 h1:hweTRrVddnUeA3k7HzRY4oUR9lRdMa7of3mHNUS5YyA= +go.opentelemetry.io/collector/config/configtelemetry v0.113.0/go.mod h1:R0MBUxjSMVMIhljuDHWIygzzJWQyZHXXWIgQNxcFwhc= +go.opentelemetry.io/collector/config/configtls v1.19.0 h1:GQ/cF1hgNqHVBq2oSSrOFXxVCyMDyd5kq4R/RMEbL98= +go.opentelemetry.io/collector/config/configtls v1.19.0/go.mod h1:1hyqnYB3JqEUlk1ME/s9HYz4oCRcxQCRxsJitFFT/cA= +go.opentelemetry.io/collector/config/internal v0.113.0 h1:9RAzH8v7ItFT1npHpvP0SvUzBHcZDliCGRo9Spp6v7c= +go.opentelemetry.io/collector/config/internal v0.113.0/go.mod h1:yC7E4h1Uj0SubxcFImh6OvBHFTjMh99+A5PuyIgDWqc= +go.opentelemetry.io/collector/consumer v0.113.0 
h1:KJSiK5vSIY9dgPxwKfQ3gOgKtQsqc+7IB7mGhUAL5c8= +go.opentelemetry.io/collector/consumer v0.113.0/go.mod h1:zHMlXYFaJlZoLCBR6UwWoyXZ/adcO1u2ydqUal3VmYU= go.opentelemetry.io/collector/consumer/consumerprofiles v0.112.0 h1:ym+QxemlbWwfMSUto1hRTfcZeYbj2q8FpMzjk8O+X60= go.opentelemetry.io/collector/consumer/consumerprofiles v0.112.0/go.mod h1:4PjDUpURFh85R6NLEHrEf/uZjpk4LAYmmOrqu+iZsyE= go.opentelemetry.io/collector/consumer/consumertest v0.112.0 h1:pGvNH+H4rMygUOql6ynVQim6UFdimTiJ0HRfQL6v0GE= go.opentelemetry.io/collector/consumer/consumertest v0.112.0/go.mod h1:rfVo0tYt/BaLWw3IaQKVQafjUlMsA5qTkvsSOfFrr9c= -go.opentelemetry.io/collector/pdata v1.18.0 h1:/yg2rO2dxqDM2p6GutsMCxXN6sKlXwyIz/ZYyUPONBg= -go.opentelemetry.io/collector/pdata v1.18.0/go.mod h1:Ox1YVLe87cZDB/TL30i4SUz1cA5s6AM6SpFMfY61ICs= +go.opentelemetry.io/collector/extension v0.113.0 h1:Vp/YSL8ZCkJQrP1lf2Bm5yaTvcp6ROO3AnfuSL3GEXM= +go.opentelemetry.io/collector/extension v0.113.0/go.mod h1:Pwp0TNqdHeER4V1I6H6oCvrto/riiOAqs3737BWCnjw= +go.opentelemetry.io/collector/extension/auth v0.113.0 h1:4ggRy1vepOabUiCWfU+6M9P/ftXojMUNAvBpeLihYj8= +go.opentelemetry.io/collector/extension/auth v0.113.0/go.mod h1:VbvAm2YZAqePkWgwn0m0vBaq3aC49CxPVwHmrJ24aeQ= +go.opentelemetry.io/collector/pdata v1.19.0 h1:jmnU5R8TOCbwRr4B8sjdRxM7L5WnEKlQWX1dtLYxIbE= +go.opentelemetry.io/collector/pdata v1.19.0/go.mod h1:Ox1YVLe87cZDB/TL30i4SUz1cA5s6AM6SpFMfY61ICs= go.opentelemetry.io/collector/pdata/pprofile v0.112.0 h1:t+LYorcMqZ3sDz5/jp3xU2l5lIhIXuIOOGO4Ef9CG2c= go.opentelemetry.io/collector/pdata/pprofile v0.112.0/go.mod h1:F2aTCoDzIaxEUK1g92LZvMwradySFMo3ZsAnBIpOdUg= go.opentelemetry.io/collector/pipeline v0.112.0 h1:jqKDdb8k53OLPibvxzX6fmMec0ZHAtqe4p2+cuHclEI= go.opentelemetry.io/collector/pipeline v0.112.0/go.mod h1:4vOvjVsoYTHVGTbfFwqfnQOSV2K3RKUHofh3jNRc2Mg= go.opentelemetry.io/collector/receiver v0.112.0 h1:gdTBDOPGKMZlZghtN5A7ZLNlNwCHWYcoJQeIiXvyGEQ= go.opentelemetry.io/collector/receiver v0.112.0/go.mod 
h1:3QmfSUiyFzRTnHUqF8fyEvQpU5q/xuwS43jGt8JXEEA= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.56.0 h1:UP6IpuHFkUgOQL9FFQFrZ+5LiwhhYRbi7VZSIx6Nj5s= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.56.0/go.mod h1:qxuZLtbq5QDtdeSHsS7bcf6EH6uO6jUAgk764zd3rhM= go.opentelemetry.io/otel v1.31.0 h1:NsJcKPIW0D0H3NgzPDHmo0WW6SptzPdqg/L1zsIm2hY= go.opentelemetry.io/otel v1.31.0/go.mod h1:O0C14Yl9FgkjqcCZAsE053C13OaddMYr/hz6clDkEJE= go.opentelemetry.io/otel/metric v1.31.0 h1:FSErL0ATQAmYHUIzSezZibnyVlft1ybhy4ozRPcF2fE= go.opentelemetry.io/otel/metric v1.31.0/go.mod h1:C3dEloVbLuYoX41KpmAhOqNriGbA+qqH6PQ5E5mUfnY= +go.opentelemetry.io/otel/sdk v1.31.0 h1:xLY3abVHYZ5HSfOg3l2E5LUj2Cwva5Y7yGxnSW9H5Gk= +go.opentelemetry.io/otel/sdk v1.31.0/go.mod h1:TfRbMdhvxIIr/B2N2LQW2S5v9m3gOQ/08KsbbO5BPT0= +go.opentelemetry.io/otel/sdk/metric v1.31.0 h1:i9hxxLJF/9kkvfHppyLL55aW7iIJz4JjxTeYusH7zMc= +go.opentelemetry.io/otel/sdk/metric v1.31.0/go.mod h1:CRInTMVvNhUKgSAMbKyTMxqOBC0zgyxzW55lZzX43Y8= go.opentelemetry.io/otel/trace v1.31.0 h1:ffjsj1aRouKewfr85U2aGagJ46+MvodynlQ1HYdmJys= go.opentelemetry.io/otel/trace v1.31.0/go.mod h1:TXZkRk7SM2ZQLtR6eoAWQFIHPvzQ06FJAsO1tJg480A= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= @@ -66,8 +105,8 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.28.0 h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE= -golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg= +golang.org/x/net v0.30.0 h1:AcW1SDZMkb8IpzCdQUaIq2sP4sZ4zw+55h6ynffypl4= +golang.org/x/net v0.30.0/go.mod 
h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -78,8 +117,8 @@ golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc= -golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM= +golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index c8105fe78..5080a522e 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -16,6 +16,7 @@ package splunksearchapireceiver import ( "context" + "net/http" "time" "go.opentelemetry.io/collector/component" @@ -30,10 +31,17 @@ type splunksearchapireceiver struct { logger *zap.Logger logsConsumer consumer.Logs config *Config + settings component.TelemetrySettings + client *http.Client } func (ssapir *splunksearchapireceiver) Start(ctx context.Context, host component.Host) error { 
ssapir.host = host + client, err := ssapir.config.ClientConfig.ToClient(ctx, host, ssapir.settings) + if err != nil { + return err + } + ssapir.client = client go ssapir.runQueries(ctx) return nil } @@ -45,7 +53,7 @@ func (ssapir *splunksearchapireceiver) Shutdown(_ context.Context) error { func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { for _, search := range ssapir.config.Searches { // create search in Splunk - searchID, err := createSplunkSearch(ssapir.config, search.Query) + searchID, err := ssapir.createSplunkSearch(ssapir.config, search.Query) if err != nil { ssapir.logger.Error("error creating search", zap.Error(err)) } @@ -53,7 +61,7 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { // wait for search to complete for { - done, err := isSearchCompleted(ssapir.config, searchID) + done, err := ssapir.isSearchCompleted(ssapir.config, searchID) if err != nil { ssapir.logger.Error("error checking search status", zap.Error(err)) } @@ -65,7 +73,7 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { // fmt.Println("Search completed successfully") // fetch search results - results, err := getSplunkSearchResults(ssapir.config, searchID) + results, err := ssapir.getSplunkSearchResults(ssapir.config, searchID) if err != nil { ssapir.logger.Error("error fetching search results", zap.Error(err)) } @@ -127,16 +135,16 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { return nil } -func createSplunkSearch(config *Config, search string) (string, error) { - resp, err := createSearchJob(config, search) +func (ssapir *splunksearchapireceiver) createSplunkSearch(config *Config, search string) (string, error) { + resp, err := ssapir.createSearchJob(config, search) if err != nil { return "", err } return resp.SID, nil } -func isSearchCompleted(config *Config, sid string) (bool, error) { - resp, err := getJobStatus(config, sid) +func (ssapir 
*splunksearchapireceiver) isSearchCompleted(config *Config, sid string) (bool, error) { + resp, err := ssapir.getJobStatus(config, sid) if err != nil { return false, err } @@ -152,8 +160,8 @@ func isSearchCompleted(config *Config, sid string) (bool, error) { return false, nil } -func getSplunkSearchResults(config *Config, sid string) (SearchResults, error) { - resp, err := getSearchResults(config, sid) +func (ssapir *splunksearchapireceiver) getSplunkSearchResults(config *Config, sid string) (SearchResults, error) { + resp, err := ssapir.getSearchResults(config, sid) if err != nil { return SearchResults{}, err } From 77596d489e989dc635aa3ab0b41e6577bcfe3297 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Tue, 12 Nov 2024 09:51:13 -0500 Subject: [PATCH 04/55] WIP --- receiver/splunksearchapireceiver/api.go | 11 +- receiver/splunksearchapireceiver/config.go | 1 + receiver/splunksearchapireceiver/factory.go | 3 +- receiver/splunksearchapireceiver/model.go | 3 +- receiver/splunksearchapireceiver/receiver.go | 133 ++++++++++--------- 5 files changed, 86 insertions(+), 65 deletions(-) diff --git a/receiver/splunksearchapireceiver/api.go b/receiver/splunksearchapireceiver/api.go index 64a0285ae..c9958547e 100644 --- a/receiver/splunksearchapireceiver/api.go +++ b/receiver/splunksearchapireceiver/api.go @@ -22,6 +22,8 @@ import ( "fmt" "io" "net/http" + + "go.uber.org/zap" ) func (ssapir *splunksearchapireceiver) createSearchJob(config *Config, search string) (CreateJobResponse, error) { @@ -91,14 +93,16 @@ func (ssapir *splunksearchapireceiver) getJobStatus(config *Config, sid string) return jobStatusResponse, nil } -func (ssapir *splunksearchapireceiver) getSearchResults(config *Config, sid string) (SearchResults, error) { - endpoint := fmt.Sprintf("%s/services/search/v2/jobs/%s/results?output_mode=json", config.Endpoint, sid) +func (ssapir *splunksearchapireceiver) getSearchResults(config *Config, sid string, offset int) (SearchResults, error) { + endpoint := 
fmt.Sprintf("%s/services/search/v2/jobs/%s/results?output_mode=json&offset=%d&count=%d", config.Endpoint, sid, offset, ssapir.eventBatchSize) req, err := http.NewRequest("GET", endpoint, nil) if err != nil { return SearchResults{}, err } req.SetBasicAuth(config.Username, config.Password) + ssapir.logger.Info("Getting search results", zap.Int("offset", offset), zap.Int("count", ssapir.eventBatchSize)) + resp, err := ssapir.client.Do(req) if err != nil { return SearchResults{}, err @@ -114,11 +118,12 @@ func (ssapir *splunksearchapireceiver) getSearchResults(config *Config, sid stri if err != nil { return SearchResults{}, fmt.Errorf("failed to read search job results response: %v", err) } - // fmt.Println("Body: ", string(body)) + err = json.Unmarshal(body, &searchResults) if err != nil { return SearchResults{}, fmt.Errorf("failed to unmarshal search job results: %v", err) } + fmt.Println("Init offset: ", searchResults.InitOffset) return searchResults, nil } diff --git a/receiver/splunksearchapireceiver/config.go b/receiver/splunksearchapireceiver/config.go index 629ecc7dc..f5a19b264 100644 --- a/receiver/splunksearchapireceiver/config.go +++ b/receiver/splunksearchapireceiver/config.go @@ -28,6 +28,7 @@ type Config struct { Username string `mapstructure:"splunk_username"` Password string `mapstructure:"splunk_password"` Searches []Search `mapstructure:"searches"` + EventBatchSize int `mapstructure:"event_batch_size"` } // Search struct to represent a Splunk search diff --git a/receiver/splunksearchapireceiver/factory.go b/receiver/splunksearchapireceiver/factory.go index 64ce850fc..0018d9033 100644 --- a/receiver/splunksearchapireceiver/factory.go +++ b/receiver/splunksearchapireceiver/factory.go @@ -29,7 +29,8 @@ var ( func createDefaultConfig() component.Config { return &Config{ - ClientConfig: confighttp.NewDefaultClientConfig(), + ClientConfig: confighttp.NewDefaultClientConfig(), + EventBatchSize: 100, } } diff --git a/receiver/splunksearchapireceiver/model.go 
b/receiver/splunksearchapireceiver/model.go index 94984f670..a53e0a882 100644 --- a/receiver/splunksearchapireceiver/model.go +++ b/receiver/splunksearchapireceiver/model.go @@ -49,7 +49,8 @@ type List struct { // SearchResults struct to represent the JSON response from Splunk search results endpoint type SearchResults struct { - Results []struct { + InitOffset int `json:"init_offset"` + Results []struct { Raw string `json:"_raw"` Time string `json:"_time"` } `json:"results"` diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index 5080a522e..013f0d594 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -27,12 +27,13 @@ import ( ) type splunksearchapireceiver struct { - host component.Host - logger *zap.Logger - logsConsumer consumer.Logs - config *Config - settings component.TelemetrySettings - client *http.Client + host component.Host + logger *zap.Logger + logsConsumer consumer.Logs + config *Config + settings component.TelemetrySettings + client *http.Client + eventBatchSize int } func (ssapir *splunksearchapireceiver) Start(ctx context.Context, host component.Host) error { @@ -70,67 +71,79 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { } time.Sleep(2 * time.Second) } - // fmt.Println("Search completed successfully") - // fetch search results - results, err := ssapir.getSplunkSearchResults(ssapir.config, searchID) - if err != nil { - ssapir.logger.Error("error fetching search results", zap.Error(err)) - } - // fmt.Println("Search results: ", results) - - // parse time strings to time.Time - earliestTime, err := time.Parse(time.RFC3339, search.EarliestTime) - if err != nil { - // should be impossible to reach with config validation - ssapir.logger.Error("earliest_time failed to be parsed as RFC3339", zap.Error(err)) - } - - latestTime, err := time.Parse(time.RFC3339, search.LatestTime) - if err != nil { - // should 
be impossible to reach with config validation - ssapir.logger.Error("latest_time failed to be parsed as RFC3339", zap.Error(err)) - } - - logs := plog.NewLogs() - for idx, splunkLog := range results.Results { - if idx >= search.Limit && search.Limit != 0 { - break - } - // convert log timestamp to ISO8601 (UTC() makes RFC3339 into ISO8601) - logTimestamp, err := time.Parse(time.RFC3339, splunkLog.Time) + var resultCountTracker = 0 // track number of results exported + var offset = 0 // offset for pagination + for { + // fetch search results + results, err := ssapir.getSplunkSearchResults(ssapir.config, searchID, offset) if err != nil { - ssapir.logger.Error("error parsing log timestamp", zap.Error(err)) - break + ssapir.logger.Error("error fetching search results", zap.Error(err)) } - if logTimestamp.UTC().After(latestTime.UTC()) { - ssapir.logger.Info("skipping log entry - timestamp after latestTime", zap.Time("time", logTimestamp.UTC()), zap.Time("latestTime", latestTime.UTC())) - // logger.Info will only log up to 10 times for a given code block, known weird behavior - continue + + // parse time strings to time.Time + earliestTime, err := time.Parse(time.RFC3339, search.EarliestTime) + if err != nil { + // should be impossible to reach with config validation + ssapir.logger.Error("earliest_time failed to be parsed as RFC3339", zap.Error(err)) } - if logTimestamp.UTC().Before(earliestTime) { - ssapir.logger.Info("skipping log entry - timestamp before earliestTime", zap.Time("time", logTimestamp.UTC()), zap.Time("earliestTime", earliestTime.UTC())) - continue + + latestTime, err := time.Parse(time.RFC3339, search.LatestTime) + if err != nil { + // should be impossible to reach with config validation + ssapir.logger.Error("latest_time failed to be parsed as RFC3339", zap.Error(err)) } - log := logs.ResourceLogs().AppendEmpty().ScopeLogs().AppendEmpty().LogRecords().AppendEmpty() - // convert time to timestamp - timestamp := 
pcommon.NewTimestampFromTime(logTimestamp.UTC()) - log.SetTimestamp(timestamp) - log.Body().SetStr(splunkLog.Raw) + logs := plog.NewLogs() + for idx, splunkLog := range results.Results { + if idx >= search.Limit && search.Limit != 0 { + break + } + // convert log timestamp to ISO8601 (UTC() makes RFC3339 into ISO8601) + logTimestamp, err := time.Parse(time.RFC3339, splunkLog.Time) + if err != nil { + ssapir.logger.Error("error parsing log timestamp", zap.Error(err)) + break + } + if logTimestamp.UTC().After(latestTime.UTC()) { + ssapir.logger.Info("skipping log entry - timestamp after latestTime", zap.Time("time", logTimestamp.UTC()), zap.Time("latestTime", latestTime.UTC())) + // logger will only log up to 10 times for a given code block, known weird behavior + continue + } + if logTimestamp.UTC().Before(earliestTime) { + ssapir.logger.Info("skipping log entry - timestamp before earliestTime", zap.Time("time", logTimestamp.UTC()), zap.Time("earliestTime", earliestTime.UTC())) + continue + } + log := logs.ResourceLogs().AppendEmpty().ScopeLogs().AppendEmpty().LogRecords().AppendEmpty() + + // convert time to timestamp + timestamp := pcommon.NewTimestampFromTime(logTimestamp.UTC()) + log.SetTimestamp(timestamp) + log.Body().SetStr(splunkLog.Raw) - } - if logs.ResourceLogs().Len() == 0 { - ssapir.logger.Info("search returned no logs within the given time range") - return nil - } + } + if logs.ResourceLogs().Len() == 0 { + ssapir.logger.Info("search returned no logs within the given time range") + return nil + } - // pass logs, wait for exporter to confirm successful export to GCP - err = ssapir.logsConsumer.ConsumeLogs(ctx, logs) - if err != nil { - // Error from down the pipeline, freak out - ssapir.logger.Error("error consuming logs", zap.Error(err)) + // pass logs, wait for exporter to confirm successful export to GCP + err = ssapir.logsConsumer.ConsumeLogs(ctx, logs) + if err != nil { + // Error from down the pipeline, freak out + ssapir.logger.Error("error 
consuming logs", zap.Error(err)) + } + // if the number of results is less than the results per request, we have queried all pages for the search + if len(results.Results) < ssapir.eventBatchSize { + resultCountTracker += len(results.Results) + break + } + resultCountTracker += logs.ResourceLogs().Len() + offset += ssapir.eventBatchSize + time.Sleep(5 * time.Second) } + ssapir.logger.Info("search results exported", zap.String("query", search.Query), zap.Int("results", resultCountTracker)) + // search results completely exported, reset checkpoint storage } return nil } @@ -160,8 +173,8 @@ func (ssapir *splunksearchapireceiver) isSearchCompleted(config *Config, sid str return false, nil } -func (ssapir *splunksearchapireceiver) getSplunkSearchResults(config *Config, sid string) (SearchResults, error) { - resp, err := ssapir.getSearchResults(config, sid) +func (ssapir *splunksearchapireceiver) getSplunkSearchResults(config *Config, sid string, offset int) (SearchResults, error) { + resp, err := ssapir.getSearchResults(config, sid, offset) if err != nil { return SearchResults{}, err } From 5bac52348ad84aa5d4e901fe9101a8226e656bd3 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Tue, 12 Nov 2024 14:49:08 -0500 Subject: [PATCH 05/55] ticker, other pr feedback --- receiver/splunksearchapireceiver/api.go | 1 - receiver/splunksearchapireceiver/config.go | 21 +++++------- receiver/splunksearchapireceiver/factory.go | 2 ++ receiver/splunksearchapireceiver/receiver.go | 35 ++++++++++++++++---- 4 files changed, 39 insertions(+), 20 deletions(-) diff --git a/receiver/splunksearchapireceiver/api.go b/receiver/splunksearchapireceiver/api.go index c9958547e..3c96d4702 100644 --- a/receiver/splunksearchapireceiver/api.go +++ b/receiver/splunksearchapireceiver/api.go @@ -118,7 +118,6 @@ func (ssapir *splunksearchapireceiver) getSearchResults(config *Config, sid stri if err != nil { return SearchResults{}, fmt.Errorf("failed to read search job results response: %v", err) } - err = 
json.Unmarshal(body, &searchResults) if err != nil { return SearchResults{}, fmt.Errorf("failed to unmarshal search job results: %v", err) diff --git a/receiver/splunksearchapireceiver/config.go b/receiver/splunksearchapireceiver/config.go index f5a19b264..d95857742 100644 --- a/receiver/splunksearchapireceiver/config.go +++ b/receiver/splunksearchapireceiver/config.go @@ -22,6 +22,10 @@ import ( "go.opentelemetry.io/collector/config/confighttp" ) +var ( + errNonStandaloneSearchQuery = errors.New("only standalone search commands can be used for scraping data") +) + // Config struct to represent the configuration for the Splunk Search API receiver type Config struct { confighttp.ClientConfig `mapstructure:",squash"` @@ -61,11 +65,11 @@ func (cfg *Config) Validate() error { // query implicitly starts with "search" command if !strings.HasPrefix(search.Query, "search ") { - search.Query = "search " + search.Query + return errNonStandaloneSearchQuery } if strings.Contains(search.Query, "|") { - return errors.New("command chaining is not supported for queries") + return errNonStandaloneSearchQuery } if search.EarliestTime == "" { @@ -76,25 +80,16 @@ func (cfg *Config) Validate() error { } // parse time strings to time.Time - earliestTime, err := time.Parse(time.RFC3339, search.EarliestTime) + _, err := time.Parse(time.RFC3339, search.EarliestTime) if err != nil { return errors.New("earliest_time failed to be parsed as RFC3339") } - latestTime, err := time.Parse(time.RFC3339, search.LatestTime) + _, err = time.Parse(time.RFC3339, search.LatestTime) if err != nil { return errors.New("latest_time failed to be parsed as RFC3339") } - if earliestTime.UTC().After(latestTime.UTC()) { - return errors.New("earliest_time must be earlier than latest_time") - } - if earliestTime.UTC().After(time.Now().UTC()) { - return errors.New("earliest_time must be earlier than current time") - } - if latestTime.UTC().After(time.Now().UTC()) { - return errors.New("latest_time must be earlier 
than current time") - } } return nil } diff --git a/receiver/splunksearchapireceiver/factory.go b/receiver/splunksearchapireceiver/factory.go index 0018d9033..16bddccd0 100644 --- a/receiver/splunksearchapireceiver/factory.go +++ b/receiver/splunksearchapireceiver/factory.go @@ -16,6 +16,7 @@ package splunksearchapireceiver import ( "context" + "sync" "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/config/confighttp" @@ -45,6 +46,7 @@ func createLogsReceiver(_ context.Context, logsConsumer: consumer, config: ssapirConfig, settings: params.TelemetrySettings, + wg: &sync.WaitGroup{}, } return ssapir, nil } diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index 013f0d594..51633d484 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -16,6 +16,7 @@ package splunksearchapireceiver import ( "context" + "fmt" "net/http" "time" @@ -54,11 +55,11 @@ func (ssapir *splunksearchapireceiver) Shutdown(_ context.Context) error { func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { for _, search := range ssapir.config.Searches { // create search in Splunk - searchID, err := ssapir.createSplunkSearch(ssapir.config, search.Query) + ssapir.logger.Info("creating search", zap.String("query", search.Query)) + searchID, err := ssapir.createSplunkSearch(search.Query) if err != nil { ssapir.logger.Error("error creating search", zap.Error(err)) } - // fmt.Println("Search created successfully with ID: ", searchID) // wait for search to complete for { @@ -148,16 +149,38 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { return nil } -func (ssapir *splunksearchapireceiver) createSplunkSearch(config *Config, search string) (string, error) { - resp, err := ssapir.createSearchJob(config, search) +func (ssapir *splunksearchapireceiver) pollSearchCompletion(ctx context.Context, searchID string) error { + 
t := time.NewTicker(ssapir.config.JobPollInterval) + defer t.Stop() + for { + select { + case <-t.C: + ssapir.logger.Info("polling for search completion") + done, err := ssapir.isSearchCompleted(searchID) + if err != nil { + return fmt.Errorf("error polling for search completion: %v", err) + } + if done { + ssapir.logger.Info("search completed") + return nil + } + ssapir.logger.Info("search not completed yet") + case <-ctx.Done(): + return nil + } + } +} + +func (ssapir *splunksearchapireceiver) createSplunkSearch(search string) (string, error) { + resp, err := ssapir.createSearchJob(ssapir.config, search) if err != nil { return "", err } return resp.SID, nil } -func (ssapir *splunksearchapireceiver) isSearchCompleted(config *Config, sid string) (bool, error) { - resp, err := ssapir.getJobStatus(config, sid) +func (ssapir *splunksearchapireceiver) isSearchCompleted(sid string) (bool, error) { + resp, err := ssapir.getJobStatus(ssapir.config, sid) if err != nil { return false, err } From 9f21fe48d3a2adc3e74f87c608d2baa849120394 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Wed, 13 Nov 2024 10:49:44 -0500 Subject: [PATCH 06/55] pagination functionality --- receiver/splunksearchapireceiver/api.go | 23 +++++++--------- receiver/splunksearchapireceiver/receiver.go | 28 +++++++++++--------- 2 files changed, 26 insertions(+), 25 deletions(-) diff --git a/receiver/splunksearchapireceiver/api.go b/receiver/splunksearchapireceiver/api.go index 3c96d4702..f2708f42d 100644 --- a/receiver/splunksearchapireceiver/api.go +++ b/receiver/splunksearchapireceiver/api.go @@ -22,20 +22,17 @@ import ( "fmt" "io" "net/http" - - "go.uber.org/zap" ) -func (ssapir *splunksearchapireceiver) createSearchJob(config *Config, search string) (CreateJobResponse, error) { - // fmt.Println("Creating search job for search: ", search) - endpoint := fmt.Sprintf("%s/services/search/jobs", config.Endpoint) +func (ssapir *splunksearchapireceiver) createSearchJob(search string) (CreateJobResponse, 
error) { + endpoint := fmt.Sprintf("%s/services/search/jobs", ssapir.config.Endpoint) reqBody := fmt.Sprintf(`search=%s`, search) req, err := http.NewRequest("POST", endpoint, bytes.NewBuffer([]byte(reqBody))) if err != nil { return CreateJobResponse{}, err } - req.SetBasicAuth(config.Username, config.Password) + req.SetBasicAuth(ssapir.config.Username, ssapir.config.Password) resp, err := ssapir.client.Do(req) if err != nil { @@ -60,15 +57,15 @@ func (ssapir *splunksearchapireceiver) createSearchJob(config *Config, search st return jobResponse, nil } -func (ssapir *splunksearchapireceiver) getJobStatus(config *Config, sid string) (JobStatusResponse, error) { +func (ssapir *splunksearchapireceiver) getJobStatus(sid string) (JobStatusResponse, error) { // fmt.Println("Getting job status") - endpoint := fmt.Sprintf("%s/services/search/v2/jobs/%s", config.Endpoint, sid) + endpoint := fmt.Sprintf("%s/services/search/v2/jobs/%s", ssapir.config.Endpoint, sid) req, err := http.NewRequest("GET", endpoint, nil) if err != nil { return JobStatusResponse{}, err } - req.SetBasicAuth(config.Username, config.Password) + req.SetBasicAuth(ssapir.config.Username, ssapir.config.Password) resp, err := ssapir.client.Do(req) if err != nil { @@ -93,15 +90,15 @@ func (ssapir *splunksearchapireceiver) getJobStatus(config *Config, sid string) return jobStatusResponse, nil } -func (ssapir *splunksearchapireceiver) getSearchResults(config *Config, sid string, offset int) (SearchResults, error) { - endpoint := fmt.Sprintf("%s/services/search/v2/jobs/%s/results?output_mode=json&offset=%d&count=%d", config.Endpoint, sid, offset, ssapir.eventBatchSize) +func (ssapir *splunksearchapireceiver) getSearchResults(sid string, offset int) (SearchResults, error) { + endpoint := fmt.Sprintf("%s/services/search/v2/jobs/%s/results?output_mode=json&offset=%d&count=%d", ssapir.config.Endpoint, sid, offset, ssapir.config.EventBatchSize) req, err := http.NewRequest("GET", endpoint, nil) if err != nil { return 
SearchResults{}, err } - req.SetBasicAuth(config.Username, config.Password) + req.SetBasicAuth(ssapir.config.Username, ssapir.config.Password) - ssapir.logger.Info("Getting search results", zap.Int("offset", offset), zap.Int("count", ssapir.eventBatchSize)) + // ssapir.logger.Info("Getting search results", zap.Int("offset", offset), zap.Int("count", ssapir.config.EventBatchSize)) resp, err := ssapir.client.Do(req) if err != nil { diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index 51633d484..30fe96800 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -75,6 +75,7 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { var resultCountTracker = 0 // track number of results exported var offset = 0 // offset for pagination + var limitReached = false for { // fetch search results results, err := ssapir.getSplunkSearchResults(ssapir.config, searchID, offset) @@ -85,19 +86,18 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { // parse time strings to time.Time earliestTime, err := time.Parse(time.RFC3339, search.EarliestTime) if err != nil { - // should be impossible to reach with config validation ssapir.logger.Error("earliest_time failed to be parsed as RFC3339", zap.Error(err)) } latestTime, err := time.Parse(time.RFC3339, search.LatestTime) if err != nil { - // should be impossible to reach with config validation ssapir.logger.Error("latest_time failed to be parsed as RFC3339", zap.Error(err)) } logs := plog.NewLogs() for idx, splunkLog := range results.Results { - if idx >= search.Limit && search.Limit != 0 { + if (idx+resultCountTracker) >= search.Limit && search.Limit != 0 { + limitReached = true break } // convert log timestamp to ISO8601 (UTC() makes RFC3339 into ISO8601) @@ -109,6 +109,7 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { if 
logTimestamp.UTC().After(latestTime.UTC()) { ssapir.logger.Info("skipping log entry - timestamp after latestTime", zap.Time("time", logTimestamp.UTC()), zap.Time("latestTime", latestTime.UTC())) // logger will only log up to 10 times for a given code block, known weird behavior + // TODO: Consider breaking, assuming all logs are in order continue } if logTimestamp.UTC().Before(earliestTime) { @@ -122,10 +123,10 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { log.SetTimestamp(timestamp) log.Body().SetStr(splunkLog.Raw) - } - if logs.ResourceLogs().Len() == 0 { - ssapir.logger.Info("search returned no logs within the given time range") - return nil + if logs.ResourceLogs().Len() == 0 { + ssapir.logger.Info("search returned no logs within the given time range") + return nil + } } // pass logs, wait for exporter to confirm successful export to GCP @@ -134,17 +135,20 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { // Error from down the pipeline, freak out ssapir.logger.Error("error consuming logs", zap.Error(err)) } + if limitReached { + ssapir.logger.Info("limit reached, stopping search result export") + resultCountTracker += logs.ResourceLogs().Len() + break + } // if the number of results is less than the results per request, we have queried all pages for the search - if len(results.Results) < ssapir.eventBatchSize { + if len(results.Results) < ssapir.config.EventBatchSize { resultCountTracker += len(results.Results) break } resultCountTracker += logs.ResourceLogs().Len() - offset += ssapir.eventBatchSize - time.Sleep(5 * time.Second) + offset += len(results.Results) } - ssapir.logger.Info("search results exported", zap.String("query", search.Query), zap.Int("results", resultCountTracker)) - // search results completely exported, reset checkpoint storage + ssapir.logger.Info("search results exported", zap.String("query", search.Query), zap.Int("total results", resultCountTracker)) } return nil } 
From 527cde05fe94777c903d89df0766b35b65a9efca Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Wed, 13 Nov 2024 15:42:30 -0500 Subject: [PATCH 07/55] break if results earlier than earliest_time --- receiver/splunksearchapireceiver/api.go | 11 +++--- receiver/splunksearchapireceiver/config.go | 18 +++++----- receiver/splunksearchapireceiver/factory.go | 7 ++-- receiver/splunksearchapireceiver/model.go | 3 ++ receiver/splunksearchapireceiver/receiver.go | 38 +++++++++++--------- 5 files changed, 43 insertions(+), 34 deletions(-) diff --git a/receiver/splunksearchapireceiver/api.go b/receiver/splunksearchapireceiver/api.go index f2708f42d..459b5cf51 100644 --- a/receiver/splunksearchapireceiver/api.go +++ b/receiver/splunksearchapireceiver/api.go @@ -22,6 +22,8 @@ import ( "fmt" "io" "net/http" + + "go.uber.org/zap" ) func (ssapir *splunksearchapireceiver) createSearchJob(search string) (CreateJobResponse, error) { @@ -90,15 +92,15 @@ func (ssapir *splunksearchapireceiver) getJobStatus(sid string) (JobStatusRespon return jobStatusResponse, nil } -func (ssapir *splunksearchapireceiver) getSearchResults(sid string, offset int) (SearchResults, error) { - endpoint := fmt.Sprintf("%s/services/search/v2/jobs/%s/results?output_mode=json&offset=%d&count=%d", ssapir.config.Endpoint, sid, offset, ssapir.config.EventBatchSize) +func (ssapir *splunksearchapireceiver) getSearchResults(sid string, offset int, batchSize int) (SearchResults, error) { + endpoint := fmt.Sprintf("%s/services/search/v2/jobs/%s/results?output_mode=json&offset=%d&count=%d", ssapir.config.Endpoint, sid, offset, batchSize) req, err := http.NewRequest("GET", endpoint, nil) if err != nil { return SearchResults{}, err } req.SetBasicAuth(ssapir.config.Username, ssapir.config.Password) - // ssapir.logger.Info("Getting search results", zap.Int("offset", offset), zap.Int("count", ssapir.config.EventBatchSize)) + ssapir.logger.Info("Getting search results", zap.Int("offset", offset), zap.Int("count", batchSize)) 
resp, err := ssapir.client.Do(req) if err != nil { @@ -119,7 +121,6 @@ func (ssapir *splunksearchapireceiver) getSearchResults(sid string, offset int) if err != nil { return SearchResults{}, fmt.Errorf("failed to unmarshal search job results: %v", err) } - fmt.Println("Init offset: ", searchResults.InitOffset) - + fmt.Println("Search results: ", searchResults) return searchResults, nil } diff --git a/receiver/splunksearchapireceiver/config.go b/receiver/splunksearchapireceiver/config.go index d95857742..0e8063923 100644 --- a/receiver/splunksearchapireceiver/config.go +++ b/receiver/splunksearchapireceiver/config.go @@ -29,18 +29,20 @@ var ( // Config struct to represent the configuration for the Splunk Search API receiver type Config struct { confighttp.ClientConfig `mapstructure:",squash"` - Username string `mapstructure:"splunk_username"` - Password string `mapstructure:"splunk_password"` - Searches []Search `mapstructure:"searches"` - EventBatchSize int `mapstructure:"event_batch_size"` + Username string `mapstructure:"splunk_username"` + Password string `mapstructure:"splunk_password"` + Searches []Search `mapstructure:"searches"` + EventBatchSize int `mapstructure:"event_batch_size"` + JobPollInterval time.Duration `mapstructure:"job_poll_interval"` } // Search struct to represent a Splunk search type Search struct { - Query string `mapstructure:"query"` - EarliestTime string `mapstructure:"earliest_time"` - LatestTime string `mapstructure:"latest_time"` - Limit int `mapstructure:"limit"` + Query string `mapstructure:"query"` + EarliestTime string `mapstructure:"earliest_time"` + LatestTime string `mapstructure:"latest_time"` + Limit int `mapstructure:"limit"` + EventBatchSize int `mapstructure:"event_batch_size"` } // Validate validates the Splunk Search API receiver configuration diff --git a/receiver/splunksearchapireceiver/factory.go b/receiver/splunksearchapireceiver/factory.go index 16bddccd0..dc61db414 100644 --- 
a/receiver/splunksearchapireceiver/factory.go +++ b/receiver/splunksearchapireceiver/factory.go @@ -16,7 +16,7 @@ package splunksearchapireceiver import ( "context" - "sync" + "time" "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/config/confighttp" @@ -30,8 +30,8 @@ var ( func createDefaultConfig() component.Config { return &Config{ - ClientConfig: confighttp.NewDefaultClientConfig(), - EventBatchSize: 100, + ClientConfig: confighttp.NewDefaultClientConfig(), + JobPollInterval: 10 * time.Second, } } @@ -46,7 +46,6 @@ func createLogsReceiver(_ context.Context, logsConsumer: consumer, config: ssapirConfig, settings: params.TelemetrySettings, - wg: &sync.WaitGroup{}, } return ssapir, nil } diff --git a/receiver/splunksearchapireceiver/model.go b/receiver/splunksearchapireceiver/model.go index a53e0a882..d711fbf78 100644 --- a/receiver/splunksearchapireceiver/model.go +++ b/receiver/splunksearchapireceiver/model.go @@ -14,11 +14,13 @@ package splunksearchapireceiver +// https://docs.splunk.com/Documentation/Splunk/9.3.1/RESTREF/RESTsearch#search.2Fjobs // CreateJobResponse struct to represent the XML response from Splunk create job endpoint type CreateJobResponse struct { SID string `xml:"sid"` } +// https://docs.splunk.com/Documentation/Splunk/9.3.1/RESTREF/RESTsearch#search.2Fjobs.2F.7Bsearch_id.7D // JobStatusResponse struct to represent the XML response from Splunk job status endpoint type JobStatusResponse struct { Content struct { @@ -47,6 +49,7 @@ type List struct { } `xml:"item"` } +// https://docs.splunk.com/Documentation/Splunk/9.3.1/RESTREF/RESTsearch#search.2Fv2.2Fjobs.2F.7Bsearch_id.7D.2Fresults // SearchResults struct to represent the JSON response from Splunk search results endpoint type SearchResults struct { InitOffset int `json:"init_offset"` diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index 30fe96800..fd205836c 100644 --- 
a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -27,14 +27,19 @@ import ( "go.uber.org/zap" ) +var ( + offset = 0 // offset for pagination and checkpointing + exportedEvents = 0 // track the number of events returned by the results endpoint that are exported + limitReached = false // flag to stop processing search results when limit is reached +) + type splunksearchapireceiver struct { - host component.Host - logger *zap.Logger - logsConsumer consumer.Logs - config *Config - settings component.TelemetrySettings - client *http.Client - eventBatchSize int + host component.Host + logger *zap.Logger + logsConsumer consumer.Logs + config *Config + settings component.TelemetrySettings + client *http.Client } func (ssapir *splunksearchapireceiver) Start(ctx context.Context, host component.Host) error { @@ -96,7 +101,7 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { logs := plog.NewLogs() for idx, splunkLog := range results.Results { - if (idx+resultCountTracker) >= search.Limit && search.Limit != 0 { + if (idx+exportedEvents) >= search.Limit && search.Limit != 0 { limitReached = true break } @@ -109,12 +114,11 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { if logTimestamp.UTC().After(latestTime.UTC()) { ssapir.logger.Info("skipping log entry - timestamp after latestTime", zap.Time("time", logTimestamp.UTC()), zap.Time("latestTime", latestTime.UTC())) // logger will only log up to 10 times for a given code block, known weird behavior - // TODO: Consider breaking, assuming all logs are in order continue } if logTimestamp.UTC().Before(earliestTime) { ssapir.logger.Info("skipping log entry - timestamp before earliestTime", zap.Time("time", logTimestamp.UTC()), zap.Time("earliestTime", earliestTime.UTC())) - continue + break } log := logs.ResourceLogs().AppendEmpty().ScopeLogs().AppendEmpty().LogRecords().AppendEmpty() @@ -137,18 +141,18 @@ func (ssapir 
*splunksearchapireceiver) runQueries(ctx context.Context) error { } if limitReached { ssapir.logger.Info("limit reached, stopping search result export") - resultCountTracker += logs.ResourceLogs().Len() + exportedEvents += logs.ResourceLogs().Len() break } // if the number of results is less than the results per request, we have queried all pages for the search - if len(results.Results) < ssapir.config.EventBatchSize { - resultCountTracker += len(results.Results) + if len(results.Results) < search.EventBatchSize { + exportedEvents += len(results.Results) break } - resultCountTracker += logs.ResourceLogs().Len() + exportedEvents += logs.ResourceLogs().Len() offset += len(results.Results) } - ssapir.logger.Info("search results exported", zap.String("query", search.Query), zap.Int("total results", resultCountTracker)) + ssapir.logger.Info("search results exported", zap.String("query", search.Query), zap.Int("total results", exportedEvents)) } return nil } @@ -200,8 +204,8 @@ func (ssapir *splunksearchapireceiver) isSearchCompleted(sid string) (bool, erro return false, nil } -func (ssapir *splunksearchapireceiver) getSplunkSearchResults(config *Config, sid string, offset int) (SearchResults, error) { - resp, err := ssapir.getSearchResults(config, sid, offset) +func (ssapir *splunksearchapireceiver) getSplunkSearchResults(sid string, offset int, batchSize int) (SearchResults, error) { + resp, err := ssapir.getSearchResults(sid, offset, batchSize) if err != nil { return SearchResults{}, err } From af6f86030ad5df4362af584dadd5cdce78dd1cda Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Wed, 13 Nov 2024 16:05:09 -0500 Subject: [PATCH 08/55] fix lint --- receiver/splunksearchapireceiver/model.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/receiver/splunksearchapireceiver/model.go b/receiver/splunksearchapireceiver/model.go index d711fbf78..2d5a6dfec 100644 --- a/receiver/splunksearchapireceiver/model.go +++ 
b/receiver/splunksearchapireceiver/model.go @@ -14,14 +14,14 @@ package splunksearchapireceiver -// https://docs.splunk.com/Documentation/Splunk/9.3.1/RESTREF/RESTsearch#search.2Fjobs // CreateJobResponse struct to represent the XML response from Splunk create job endpoint +// https://docs.splunk.com/Documentation/Splunk/9.3.1/RESTREF/RESTsearch#search.2Fjobs type CreateJobResponse struct { SID string `xml:"sid"` } -// https://docs.splunk.com/Documentation/Splunk/9.3.1/RESTREF/RESTsearch#search.2Fjobs.2F.7Bsearch_id.7D // JobStatusResponse struct to represent the XML response from Splunk job status endpoint +// https://docs.splunk.com/Documentation/Splunk/9.3.1/RESTREF/RESTsearch#search.2Fjobs.2F.7Bsearch_id.7D type JobStatusResponse struct { Content struct { Type string `xml:"type,attr"` @@ -49,8 +49,8 @@ type List struct { } `xml:"item"` } -// https://docs.splunk.com/Documentation/Splunk/9.3.1/RESTREF/RESTsearch#search.2Fv2.2Fjobs.2F.7Bsearch_id.7D.2Fresults // SearchResults struct to represent the JSON response from Splunk search results endpoint +// https://docs.splunk.com/Documentation/Splunk/9.3.1/RESTREF/RESTsearch#search.2Fv2.2Fjobs.2F.7Bsearch_id.7D.2Fresults type SearchResults struct { InitOffset int `json:"init_offset"` Results []struct { From 080023c3214a1b940ebc237fce9d7afd5c6b88c0 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Thu, 14 Nov 2024 10:49:18 -0500 Subject: [PATCH 09/55] check for earliest/latest in query --- receiver/splunksearchapireceiver/config.go | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/receiver/splunksearchapireceiver/config.go b/receiver/splunksearchapireceiver/config.go index 0e8063923..d9c35720b 100644 --- a/receiver/splunksearchapireceiver/config.go +++ b/receiver/splunksearchapireceiver/config.go @@ -16,6 +16,7 @@ package splunksearchapireceiver import ( "errors" + "fmt" "strings" "time" @@ -74,6 +75,10 @@ func (cfg *Config) Validate() error { return errNonStandaloneSearchQuery } + if 
strings.Contains(search.Query, "earliest=") || strings.Contains(search.Query, "latest=") { + return fmt.Errorf("time query parameters must be configured using only the \"earliest_time\" and \"latest_time\" configuration parameters") + } + if search.EarliestTime == "" { return errors.New("missing earliest_time in search") } From 294c85585566bf58d41508c7430ea4eb84f96407 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Mon, 11 Nov 2024 10:47:41 -0500 Subject: [PATCH 10/55] config unit tests --- .../{api.go => client.go} | 73 +++-- receiver/splunksearchapireceiver/config.go | 4 +- .../splunksearchapireceiver/config_test.go | 287 ++++++++++++++++++ receiver/splunksearchapireceiver/go.mod | 4 + receiver/splunksearchapireceiver/go.sum | 9 + .../splunksearchapireceiver/metadata.yaml | 7 + receiver/splunksearchapireceiver/receiver.go | 11 +- 7 files changed, 365 insertions(+), 30 deletions(-) rename receiver/splunksearchapireceiver/{api.go => client.go} (55%) create mode 100644 receiver/splunksearchapireceiver/config_test.go create mode 100644 receiver/splunksearchapireceiver/metadata.yaml diff --git a/receiver/splunksearchapireceiver/api.go b/receiver/splunksearchapireceiver/client.go similarity index 55% rename from receiver/splunksearchapireceiver/api.go rename to receiver/splunksearchapireceiver/client.go index 459b5cf51..39951440f 100644 --- a/receiver/splunksearchapireceiver/api.go +++ b/receiver/splunksearchapireceiver/client.go @@ -12,31 +12,61 @@ // See the License for the specific language governing permissions and // limitations under the License. -// Package splunksearchapireceiver provides a receiver that uses the Splunk API to migrate event data. 
package splunksearchapireceiver import ( "bytes" + "context" "encoding/json" "encoding/xml" "fmt" "io" "net/http" + "go.opentelemetry.io/collector/component" "go.uber.org/zap" ) -func (ssapir *splunksearchapireceiver) createSearchJob(search string) (CreateJobResponse, error) { - endpoint := fmt.Sprintf("%s/services/search/jobs", ssapir.config.Endpoint) +type splunkSearchAPIClient interface { + CreateSearchJob(search string) (CreateJobResponse, error) + GetJobStatus(searchID string) (JobStatusResponse, error) + GetSearchResults(searchID string) (SearchResultsResponse, error) +} + +type defaultSplunkSearchAPIClient struct { + client *http.Client + endpoint string + logger *zap.Logger + username string + password string +} + +func newSplunkSearchAPIClient(ctx context.Context, settings component.TelemetrySettings, conf Config, host component.Host) (*defaultSplunkSearchAPIClient, error) { + client, err := conf.ClientConfig.ToClient(ctx, host, settings) + if err != nil { + return nil, err + } + return &defaultSplunkSearchAPIClient{ + client: client, + endpoint: conf.Endpoint, + logger: settings.Logger, + username: conf.Username, + password: conf.Password, + }, nil +} + +func (c defaultSplunkSearchAPIClient) CreateSearchJob(search string) (CreateJobResponse, error) { + // fmt.Println("Creating search job for search: ", search) + endpoint := fmt.Sprintf("%s/services/search/jobs", c.endpoint) reqBody := fmt.Sprintf(`search=%s`, search) req, err := http.NewRequest("POST", endpoint, bytes.NewBuffer([]byte(reqBody))) if err != nil { return CreateJobResponse{}, err } - req.SetBasicAuth(ssapir.config.Username, ssapir.config.Password) + req.SetBasicAuth(c.username, c.password) - resp, err := ssapir.client.Do(req) + resp, err := c.client.Do(req) if err != nil { return CreateJobResponse{}, err } @@ -59,17 +89,17 @@ func (ssapir *splunksearchapireceiver) createSearchJob(search string) (CreateJob return jobResponse, nil } -func (ssapir *splunksearchapireceiver) getJobStatus(sid 
string) (JobStatusResponse, error) { +func (c defaultSplunkSearchAPIClient) GetJobStatus(sid string) (JobStatusResponse, error) { // fmt.Println("Getting job status") - endpoint := fmt.Sprintf("%s/services/search/v2/jobs/%s", ssapir.config.Endpoint, sid) + endpoint := fmt.Sprintf("%s/services/search/v2/jobs/%s", c.endpoint, sid) req, err := http.NewRequest("GET", endpoint, nil) if err != nil { return JobStatusResponse{}, err } - req.SetBasicAuth(ssapir.config.Username, ssapir.config.Password) + req.SetBasicAuth(c.username, c.password) - resp, err := ssapir.client.Do(req) + resp, err := c.client.Do(req) if err != nil { return JobStatusResponse{}, err } @@ -92,35 +122,34 @@ func (ssapir *splunksearchapireceiver) getJobStatus(sid string) (JobStatusRespon return jobStatusResponse, nil } -func (ssapir *splunksearchapireceiver) getSearchResults(sid string, offset int, batchSize int) (SearchResults, error) { - endpoint := fmt.Sprintf("%s/services/search/v2/jobs/%s/results?output_mode=json&offset=%d&count=%d", ssapir.config.Endpoint, sid, offset, batchSize) +func (c defaultSplunkSearchAPIClient) GetSearchResults(sid string) (SearchResultsResponse, error) { + endpoint := fmt.Sprintf("%s/services/search/v2/jobs/%s/results?output_mode=json", c.endpoint, sid) req, err := http.NewRequest("GET", endpoint, nil) if err != nil { - return SearchResults{}, err + return SearchResultsResponse{}, err } - req.SetBasicAuth(ssapir.config.Username, ssapir.config.Password) + req.SetBasicAuth(c.username, c.password) - ssapir.logger.Info("Getting search results", zap.Int("offset", offset), zap.Int("count", batchSize)) - - resp, err := ssapir.client.Do(req) + resp, err := c.client.Do(req) if err != nil { - return SearchResults{}, err + return SearchResultsResponse{}, err } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { - return SearchResults{}, fmt.Errorf("failed to get search job results: %d", resp.StatusCode) + return SearchResultsResponse{}, fmt.Errorf("failed to get search 
job results: %d", resp.StatusCode) } - var searchResults SearchResults + var searchResults SearchResultsResponse body, err := io.ReadAll(resp.Body) if err != nil { - return SearchResults{}, fmt.Errorf("failed to read search job results response: %v", err) + return SearchResultsResponse{}, fmt.Errorf("failed to read search job results response: %v", err) } + // fmt.Println("Body: ", string(body)) err = json.Unmarshal(body, &searchResults) if err != nil { - return SearchResults{}, fmt.Errorf("failed to unmarshal search job results: %v", err) + return SearchResultsResponse{}, fmt.Errorf("failed to unmarshal search job results: %v", err) } - fmt.Println("Search results: ", searchResults) + return searchResults, nil } diff --git a/receiver/splunksearchapireceiver/config.go b/receiver/splunksearchapireceiver/config.go index d9c35720b..bb13533ab 100644 --- a/receiver/splunksearchapireceiver/config.go +++ b/receiver/splunksearchapireceiver/config.go @@ -89,12 +89,12 @@ func (cfg *Config) Validate() error { // parse time strings to time.Time _, err := time.Parse(time.RFC3339, search.EarliestTime) if err != nil { - return errors.New("earliest_time failed to be parsed as RFC3339") + return errors.New("earliest_time failed to parse as RFC3339") } _, err = time.Parse(time.RFC3339, search.LatestTime) if err != nil { - return errors.New("latest_time failed to be parsed as RFC3339") + return errors.New("latest_time failed to parse as RFC3339") } } diff --git a/receiver/splunksearchapireceiver/config_test.go b/receiver/splunksearchapireceiver/config_test.go new file mode 100644 index 000000000..b3ea8a4a2 --- /dev/null +++ b/receiver/splunksearchapireceiver/config_test.go @@ -0,0 +1,287 @@ +package splunksearchapireceiver + +import ( + "testing" + "time" + + "github.com/stretchr/testify/require" +) + +func TestValidate(t *testing.T) { + testCases := []struct { + desc string + endpoint string + username string + password string + searches []Search + errExpected bool + errText string 
+ }{ + { + desc: "Missing endpoint", + username: "user", + password: "password", + searches: []Search{ + { + Query: "search index=_internal", + EarliestTime: "2024-10-30T04:00:00.000Z", + LatestTime: "2024-10-30T14:00:00.000Z", + }, + }, + errExpected: true, + errText: "missing Splunk server endpoint", + }, + { + desc: "Missing username", + endpoint: "http://localhost:8089", + password: "password", + searches: []Search{ + { + Query: "search index=_internal", + EarliestTime: "2024-10-30T04:00:00.000Z", + LatestTime: "2024-10-30T14:00:00.000Z", + }, + }, + errExpected: true, + errText: "missing Splunk username", + }, + { + desc: "Missing password", + endpoint: "http://localhost:8089", + username: "user", + searches: []Search{ + { + Query: "search index=_internal", + EarliestTime: "2024-10-30T04:00:00.000Z", + LatestTime: "2024-10-30T14:00:00.000Z", + }, + }, + errExpected: true, + errText: "missing Splunk password", + }, + { + desc: "Missing searches", + endpoint: "http://localhost:8089", + username: "user", + password: "password", + errExpected: true, + errText: "at least one search must be provided", + }, + { + desc: "Missing query", + endpoint: "http://localhost:8089", + username: "user", + password: "password", + searches: []Search{ + { + EarliestTime: "2024-10-30T04:00:00.000Z", + LatestTime: "2024-10-30T14:00:00.000Z", + }, + }, + errExpected: true, + errText: "missing query in search", + }, + { + desc: "Missing earliest_time", + endpoint: "http://localhost:8089", + username: "user", + password: "password", + searches: []Search{ + { + Query: "search index=_internal", + LatestTime: "2024-10-30T14:00:00.000Z", + }, + }, + errExpected: true, + errText: "missing earliest_time in search", + }, + { + desc: "Unparsable earliest_time", + endpoint: "http://localhost:8089", + username: "user", + password: "password", + searches: []Search{ + { + Query: "search index=_internal", + EarliestTime: "-1hr", + LatestTime: "2024-10-30T14:00:00.000Z", + }, + }, + errExpected: 
true, + errText: "earliest_time failed to parse as RFC3339", + }, + { + desc: "Missing latest_time", + endpoint: "http://localhost:8089", + username: "user", + password: "password", + searches: []Search{ + { + Query: "search index=_internal", + EarliestTime: "2024-10-30T04:00:00.000Z", + }, + }, + errExpected: true, + errText: "missing latest_time in search", + }, + { + desc: "Unparsable latest_time", + endpoint: "http://localhost:8089", + username: "user", + password: "password", + searches: []Search{ + { + Query: "search index=_internal", + EarliestTime: "2024-10-30T04:00:00.000Z", + LatestTime: "-1hr", + }, + }, + errExpected: true, + errText: "latest_time failed to parse as RFC3339", + }, + { + desc: "earliest_time after latest_time", + endpoint: "http://localhost:8089", + username: "user", + password: "password", + searches: []Search{ + { + Query: "search index=_internal", + EarliestTime: "2024-10-30T14:00:00.000Z", + LatestTime: "2024-10-30T04:00:00.000Z", + }, + }, + errExpected: true, + errText: "earliest_time must be earlier than latest_time", + }, + { + desc: "earliest_time and latest_time equal", + endpoint: "http://localhost:8089", + username: "user", + password: "password", + searches: []Search{ + { + Query: "search index=_internal", + EarliestTime: "2024-10-30T14:00:00.000Z", + LatestTime: "2024-10-30T14:00:00.000Z", + }, + }, + errExpected: true, + errText: "earliest_time must be earlier than latest_time", + }, + { + desc: "earliest_time in the future", + endpoint: "http://localhost:8089", + username: "user", + password: "password", + searches: []Search{ + { + Query: "search index=_internal", + EarliestTime: time.Now().Add(1 * time.Hour).Format(time.RFC3339), + LatestTime: time.Now().Add(10 * time.Hour).Format(time.RFC3339), + }, + }, + errExpected: true, + errText: "earliest_time must be earlier than current time", + }, + { + desc: "latest_time in the future", + endpoint: "http://localhost:8089", + username: "user", + password: "password", + 
searches: []Search{ + { + Query: "search index=_internal", + EarliestTime: "2024-10-30T04:00:00.000Z", + LatestTime: time.Now().Add(10 * time.Hour).Format(time.RFC3339), + }, + }, + errExpected: true, + errText: "latest_time must be earlier than or equal to current time", + }, + { + desc: "Invalid query chaining", + endpoint: "http://localhost:8089", + username: "user", + password: "password", + searches: []Search{ + { + Query: "search index=_internal | stats count by sourcetype", + EarliestTime: "2024-10-30T04:00:00.000Z", + LatestTime: "2024-10-30T14:00:00.000Z", + }, + }, + errExpected: true, + errText: "command chaining is not supported for queries", + }, + { + desc: "Valid config", + endpoint: "http://localhost:8089", + username: "user", + password: "password", + searches: []Search{ + { + Query: "search index=_internal", + EarliestTime: "2024-10-30T04:00:00.000Z", + LatestTime: "2024-10-30T14:00:00.000Z", + }, + }, + errExpected: false, + }, + { + desc: "Valid config with multiple searches", + endpoint: "http://localhost:8089", + username: "user", + password: "password", + searches: []Search{ + { + Query: "search index=_internal", + EarliestTime: "2024-10-30T04:00:00.000Z", + LatestTime: "2024-10-30T14:00:00.000Z", + }, + { + Query: "search index=_audit", + EarliestTime: "2024-10-30T04:00:00.000Z", + LatestTime: "2024-10-30T14:00:00.000Z", + }, + }, + errExpected: false, + }, + { + desc: "Valid config with limit", + endpoint: "http://localhost:8089", + username: "user", + password: "password", + searches: []Search{ + { + Query: "search index=_internal", + EarliestTime: "2024-10-30T04:00:00.000Z", + LatestTime: "2024-10-30T14:00:00.000Z", + Limit: 10, + }, + }, + errExpected: false, + }, + } + for _, tc := range testCases { + t.Run(tc.desc, func(t *testing.T) { + cfg := NewFactory().CreateDefaultConfig().(*Config) + cfg.Endpoint = tc.endpoint + cfg.Username = tc.username + cfg.Password = tc.password + cfg.Searches = tc.searches + err := cfg.Validate() + if 
tc.errExpected && err == nil { + t.Errorf("expected error, got nil") + } + if !tc.errExpected && err != nil { + t.Errorf("unexpected error: %v", err) + } + if tc.errExpected { + require.EqualError(t, err, tc.errText) + return + } + require.NoError(t, err) + }) + } +} diff --git a/receiver/splunksearchapireceiver/go.mod b/receiver/splunksearchapireceiver/go.mod index 15fc44dfc..c419d11af 100644 --- a/receiver/splunksearchapireceiver/go.mod +++ b/receiver/splunksearchapireceiver/go.mod @@ -3,6 +3,7 @@ module github.com/open-telemetry/opentelemtry-collector-contrib/receiver/splunks go 1.22.5 require ( + github.com/stretchr/testify v1.9.0 go.opentelemetry.io/collector/component v0.113.0 go.opentelemetry.io/collector/consumer v0.113.0 go.opentelemetry.io/collector/pdata v1.19.0 @@ -11,6 +12,7 @@ require ( ) require ( + github.com/davecgh/go-spew v1.1.1 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/fsnotify/fsnotify v1.8.0 // indirect github.com/go-logr/logr v1.4.2 // indirect @@ -18,6 +20,7 @@ require ( github.com/golang/snappy v0.0.4 // indirect github.com/klauspost/compress v1.17.11 // indirect github.com/pierrec/lz4/v4 v4.1.21 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect github.com/rs/cors v1.11.1 // indirect go.opentelemetry.io/collector/client v1.19.0 // indirect go.opentelemetry.io/collector/config/configauth v0.113.0 // indirect @@ -28,6 +31,7 @@ require ( go.opentelemetry.io/collector/extension v0.113.0 // indirect go.opentelemetry.io/collector/extension/auth v0.113.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.56.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect ) require ( diff --git a/receiver/splunksearchapireceiver/go.sum b/receiver/splunksearchapireceiver/go.sum index d15e6729a..a061e9529 100644 --- a/receiver/splunksearchapireceiver/go.sum +++ b/receiver/splunksearchapireceiver/go.sum @@ -25,6 +25,10 @@ github.com/kisielk/errcheck v1.5.0/go.mod 
h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc= github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -34,6 +38,8 @@ github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ github.com/pierrec/lz4/v4 v4.1.21/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= +github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= github.com/rs/cors v1.11.1 h1:eU3gRzXLRK57F5rKMGMZURNdIG4EoAmX8k94r9wXWHA= github.com/rs/cors v1.11.1/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= @@ -133,5 +139,8 @@ google.golang.org/grpc v1.67.1 h1:zWnc1Vrcno+lHZCOofnIMvycFcc0QRGIzm9dhnDX68E= google.golang.org/grpc v1.67.1/go.mod h1:1gLDyUQU7CTLJI90u3nXZ9ekeghjeM7pTDZlqFNg2AA= google.golang.org/protobuf v1.35.1 
h1:m3LfL6/Ca+fqnjnlqQXNpFPABW1UD7mjh8KO2mKFytA= google.golang.org/protobuf v1.35.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/receiver/splunksearchapireceiver/metadata.yaml b/receiver/splunksearchapireceiver/metadata.yaml new file mode 100644 index 000000000..aff667664 --- /dev/null +++ b/receiver/splunksearchapireceiver/metadata.yaml @@ -0,0 +1,7 @@ +type: splunksearchapi + +status: + class: receiver + stability: + alpha: [logs] + distributions: [observiq] \ No newline at end of file diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index fd205836c..fa62f2c63 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -17,7 +17,6 @@ package splunksearchapireceiver import ( "context" "fmt" - "net/http" "time" "go.opentelemetry.io/collector/component" @@ -39,16 +38,16 @@ type splunksearchapireceiver struct { logsConsumer consumer.Logs config *Config settings component.TelemetrySettings - client *http.Client + client splunkSearchAPIClient } func (ssapir *splunksearchapireceiver) Start(ctx context.Context, host component.Host) error { ssapir.host = host - client, err := ssapir.config.ClientConfig.ToClient(ctx, host, ssapir.settings) + var err error + ssapir.client, err = newSplunkSearchAPIClient(ctx, ssapir.settings, *ssapir.config, ssapir.host) if err != nil { return err } - ssapir.client = client go ssapir.runQueries(ctx) return nil } @@ -68,7 +67,7 @@ func (ssapir 
*splunksearchapireceiver) runQueries(ctx context.Context) error { // wait for search to complete for { - done, err := ssapir.isSearchCompleted(ssapir.config, searchID) + done, err := ssapir.isSearchCompleted(searchID) if err != nil { ssapir.logger.Error("error checking search status", zap.Error(err)) } @@ -207,7 +206,7 @@ func (ssapir *splunksearchapireceiver) isSearchCompleted(sid string) (bool, erro func (ssapir *splunksearchapireceiver) getSplunkSearchResults(sid string, offset int, batchSize int) (SearchResults, error) { resp, err := ssapir.getSearchResults(sid, offset, batchSize) if err != nil { - return SearchResults{}, err + return SearchResultsResponse{}, err } return resp, nil } From 8ec2d768a2ef5005fc3adac41451ce169110d319 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Mon, 11 Nov 2024 11:10:19 -0500 Subject: [PATCH 11/55] package comment --- receiver/splunksearchapireceiver/client.go | 1 + receiver/splunksearchapireceiver/config_test.go | 14 ++++++++++++++ 2 files changed, 15 insertions(+) diff --git a/receiver/splunksearchapireceiver/client.go b/receiver/splunksearchapireceiver/client.go index 39951440f..f42851353 100644 --- a/receiver/splunksearchapireceiver/client.go +++ b/receiver/splunksearchapireceiver/client.go @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. +// Package splunksearchapireceiver contains the Splunk Search API receiver. package splunksearchapireceiver import ( diff --git a/receiver/splunksearchapireceiver/config_test.go b/receiver/splunksearchapireceiver/config_test.go index b3ea8a4a2..42036b46c 100644 --- a/receiver/splunksearchapireceiver/config_test.go +++ b/receiver/splunksearchapireceiver/config_test.go @@ -1,3 +1,17 @@ +// Copyright observIQ, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package splunksearchapireceiver import ( From 323d1af7056014903a858dd17ae20a9f5687dd56 Mon Sep 17 00:00:00 2001 From: Justin Voss <90650155+justinianvoss22@users.noreply.github.com> Date: Tue, 12 Nov 2024 10:07:27 -0500 Subject: [PATCH 12/55] feat(chronicleexporter): Support dynamic namespace and ingestion labels (#1939) * add namespace and ingenstion logs initial commit * working except ingestion labels * ingestion labels from attributes * use proper log entry batch * namespace and ingestion logs no config overwrite * delete OverrideNamespace and OverrideIngestionLabeles * PR changes * fix unit tests * modify tests again * marshal changes * readme and namespace check --- exporter/chronicleexporter/marshal_test.go | 1 + 1 file changed, 1 insertion(+) diff --git a/exporter/chronicleexporter/marshal_test.go b/exporter/chronicleexporter/marshal_test.go index fb5694239..ec68d91da 100644 --- a/exporter/chronicleexporter/marshal_test.go +++ b/exporter/chronicleexporter/marshal_test.go @@ -462,6 +462,7 @@ func TestProtoMarshaler_MarshalRawLogsForHTTP(t *testing.T) { cfg: Config{ CustomerID: uuid.New().String(), LogType: "WINEVTLOG", + IngestionLabels: map[string]string{`chronicle_ingestion_label["key1"]`: "value1", `chronicle_ingestion_label["key2"]`: "value2"}, RawLogField: "attributes", OverrideLogType: false, }, From bf90b001eca0e1ec39d802851fe03f4f6d7338f6 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Wed, 13 Nov 2024 13:47:20 -0500 Subject: [PATCH 13/55] debug logs --- receiver/splunksearchapireceiver/receiver.go | 4 ++-- 1 file changed, 2 
insertions(+), 2 deletions(-) diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index fa62f2c63..eec38b6cb 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -162,7 +162,7 @@ func (ssapir *splunksearchapireceiver) pollSearchCompletion(ctx context.Context, for { select { case <-t.C: - ssapir.logger.Info("polling for search completion") + ssapir.logger.Debug("polling for search completion") done, err := ssapir.isSearchCompleted(searchID) if err != nil { return fmt.Errorf("error polling for search completion: %v", err) @@ -171,7 +171,7 @@ func (ssapir *splunksearchapireceiver) pollSearchCompletion(ctx context.Context, ssapir.logger.Info("search completed") return nil } - ssapir.logger.Info("search not completed yet") + ssapir.logger.Debug("search not completed yet") case <-ctx.Done(): return nil } From abb4e610c075f6bb6d44bf5db0daf39cccfd49e0 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Wed, 13 Nov 2024 13:56:21 -0500 Subject: [PATCH 14/55] rm unnecessary clauses --- receiver/splunksearchapireceiver/client.go | 2 - .../splunksearchapireceiver/config_test.go | 67 ------------------- .../splunksearchapireceiver/metadata.yaml | 3 +- 3 files changed, 2 insertions(+), 70 deletions(-) diff --git a/receiver/splunksearchapireceiver/client.go b/receiver/splunksearchapireceiver/client.go index f42851353..517244a1c 100644 --- a/receiver/splunksearchapireceiver/client.go +++ b/receiver/splunksearchapireceiver/client.go @@ -57,7 +57,6 @@ func newSplunkSearchAPIClient(ctx context.Context, settings component.TelemetryS } func (c defaultSplunkSearchAPIClient) CreateSearchJob(search string) (CreateJobResponse, error) { - // fmt.Println("Creating search job for search: ", search) endpoint := fmt.Sprintf("%s/services/search/jobs", c.endpoint) reqBody := fmt.Sprintf(`search=%s`, search) @@ -91,7 +90,6 @@ func (c defaultSplunkSearchAPIClient) CreateSearchJob(search 
string) (CreateJobR } func (c defaultSplunkSearchAPIClient) GetJobStatus(sid string) (JobStatusResponse, error) { - // fmt.Println("Getting job status") endpoint := fmt.Sprintf("%s/services/search/v2/jobs/%s", c.endpoint, sid) req, err := http.NewRequest("GET", endpoint, nil) diff --git a/receiver/splunksearchapireceiver/config_test.go b/receiver/splunksearchapireceiver/config_test.go index 42036b46c..795570138 100644 --- a/receiver/splunksearchapireceiver/config_test.go +++ b/receiver/splunksearchapireceiver/config_test.go @@ -16,7 +16,6 @@ package splunksearchapireceiver import ( "testing" - "time" "github.com/stretchr/testify/require" ) @@ -153,66 +152,6 @@ func TestValidate(t *testing.T) { errExpected: true, errText: "latest_time failed to parse as RFC3339", }, - { - desc: "earliest_time after latest_time", - endpoint: "http://localhost:8089", - username: "user", - password: "password", - searches: []Search{ - { - Query: "search index=_internal", - EarliestTime: "2024-10-30T14:00:00.000Z", - LatestTime: "2024-10-30T04:00:00.000Z", - }, - }, - errExpected: true, - errText: "earliest_time must be earlier than latest_time", - }, - { - desc: "earliest_time and latest_time equal", - endpoint: "http://localhost:8089", - username: "user", - password: "password", - searches: []Search{ - { - Query: "search index=_internal", - EarliestTime: "2024-10-30T14:00:00.000Z", - LatestTime: "2024-10-30T14:00:00.000Z", - }, - }, - errExpected: true, - errText: "earliest_time must be earlier than latest_time", - }, - { - desc: "earliest_time in the future", - endpoint: "http://localhost:8089", - username: "user", - password: "password", - searches: []Search{ - { - Query: "search index=_internal", - EarliestTime: time.Now().Add(1 * time.Hour).Format(time.RFC3339), - LatestTime: time.Now().Add(10 * time.Hour).Format(time.RFC3339), - }, - }, - errExpected: true, - errText: "earliest_time must be earlier than current time", - }, - { - desc: "latest_time in the future", - endpoint: 
"http://localhost:8089", - username: "user", - password: "password", - searches: []Search{ - { - Query: "search index=_internal", - EarliestTime: "2024-10-30T04:00:00.000Z", - LatestTime: time.Now().Add(10 * time.Hour).Format(time.RFC3339), - }, - }, - errExpected: true, - errText: "latest_time must be earlier than or equal to current time", - }, { desc: "Invalid query chaining", endpoint: "http://localhost:8089", @@ -285,12 +224,6 @@ func TestValidate(t *testing.T) { cfg.Password = tc.password cfg.Searches = tc.searches err := cfg.Validate() - if tc.errExpected && err == nil { - t.Errorf("expected error, got nil") - } - if !tc.errExpected && err != nil { - t.Errorf("unexpected error: %v", err) - } if tc.errExpected { require.EqualError(t, err, tc.errText) return diff --git a/receiver/splunksearchapireceiver/metadata.yaml b/receiver/splunksearchapireceiver/metadata.yaml index aff667664..b3244de60 100644 --- a/receiver/splunksearchapireceiver/metadata.yaml +++ b/receiver/splunksearchapireceiver/metadata.yaml @@ -4,4 +4,5 @@ status: class: receiver stability: alpha: [logs] - distributions: [observiq] \ No newline at end of file + distributions: [observiq] + \ No newline at end of file From c18122bef2e64621de3e1620dc236dfc43342035 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Wed, 13 Nov 2024 14:24:37 -0500 Subject: [PATCH 15/55] fix error wording --- receiver/splunksearchapireceiver/config_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/receiver/splunksearchapireceiver/config_test.go b/receiver/splunksearchapireceiver/config_test.go index 795570138..9e0741fa8 100644 --- a/receiver/splunksearchapireceiver/config_test.go +++ b/receiver/splunksearchapireceiver/config_test.go @@ -165,7 +165,7 @@ func TestValidate(t *testing.T) { }, }, errExpected: true, - errText: "command chaining is not supported for queries", + errText: "only standalone search commands can be used for scraping data", }, { desc: "Valid config", From 
010713382672e7ea2451c677f325d4347b76ece9 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Wed, 13 Nov 2024 16:13:12 -0500 Subject: [PATCH 16/55] rm space --- receiver/splunksearchapireceiver/metadata.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/receiver/splunksearchapireceiver/metadata.yaml b/receiver/splunksearchapireceiver/metadata.yaml index b3244de60..3ca815db9 100644 --- a/receiver/splunksearchapireceiver/metadata.yaml +++ b/receiver/splunksearchapireceiver/metadata.yaml @@ -5,4 +5,3 @@ status: stability: alpha: [logs] distributions: [observiq] - \ No newline at end of file From b072ac6c72d0978572b4845f8996005cc68f3560 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Tue, 19 Nov 2024 10:33:31 -0500 Subject: [PATCH 17/55] wip --- .../splunksearchapireceiver/client_test.go | 108 ++++++++++++++++++ .../splunksearchapireceiver/factory_test.go | 42 +++++++ receiver/splunksearchapireceiver/go.mod | 8 ++ receiver/splunksearchapireceiver/go.sum | 6 + 4 files changed, 164 insertions(+) create mode 100644 receiver/splunksearchapireceiver/client_test.go create mode 100644 receiver/splunksearchapireceiver/factory_test.go diff --git a/receiver/splunksearchapireceiver/client_test.go b/receiver/splunksearchapireceiver/client_test.go new file mode 100644 index 000000000..6e8a8353d --- /dev/null +++ b/receiver/splunksearchapireceiver/client_test.go @@ -0,0 +1,108 @@ +// Copyright observIQ, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package splunksearchapireceiver + +import ( + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/require" +) + +var ( + server = newMockSplunkServer() + testClient = defaultSplunkSearchAPIClient{ + client: server.Client(), + endpoint: server.URL, + } +) + +func TestCreateSearchJob(t *testing.T) { + // valid search + resp, err := testClient.CreateSearchJob("index=otel") + require.NoError(t, err) + require.Equal(t, "123456", resp.SID) +} + +func TestGetJobStatus(t *testing.T) { + resp, err := testClient.GetJobStatus("123456") + require.NoError(t, err) + require.Equal(t, "DONE", resp.Content.Dict.Keys[0].Value) + require.Equal(t, "text/xml", resp.Content.Type) +} + +func TestGetSearchResults(t *testing.T) { + +} + +// mock Splunk servers +func newMockSplunkServer() *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + switch req.URL.Path { + case "/services/search/jobs": + rw.Header().Set("Content-Type", "application/xml") + rw.WriteHeader(http.StatusCreated) + rw.Write([]byte(` + + 123456 + + `)) + case "/services/search/v2/jobs/123456": + rw.Header().Set("Content-Type", "application/xml") + rw.WriteHeader(http.StatusOK) + rw.Write([]byte(` + + + + DONE + + + `)) + case "/services/search/v2/jobs/123456/results": + rw.Header().Set("Content-Type", "application/json") + rw.WriteHeader(http.StatusOK) + rw.Write(splunkEventsResultsP1) + default: + rw.WriteHeader(http.StatusNotFound) + } + })) +} + +var splunkEventsResultsP1 = []byte(`{ + "init_offset": 0, + "results": [ + { + "_raw": "Hello, world!", + "_time": "2024-11-14T13:02:31.000-05:00" + }, + { + "_raw": "Goodbye, world!", + "_time": "2024-11-14T13:02:30.000-05:00" + }, + { + "_raw": "lorem ipsum", + "_time": "2024-11-14T13:02:29.000-05:00" + }, + { + "_raw": "dolor sit amet", + "_time": "2024-11-14T13:02:28.000-05:00" + }, + { + "_raw": "consectetur adipiscing elit", + "_time": "2024-11-14T13:02:27.000-05:00" + } + ] +}`) 
diff --git a/receiver/splunksearchapireceiver/factory_test.go b/receiver/splunksearchapireceiver/factory_test.go new file mode 100644 index 000000000..e3582a19a --- /dev/null +++ b/receiver/splunksearchapireceiver/factory_test.go @@ -0,0 +1,42 @@ +// Copyright observIQ, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package splunksearchapireceiver + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" + "go.opentelemetry.io/collector/consumer/consumertest" + "go.opentelemetry.io/collector/receiver/receivertest" +) + +func TestType(t *testing.T) { + factory := NewFactory() + ft := factory.Type() + require.EqualValues(t, "splunksearchapi", ft.String()) +} + +func TestCreateLogsReceiver(t *testing.T) { + factory := NewFactory() + test, err := factory.CreateLogs( + context.Background(), + receivertest.NewNopSettings(), + createDefaultConfig(), + consumertest.NewNop(), + ) + require.NoError(t, err) + require.NotNil(t, test) +} diff --git a/receiver/splunksearchapireceiver/go.mod b/receiver/splunksearchapireceiver/go.mod index c419d11af..36f3fb851 100644 --- a/receiver/splunksearchapireceiver/go.mod +++ b/receiver/splunksearchapireceiver/go.mod @@ -6,6 +6,7 @@ require ( github.com/stretchr/testify v1.9.0 go.opentelemetry.io/collector/component v0.113.0 go.opentelemetry.io/collector/consumer v0.113.0 + go.opentelemetry.io/collector/consumer/consumertest v0.112.0 go.opentelemetry.io/collector/pdata v1.19.0 
go.opentelemetry.io/collector/receiver v0.112.0 go.uber.org/zap v1.27.0 @@ -18,6 +19,7 @@ require ( github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/golang/snappy v0.0.4 // indirect + github.com/google/uuid v1.6.0 // indirect github.com/klauspost/compress v1.17.11 // indirect github.com/pierrec/lz4/v4 v4.1.21 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect @@ -28,9 +30,15 @@ require ( go.opentelemetry.io/collector/config/configopaque v1.19.0 // indirect go.opentelemetry.io/collector/config/configtls v1.19.0 // indirect go.opentelemetry.io/collector/config/internal v0.113.0 // indirect + go.opentelemetry.io/collector/consumer/consumererror v0.112.0 // indirect + go.opentelemetry.io/collector/consumer/consumerprofiles v0.112.0 // indirect go.opentelemetry.io/collector/extension v0.113.0 // indirect go.opentelemetry.io/collector/extension/auth v0.113.0 // indirect + go.opentelemetry.io/collector/pdata/pprofile v0.112.0 // indirect + go.opentelemetry.io/collector/receiver/receiverprofiles v0.112.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.56.0 // indirect + go.opentelemetry.io/otel/sdk v1.31.0 // indirect + go.opentelemetry.io/otel/sdk/metric v1.31.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/receiver/splunksearchapireceiver/go.sum b/receiver/splunksearchapireceiver/go.sum index a061e9529..5680b4a21 100644 --- a/receiver/splunksearchapireceiver/go.sum +++ b/receiver/splunksearchapireceiver/go.sum @@ -68,6 +68,8 @@ go.opentelemetry.io/collector/config/internal v0.113.0 h1:9RAzH8v7ItFT1npHpvP0Sv go.opentelemetry.io/collector/config/internal v0.113.0/go.mod h1:yC7E4h1Uj0SubxcFImh6OvBHFTjMh99+A5PuyIgDWqc= go.opentelemetry.io/collector/consumer v0.113.0 h1:KJSiK5vSIY9dgPxwKfQ3gOgKtQsqc+7IB7mGhUAL5c8= go.opentelemetry.io/collector/consumer v0.113.0/go.mod h1:zHMlXYFaJlZoLCBR6UwWoyXZ/adcO1u2ydqUal3VmYU= +go.opentelemetry.io/collector/consumer/consumererror 
v0.112.0 h1:dCqWEi3Yws5V5oGhCSOwxCHK6tYya5UzfzXmSLMHZ8E= +go.opentelemetry.io/collector/consumer/consumererror v0.112.0/go.mod h1:X9RJt5caDnwxoG++GhQHvlmDi2TMWEr6S/XRhZTSmOI= go.opentelemetry.io/collector/consumer/consumerprofiles v0.112.0 h1:ym+QxemlbWwfMSUto1hRTfcZeYbj2q8FpMzjk8O+X60= go.opentelemetry.io/collector/consumer/consumerprofiles v0.112.0/go.mod h1:4PjDUpURFh85R6NLEHrEf/uZjpk4LAYmmOrqu+iZsyE= go.opentelemetry.io/collector/consumer/consumertest v0.112.0 h1:pGvNH+H4rMygUOql6ynVQim6UFdimTiJ0HRfQL6v0GE= @@ -80,10 +82,14 @@ go.opentelemetry.io/collector/pdata v1.19.0 h1:jmnU5R8TOCbwRr4B8sjdRxM7L5WnEKlQW go.opentelemetry.io/collector/pdata v1.19.0/go.mod h1:Ox1YVLe87cZDB/TL30i4SUz1cA5s6AM6SpFMfY61ICs= go.opentelemetry.io/collector/pdata/pprofile v0.112.0 h1:t+LYorcMqZ3sDz5/jp3xU2l5lIhIXuIOOGO4Ef9CG2c= go.opentelemetry.io/collector/pdata/pprofile v0.112.0/go.mod h1:F2aTCoDzIaxEUK1g92LZvMwradySFMo3ZsAnBIpOdUg= +go.opentelemetry.io/collector/pdata/testdata v0.112.0 h1:7jJzNvRE+CpYrwHbAYwPiN9a/hqmVRlRADJNeDJTvYI= +go.opentelemetry.io/collector/pdata/testdata v0.112.0/go.mod h1:9kO148Qp12B93SSUE52s0QGGV8Nf9RFN2G/PnZx3l+w= go.opentelemetry.io/collector/pipeline v0.112.0 h1:jqKDdb8k53OLPibvxzX6fmMec0ZHAtqe4p2+cuHclEI= go.opentelemetry.io/collector/pipeline v0.112.0/go.mod h1:4vOvjVsoYTHVGTbfFwqfnQOSV2K3RKUHofh3jNRc2Mg= go.opentelemetry.io/collector/receiver v0.112.0 h1:gdTBDOPGKMZlZghtN5A7ZLNlNwCHWYcoJQeIiXvyGEQ= go.opentelemetry.io/collector/receiver v0.112.0/go.mod h1:3QmfSUiyFzRTnHUqF8fyEvQpU5q/xuwS43jGt8JXEEA= +go.opentelemetry.io/collector/receiver/receiverprofiles v0.112.0 h1:SShkZsWRsFss3iWZa9JwMC7h4gD5RbWDhUcz1/9dXSs= +go.opentelemetry.io/collector/receiver/receiverprofiles v0.112.0/go.mod h1:615smszDXiz4YWwXslxlAjX7FzOVDU7Bk6xARFk+zpk= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.56.0 h1:UP6IpuHFkUgOQL9FFQFrZ+5LiwhhYRbi7VZSIx6Nj5s= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.56.0/go.mod 
h1:qxuZLtbq5QDtdeSHsS7bcf6EH6uO6jUAgk764zd3rhM= go.opentelemetry.io/otel v1.31.0 h1:NsJcKPIW0D0H3NgzPDHmo0WW6SptzPdqg/L1zsIm2hY= From 1d638025f46696e0cdda2936636d79f1ef5f86f7 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Tue, 19 Nov 2024 15:12:02 -0500 Subject: [PATCH 18/55] client tests --- receiver/splunksearchapireceiver/client.go | 8 +- .../splunksearchapireceiver/client_test.go | 86 ++++++++++++++++--- 2 files changed, 80 insertions(+), 14 deletions(-) diff --git a/receiver/splunksearchapireceiver/client.go b/receiver/splunksearchapireceiver/client.go index 517244a1c..72a157ee5 100644 --- a/receiver/splunksearchapireceiver/client.go +++ b/receiver/splunksearchapireceiver/client.go @@ -79,12 +79,12 @@ func (c defaultSplunkSearchAPIClient) CreateSearchJob(search string) (CreateJobR var jobResponse CreateJobResponse body, err := io.ReadAll(resp.Body) if err != nil { - return CreateJobResponse{}, fmt.Errorf("failed to read search job status response: %v", err) + return CreateJobResponse{}, fmt.Errorf("failed to read search job create response: %v", err) } err = xml.Unmarshal(body, &jobResponse) if err != nil { - return CreateJobResponse{}, fmt.Errorf("failed to unmarshal search job response: %v", err) + return CreateJobResponse{}, fmt.Errorf("failed to unmarshal search job create response: %v", err) } return jobResponse, nil } @@ -115,7 +115,7 @@ func (c defaultSplunkSearchAPIClient) GetJobStatus(sid string) (JobStatusRespons var jobStatusResponse JobStatusResponse err = xml.Unmarshal(body, &jobStatusResponse) if err != nil { - return JobStatusResponse{}, fmt.Errorf("failed to unmarshal search job response: %v", err) + return JobStatusResponse{}, fmt.Errorf("failed to unmarshal search job status response: %v", err) } return jobStatusResponse, nil @@ -147,7 +147,7 @@ func (c defaultSplunkSearchAPIClient) GetSearchResults(sid string) (SearchResult // fmt.Println("Body: ", string(body)) err = json.Unmarshal(body, &searchResults) if err != nil { - return 
SearchResultsResponse{}, fmt.Errorf("failed to unmarshal search job results: %v", err) + return SearchResultsResponse{}, fmt.Errorf("failed to unmarshal search job results response: %v", err) } return searchResults, nil diff --git a/receiver/splunksearchapireceiver/client_test.go b/receiver/splunksearchapireceiver/client_test.go index 6e8a8353d..b0deb9396 100644 --- a/receiver/splunksearchapireceiver/client_test.go +++ b/receiver/splunksearchapireceiver/client_test.go @@ -15,8 +15,10 @@ package splunksearchapireceiver import ( + "io" "net/http" "net/http/httptest" + "strings" "testing" "github.com/stretchr/testify/require" @@ -31,10 +33,20 @@ var ( ) func TestCreateSearchJob(t *testing.T) { - // valid search resp, err := testClient.CreateSearchJob("index=otel") require.NoError(t, err) require.Equal(t, "123456", resp.SID) + + // returns an error if the response status isn't 201 + resp, err = testClient.CreateSearchJob("index=fail_to_create_job") + require.ErrorContains(t, err, "failed to create search job") + require.Empty(t, resp) + + // returns an error if the response body can't be unmarshalled + resp, err = testClient.CreateSearchJob("index=fail_to_unmarshal") + require.ErrorContains(t, err, "failed to unmarshal search job create response") + require.Empty(t, resp) + } func TestGetJobStatus(t *testing.T) { @@ -42,24 +54,58 @@ func TestGetJobStatus(t *testing.T) { require.NoError(t, err) require.Equal(t, "DONE", resp.Content.Dict.Keys[0].Value) require.Equal(t, "text/xml", resp.Content.Type) + + // returns an error if the response status isn't 200 + resp, err = testClient.GetJobStatus("654321") + require.ErrorContains(t, err, "failed to get search job status") + require.Empty(t, resp) + + // returns an error if the response body can't be unmarshalled + resp, err = testClient.GetJobStatus("098765") + require.ErrorContains(t, err, "failed to unmarshal search job status response") + require.Empty(t, resp) } func TestGetSearchResults(t *testing.T) { + resp, err := 
testClient.GetSearchResults("123456", 0, 5) + require.NoError(t, err) + require.Equal(t, 5, len(resp.Results)) + require.Equal(t, "Hello, world!", resp.Results[0].Raw) + + // returns an error if the response status isn't 200 + resp, err = testClient.GetSearchResults("654321", 0, 5) + require.ErrorContains(t, err, "failed to get search job results") + require.Empty(t, resp) + // returns an error if the response body can't be unmarshalled + resp, err = testClient.GetSearchResults("098765", 0, 5) + require.ErrorContains(t, err, "failed to unmarshal search job results response") + require.Empty(t, resp) } // mock Splunk servers func newMockSplunkServer() *httptest.Server { return httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - switch req.URL.Path { + switch req.URL.String() { case "/services/search/jobs": - rw.Header().Set("Content-Type", "application/xml") - rw.WriteHeader(http.StatusCreated) - rw.Write([]byte(` - - 123456 - - `)) + body, _ := io.ReadAll(req.Body) + if strings.Contains(string(body), "index=otel") { + rw.Header().Set("Content-Type", "application/xml") + rw.WriteHeader(http.StatusCreated) + rw.Write([]byte(` + + 123456 + + `)) + } + if strings.Contains(string(body), "index=fail_to_create_job") { + rw.WriteHeader(http.StatusNotFound) + } + if strings.Contains(string(body), "index=fail_to_unmarshal") { + rw.WriteHeader(http.StatusCreated) + rw.Write([]byte(`invalid xml`)) + req.Body = &errorReader{} + } case "/services/search/v2/jobs/123456": rw.Header().Set("Content-Type", "application/xml") rw.WriteHeader(http.StatusOK) @@ -71,10 +117,20 @@ func newMockSplunkServer() *httptest.Server { `)) - case "/services/search/v2/jobs/123456/results": + case "/services/search/v2/jobs/654321": + rw.WriteHeader(http.StatusNotFound) + case "/services/search/v2/jobs/098765": + rw.WriteHeader(http.StatusOK) + rw.Write([]byte(`invalid xml`)) + case "/services/search/v2/jobs/123456/results?output_mode=json&offset=0&count=5": 
rw.Header().Set("Content-Type", "application/json") rw.WriteHeader(http.StatusOK) rw.Write(splunkEventsResultsP1) + case "/services/search/v2/jobs/654321/results?output_mode=json&offset=0&count=5": + rw.WriteHeader(http.StatusNotFound) + case "/services/search/v2/jobs/098765/results?output_mode=json&offset=0&count=5": + rw.WriteHeader(http.StatusOK) + rw.Write([]byte(`invalid json`)) default: rw.WriteHeader(http.StatusNotFound) } @@ -106,3 +162,13 @@ var splunkEventsResultsP1 = []byte(`{ } ] }`) + +type errorReader struct{} + +func (e *errorReader) Read(p []byte) (n int, err error) { + return 0, io.ErrUnexpectedEOF +} + +func (e *errorReader) Close() error { + return nil +} From 1de224f61b809bb1afafa4b95829fb6fa966dec6 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Mon, 11 Nov 2024 14:12:03 -0500 Subject: [PATCH 19/55] checkpoint methods --- receiver/splunksearchapireceiver/config.go | 3 +- receiver/splunksearchapireceiver/receiver.go | 76 ++++++++++++++++---- 2 files changed, 63 insertions(+), 16 deletions(-) diff --git a/receiver/splunksearchapireceiver/config.go b/receiver/splunksearchapireceiver/config.go index bb13533ab..f11631a55 100644 --- a/receiver/splunksearchapireceiver/config.go +++ b/receiver/splunksearchapireceiver/config.go @@ -20,6 +20,7 @@ import ( "strings" "time" + "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/config/confighttp" ) @@ -33,8 +34,8 @@ type Config struct { Username string `mapstructure:"splunk_username"` Password string `mapstructure:"splunk_password"` Searches []Search `mapstructure:"searches"` - EventBatchSize int `mapstructure:"event_batch_size"` JobPollInterval time.Duration `mapstructure:"job_poll_interval"` + StorageID *component.ID `mapstructure:"storage"` } // Search struct to represent a Splunk search diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index eec38b6cb..225985fcf 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ 
b/receiver/splunksearchapireceiver/receiver.go @@ -16,9 +16,13 @@ package splunksearchapireceiver import ( "context" + "encoding/json" "fmt" + "net/http" "time" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/adapter" + "go.etcd.io/etcd/proxy/grpcproxy/adapter" "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/consumer" "go.opentelemetry.io/collector/pdata/pcommon" @@ -26,6 +30,10 @@ import ( "go.uber.org/zap" ) +const ( + eventStorageKey = "last_event_offset" +) + var ( offset = 0 // offset for pagination and checkpointing exportedEvents = 0 // track the number of events returned by the results endpoint that are exported @@ -33,12 +41,18 @@ var ( ) type splunksearchapireceiver struct { - host component.Host - logger *zap.Logger - logsConsumer consumer.Logs - config *Config - settings component.TelemetrySettings - client splunkSearchAPIClient + host component.Host + logger *zap.Logger + logsConsumer consumer.Logs + config *Config + settings component.TelemetrySettings + client splunkSearchAPIClient + storageClient adapter.StorageClient + record *eventRecord +} + +type eventRecord struct { + Offset string `json:"offset"` } func (ssapir *splunksearchapireceiver) Start(ctx context.Context, host component.Host) error { @@ -48,6 +62,18 @@ func (ssapir *splunksearchapireceiver) Start(ctx context.Context, host component if err != nil { return err } + ssapir.client = client + + // create storage client + storageClient, err := adapter.GetStorageClient(ssapir.config.StorageID) + if err != nil { + return fmt.Errorf("failed to get storage client: %w", err) + } + ssapir.storageClient = storageClient + + // check if a checkpoint already exists + ssapir.loadCheckpoint(ctx) + go ssapir.runQueries(ctx) return nil } @@ -77,15 +103,11 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { time.Sleep(2 * time.Second) } - var resultCountTracker = 0 // track number of results exported - var offset = 0 // offset 
for pagination - var limitReached = false - for { - // fetch search results - results, err := ssapir.getSplunkSearchResults(ssapir.config, searchID, offset) - if err != nil { - ssapir.logger.Error("error fetching search results", zap.Error(err)) - } + // fetch search results + results, err := ssapir.getSplunkSearchResults(ssapir.config, searchID) + if err != nil { + ssapir.logger.Error("error fetching search results", zap.Error(err)) + } // parse time strings to time.Time earliestTime, err := time.Parse(time.RFC3339, search.EarliestTime) @@ -210,3 +232,27 @@ func (ssapir *splunksearchapireceiver) getSplunkSearchResults(sid string, offset } return resp, nil } + +func (ssapir *splunksearchapireceiver) checkpoint(ctx context.Context) error { + marshalBytes, err := json.Marshal(ssapir.record) + if err != nil { + return fmt.Errorf("failed to write checkpoint: %w", err) + } + return ssapir.storageClient.Set(ctx, eventStorageKey, marshalBytes) +} + +func (ssapir *splunksearchapireceiver) loadCheckpoint(ctx context.Context) { + marshalBytes, err := ssapir.storageClient.Get(ctx, eventStorageKey) + if err != nil { + ssapir.logger.Error("failed to read checkpoint", zap.Error(err)) + return + } + if marshalBytes == nil { + ssapir.logger.Info("no checkpoint found") + return + } + err = json.Unmarshal(marshalBytes, ssapir.record) + if err != nil { + ssapir.logger.Error("failed to unmarshal checkpoint", zap.Error(err)) + } +} From e2e48c636358d66d6179eb2a0d492503cbe20d6b Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Thu, 14 Nov 2024 15:59:05 -0500 Subject: [PATCH 20/55] WIP --- go.mod | 2 ++ go.sum | 4 ++++ receiver/splunksearchapireceiver/factory.go | 1 + receiver/splunksearchapireceiver/go.mod | 24 ++++++++++++++++++-- receiver/splunksearchapireceiver/receiver.go | 7 +++--- 5 files changed, 33 insertions(+), 5 deletions(-) diff --git a/go.mod b/go.mod index 82d9470eb..afc215ea8 100644 --- a/go.mod +++ b/go.mod @@ -312,6 +312,7 @@ require ( github.com/briandowns/spinner 
v1.23.0 // indirect github.com/cihub/seelog v0.0.0-20170130134532-f561c5e57575 // indirect github.com/containerd/cgroups/v3 v3.0.3 // indirect + github.com/coreos/etcd v3.3.27+incompatible // indirect github.com/coreos/go-oidc/v3 v3.11.0 // indirect github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect github.com/distribution/reference v0.6.0 // indirect @@ -402,6 +403,7 @@ require ( go.elastic.co/apm/module/apmzap/v2 v2.6.0 // indirect go.elastic.co/apm/v2 v2.6.0 // indirect go.elastic.co/fastjson v1.3.0 // indirect + go.etcd.io/etcd v3.3.27+incompatible // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/collector v0.114.0 // indirect go.opentelemetry.io/collector/client v1.20.0 // indirect diff --git a/go.sum b/go.sum index 8722962d7..35f73b246 100644 --- a/go.sum +++ b/go.sum @@ -1133,6 +1133,8 @@ github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkE github.com/coreos/bbolt v1.3.3/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= +github.com/coreos/etcd v3.3.27+incompatible h1:QIudLb9KeBsE5zyYxd1mjzRSkzLg9Wf9QlRwFgd6oTA= +github.com/coreos/etcd v3.3.27+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= github.com/coreos/go-oidc/v3 v3.11.0 h1:Ia3MxdwpSw702YW0xgfmP1GVCMA9aEFWu12XUZ3/OtI= github.com/coreos/go-oidc/v3 v3.11.0/go.mod h1:gE3LgjOgFoHi9a4ce4/tJczr0Ai2/BoDhf0r5lltWI0= @@ -2619,6 +2621,8 @@ go.elastic.co/fastjson v1.3.0/go.mod h1:K9vDh7O0ODsVKV2B5e2XYLY277QZaCbB3tS1SnAR go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.11 h1:yGEzV1wPz2yVCLsD8ZAiGHhHVlczyC9d1rP43/VCRJ0= go.etcd.io/bbolt v1.3.11/go.mod 
h1:dksAq7YMXoljX0xu6VF5DMZGbhYYoLUalEiSySYAS4I= +go.etcd.io/etcd v3.3.27+incompatible h1:5hMrpf6REqTHV2LW2OclNpRtxI0k9ZplMemJsMSWju0= +go.etcd.io/etcd v3.3.27+incompatible/go.mod h1:yaeTdrJi5lOmYerz05bd8+V7KubZs8YSFZfzsF9A6aI= go.etcd.io/etcd/api/v3 v3.5.4/go.mod h1:5GB2vv4A4AOn3yk7MftYGHkUfGtDHnEraIjym4dYz5A= go.etcd.io/etcd/client/pkg/v3 v3.5.4/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= go.etcd.io/etcd/client/v3 v3.5.4/go.mod h1:ZaRkVgBZC+L+dLCjTcF1hRXpgZXQPOvnA/Ak/gq3kiY= diff --git a/receiver/splunksearchapireceiver/factory.go b/receiver/splunksearchapireceiver/factory.go index dc61db414..7d571fd4e 100644 --- a/receiver/splunksearchapireceiver/factory.go +++ b/receiver/splunksearchapireceiver/factory.go @@ -45,6 +45,7 @@ func createLogsReceiver(_ context.Context, logger: params.Logger, logsConsumer: consumer, config: ssapirConfig, + id: params.ID, settings: params.TelemetrySettings, } return ssapir, nil diff --git a/receiver/splunksearchapireceiver/go.mod b/receiver/splunksearchapireceiver/go.mod index 36f3fb851..9c310a2c2 100644 --- a/receiver/splunksearchapireceiver/go.mod +++ b/receiver/splunksearchapireceiver/go.mod @@ -3,27 +3,47 @@ module github.com/open-telemetry/opentelemtry-collector-contrib/receiver/splunks go 1.22.5 require ( + github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.113.0 github.com/stretchr/testify v1.9.0 + go.etcd.io/etcd v3.3.27+incompatible go.opentelemetry.io/collector/component v0.113.0 go.opentelemetry.io/collector/consumer v0.113.0 go.opentelemetry.io/collector/consumer/consumertest v0.112.0 go.opentelemetry.io/collector/pdata v1.19.0 - go.opentelemetry.io/collector/receiver v0.112.0 + go.opentelemetry.io/collector/receiver v0.113.0 go.uber.org/zap v1.27.0 ) require ( + github.com/cenkalti/backoff/v4 v4.3.0 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/coreos/etcd v3.3.27+incompatible // indirect github.com/davecgh/go-spew v1.1.1 // indirect + github.com/elastic/lunes 
v0.1.0 // indirect + github.com/expr-lang/expr v1.16.9 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/fsnotify/fsnotify v1.8.0 // indirect github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-viper/mapstructure/v2 v2.2.1 // indirect + github.com/goccy/go-json v0.10.3 // indirect + github.com/golang/protobuf v1.5.0 // indirect github.com/golang/snappy v0.0.4 // indirect github.com/google/uuid v1.6.0 // indirect github.com/klauspost/compress v1.17.11 // indirect + github.com/knadh/koanf/maps v0.1.1 // indirect + github.com/knadh/koanf/providers/confmap v0.1.0 // indirect + github.com/knadh/koanf/v2 v2.1.1 // indirect + github.com/leodido/go-syslog/v4 v4.2.0 // indirect + github.com/leodido/ragel-machinery v0.0.0-20190525184631-5f46317e436b // indirect + github.com/magefile/mage v1.15.0 // indirect + github.com/mitchellh/copystructure v1.2.0 // indirect + github.com/mitchellh/reflectwalk v1.0.2 // indirect + github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.113.0 // indirect github.com/pierrec/lz4/v4 v4.1.21 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/rs/cors v1.11.1 // indirect + github.com/valyala/fastjson v1.6.4 // indirect go.opentelemetry.io/collector/client v1.19.0 // indirect go.opentelemetry.io/collector/config/configauth v0.113.0 // indirect go.opentelemetry.io/collector/config/configcompression v1.19.0 // indirect @@ -49,7 +69,7 @@ require ( github.com/modern-go/reflect2 v1.0.2 // indirect go.opentelemetry.io/collector/config/confighttp v0.113.0 go.opentelemetry.io/collector/config/configtelemetry v0.113.0 // indirect - go.opentelemetry.io/collector/pipeline v0.112.0 // indirect + go.opentelemetry.io/collector/pipeline v0.113.0 // indirect go.opentelemetry.io/otel v1.31.0 // indirect go.opentelemetry.io/otel/metric v1.31.0 // indirect go.opentelemetry.io/otel/trace v1.31.0 // indirect diff --git 
a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index 225985fcf..8e2330b58 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -22,9 +22,9 @@ import ( "time" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/adapter" - "go.etcd.io/etcd/proxy/grpcproxy/adapter" "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/consumer" + "go.opentelemetry.io/collector/extension/experimental/storage" "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/plog" "go.uber.org/zap" @@ -46,8 +46,9 @@ type splunksearchapireceiver struct { logsConsumer consumer.Logs config *Config settings component.TelemetrySettings + id component.ID client splunkSearchAPIClient - storageClient adapter.StorageClient + storageClient storage.Client record *eventRecord } @@ -65,7 +66,7 @@ func (ssapir *splunksearchapireceiver) Start(ctx context.Context, host component ssapir.client = client // create storage client - storageClient, err := adapter.GetStorageClient(ssapir.config.StorageID) + storageClient, err := adapter.GetStorageClient(ctx, host, ssapir.config.StorageID, ssapir.id) if err != nil { return fmt.Errorf("failed to get storage client: %w", err) } From 4aa0396e9cce84256d0abcabb0473aa10d0ebf2f Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Fri, 15 Nov 2024 11:40:08 -0500 Subject: [PATCH 21/55] functional checkpoint --- receiver/splunksearchapireceiver/config.go | 4 ++ receiver/splunksearchapireceiver/factory.go | 11 ++-- receiver/splunksearchapireceiver/model.go | 5 ++ receiver/splunksearchapireceiver/receiver.go | 59 ++++++++++++-------- 4 files changed, 51 insertions(+), 28 deletions(-) diff --git a/receiver/splunksearchapireceiver/config.go b/receiver/splunksearchapireceiver/config.go index f11631a55..f7c809ab7 100644 --- a/receiver/splunksearchapireceiver/config.go +++ b/receiver/splunksearchapireceiver/config.go @@ 
-62,6 +62,10 @@ func (cfg *Config) Validate() error { return errors.New("at least one search must be provided") } + if cfg.StorageID == nil { + return errors.New("storage configuration must be provided") + } + for _, search := range cfg.Searches { if search.Query == "" { return errors.New("missing query in search") diff --git a/receiver/splunksearchapireceiver/factory.go b/receiver/splunksearchapireceiver/factory.go index 7d571fd4e..0c04ff377 100644 --- a/receiver/splunksearchapireceiver/factory.go +++ b/receiver/splunksearchapireceiver/factory.go @@ -42,11 +42,12 @@ func createLogsReceiver(_ context.Context, ) (receiver.Logs, error) { ssapirConfig := cfg.(*Config) ssapir := &splunksearchapireceiver{ - logger: params.Logger, - logsConsumer: consumer, - config: ssapirConfig, - id: params.ID, - settings: params.TelemetrySettings, + logger: params.Logger, + logsConsumer: consumer, + config: ssapirConfig, + id: params.ID, + settings: params.TelemetrySettings, + checkpointRecord: &EventRecord{}, } return ssapir, nil } diff --git a/receiver/splunksearchapireceiver/model.go b/receiver/splunksearchapireceiver/model.go index 2d5a6dfec..6a2198eea 100644 --- a/receiver/splunksearchapireceiver/model.go +++ b/receiver/splunksearchapireceiver/model.go @@ -58,3 +58,8 @@ type SearchResults struct { Time string `json:"_time"` } `json:"results"` } + +// EventRecord struct stores the offset of the last event exported successfully +type EventRecord struct { + Offset int `json:"offset"` +} diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index 8e2330b58..e373f475a 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -41,19 +41,15 @@ var ( ) type splunksearchapireceiver struct { - host component.Host - logger *zap.Logger - logsConsumer consumer.Logs - config *Config - settings component.TelemetrySettings - id component.ID - client splunkSearchAPIClient - storageClient 
storage.Client - record *eventRecord -} - -type eventRecord struct { - Offset string `json:"offset"` + host component.Host + logger *zap.Logger + logsConsumer consumer.Logs + config *Config + settings component.TelemetrySettings + id component.ID + client splunkSearchAPIClient + storageClient storage.Client + checkpointRecord *EventRecord } func (ssapir *splunksearchapireceiver) Start(ctx context.Context, host component.Host) error { @@ -72,15 +68,26 @@ func (ssapir *splunksearchapireceiver) Start(ctx context.Context, host component } ssapir.storageClient = storageClient - // check if a checkpoint already exists + // if a checkpoint already exists, use the offset from the checkpoint ssapir.loadCheckpoint(ctx) + if ssapir.checkpointRecord.Offset != 0 { + ssapir.logger.Info("found offset checkpoint in storage extension", zap.Int("offset", ssapir.checkpointRecord.Offset)) + offset = ssapir.checkpointRecord.Offset + } go ssapir.runQueries(ctx) return nil } -func (ssapir *splunksearchapireceiver) Shutdown(_ context.Context) error { - return nil +func (ssapir *splunksearchapireceiver) Shutdown(ctx context.Context) error { + ssapir.logger.Info("shutting down logs receiver") + + err := ssapir.checkpoint(ctx) + if err != nil { + ssapir.logger.Error("failed checkpoint", zap.Error(err)) + } + + return ssapir.storageClient.Close(ctx) } func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { @@ -161,18 +168,25 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { // Error from down the pipeline, freak out ssapir.logger.Error("error consuming logs", zap.Error(err)) } + // last batch of logs has been successfully exported + exportedEvents += logs.ResourceLogs().Len() + offset += len(results.Results) + + // update checkpoint + ssapir.checkpointRecord.Offset = offset + err = ssapir.checkpoint(ctx) + if err != nil { + ssapir.logger.Error("error writing checkpoint", zap.Error(err)) + } if limitReached { ssapir.logger.Info("limit 
reached, stopping search result export") - exportedEvents += logs.ResourceLogs().Len() break } // if the number of results is less than the results per request, we have queried all pages for the search if len(results.Results) < search.EventBatchSize { - exportedEvents += len(results.Results) break } - exportedEvents += logs.ResourceLogs().Len() - offset += len(results.Results) + } ssapir.logger.Info("search results exported", zap.String("query", search.Query), zap.Int("total results", exportedEvents)) } @@ -235,7 +249,7 @@ func (ssapir *splunksearchapireceiver) getSplunkSearchResults(sid string, offset } func (ssapir *splunksearchapireceiver) checkpoint(ctx context.Context) error { - marshalBytes, err := json.Marshal(ssapir.record) + marshalBytes, err := json.Marshal(ssapir.checkpointRecord) if err != nil { return fmt.Errorf("failed to write checkpoint: %w", err) } @@ -252,8 +266,7 @@ func (ssapir *splunksearchapireceiver) loadCheckpoint(ctx context.Context) { ssapir.logger.Info("no checkpoint found") return } - err = json.Unmarshal(marshalBytes, ssapir.record) - if err != nil { + if err = json.Unmarshal(marshalBytes, ssapir.checkpointRecord); err != nil { ssapir.logger.Error("failed to unmarshal checkpoint", zap.Error(err)) } } From cb84d762e62be098272820c61560135438cf088a Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Fri, 15 Nov 2024 14:26:50 -0500 Subject: [PATCH 22/55] debug logs, rm print --- receiver/splunksearchapireceiver/receiver.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index e373f475a..3f8e1caef 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -80,7 +80,7 @@ func (ssapir *splunksearchapireceiver) Start(ctx context.Context, host component } func (ssapir *splunksearchapireceiver) Shutdown(ctx context.Context) error { - ssapir.logger.Info("shutting down logs receiver") 
+ ssapir.logger.Debug("shutting down logs receiver") err := ssapir.checkpoint(ctx) if err != nil { @@ -267,6 +267,6 @@ func (ssapir *splunksearchapireceiver) loadCheckpoint(ctx context.Context) { return } if err = json.Unmarshal(marshalBytes, ssapir.checkpointRecord); err != nil { - ssapir.logger.Error("failed to unmarshal checkpoint", zap.Error(err)) + ssapir.logger.Fatal("failed to unmarshal checkpoint", zap.Error(err)) } } From c3dd5cd06f184cb12d21eb076ad362e08d968a2a Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Fri, 15 Nov 2024 16:18:05 -0500 Subject: [PATCH 23/55] loadCheckpoint return error --- receiver/splunksearchapireceiver/receiver.go | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index 3f8e1caef..3b8d6ca96 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -69,7 +69,9 @@ func (ssapir *splunksearchapireceiver) Start(ctx context.Context, host component ssapir.storageClient = storageClient // if a checkpoint already exists, use the offset from the checkpoint - ssapir.loadCheckpoint(ctx) + if err = ssapir.loadCheckpoint(ctx); err != nil { + return fmt.Errorf("failed to load checkpoint: %w", err) + } if ssapir.checkpointRecord.Offset != 0 { ssapir.logger.Info("found offset checkpoint in storage extension", zap.Int("offset", ssapir.checkpointRecord.Offset)) offset = ssapir.checkpointRecord.Offset @@ -256,17 +258,14 @@ func (ssapir *splunksearchapireceiver) checkpoint(ctx context.Context) error { return ssapir.storageClient.Set(ctx, eventStorageKey, marshalBytes) } -func (ssapir *splunksearchapireceiver) loadCheckpoint(ctx context.Context) { +func (ssapir *splunksearchapireceiver) loadCheckpoint(ctx context.Context) error { marshalBytes, err := ssapir.storageClient.Get(ctx, eventStorageKey) if err != nil { - ssapir.logger.Error("failed to read checkpoint", 
zap.Error(err)) - return + return err } if marshalBytes == nil { ssapir.logger.Info("no checkpoint found") - return - } - if err = json.Unmarshal(marshalBytes, ssapir.checkpointRecord); err != nil { - ssapir.logger.Fatal("failed to unmarshal checkpoint", zap.Error(err)) + return nil } + return json.Unmarshal(marshalBytes, ssapir.checkpointRecord) } From b37138b31a2ba2e42b92ab0b037617cca98e65a9 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Mon, 18 Nov 2024 15:27:21 -0500 Subject: [PATCH 24/55] splunk failure test --- receiver/splunksearchapireceiver/config.go | 2 +- receiver/splunksearchapireceiver/go.mod | 3 - .../integration_test.go | 123 ++++++++++++++++++ receiver/splunksearchapireceiver/model.go | 3 +- receiver/splunksearchapireceiver/receiver.go | 44 +++++-- 5 files changed, 161 insertions(+), 14 deletions(-) create mode 100644 receiver/splunksearchapireceiver/integration_test.go diff --git a/receiver/splunksearchapireceiver/config.go b/receiver/splunksearchapireceiver/config.go index f7c809ab7..04e2cd039 100644 --- a/receiver/splunksearchapireceiver/config.go +++ b/receiver/splunksearchapireceiver/config.go @@ -63,7 +63,7 @@ func (cfg *Config) Validate() error { } if cfg.StorageID == nil { - return errors.New("storage configuration must be provided") + return errors.New("storage configuration is required for this receiver") } for _, search := range cfg.Searches { diff --git a/receiver/splunksearchapireceiver/go.mod b/receiver/splunksearchapireceiver/go.mod index 9c310a2c2..73417f9db 100644 --- a/receiver/splunksearchapireceiver/go.mod +++ b/receiver/splunksearchapireceiver/go.mod @@ -5,7 +5,6 @@ go 1.22.5 require ( github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.113.0 github.com/stretchr/testify v1.9.0 - go.etcd.io/etcd v3.3.27+incompatible go.opentelemetry.io/collector/component v0.113.0 go.opentelemetry.io/collector/consumer v0.113.0 go.opentelemetry.io/collector/consumer/consumertest v0.112.0 @@ -17,7 +16,6 @@ require ( 
require ( github.com/cenkalti/backoff/v4 v4.3.0 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect - github.com/coreos/etcd v3.3.27+incompatible // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/elastic/lunes v0.1.0 // indirect github.com/expr-lang/expr v1.16.9 // indirect @@ -27,7 +25,6 @@ require ( github.com/go-logr/stdr v1.2.2 // indirect github.com/go-viper/mapstructure/v2 v2.2.1 // indirect github.com/goccy/go-json v0.10.3 // indirect - github.com/golang/protobuf v1.5.0 // indirect github.com/golang/snappy v0.0.4 // indirect github.com/google/uuid v1.6.0 // indirect github.com/klauspost/compress v1.17.11 // indirect diff --git a/receiver/splunksearchapireceiver/integration_test.go b/receiver/splunksearchapireceiver/integration_test.go new file mode 100644 index 000000000..b765f34d2 --- /dev/null +++ b/receiver/splunksearchapireceiver/integration_test.go @@ -0,0 +1,123 @@ +// Copyright observIQ, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package splunksearchapireceiver + +import ( + "context" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/require" + "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/component/componenttest" + "go.opentelemetry.io/collector/consumer/consumertest" + "go.opentelemetry.io/collector/extension/experimental/storage" + "go.uber.org/zap" +) + +// Test the case where some data is exported, but a subsequent call for paginated data fails +func TestSplunkResultsPaginationFailure(t *testing.T) { + factory := NewFactory() + cfg := factory.CreateDefaultConfig().(*Config) + cfg.Searches = []Search{ + { + Query: "search index=otel", + EarliestTime: "2024-11-14T00:00:00.000Z", + LatestTime: "2024-11-14T23:59:59.000Z", + EventBatchSize: 5, + }, + } + var callCount int = 0 + server := newMockSplunkServer(&callCount) + defer server.Close() + settings := componenttest.NewNopTelemetrySettings() + ssapir := newSSAPIReceiver(zap.NewNop(), cfg, settings, component.NewID(typeStr)) + ssapir.client, _ = newSplunkSearchAPIClient(context.Background(), settings, *cfg, componenttest.NewNopHost()) + ssapir.client.(*defaultSplunkSearchAPIClient).client = server.Client() + ssapir.client.(*defaultSplunkSearchAPIClient).endpoint = server.URL + ssapir.logsConsumer = &consumertest.LogsSink{} + + ssapir.storageClient = storage.NewNopClient() + + ssapir.initCheckpoint(context.Background()) + ssapir.runQueries(context.Background()) + require.Equal(t, 5, ssapir.checkpointRecord.Offset) + require.Equal(t, 1, callCount) +} + +func newMockSplunkServer(callCount *int) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + if req.URL.String() == "/services/search/jobs" { + rw.Header().Set("Content-Type", "application/xml") + rw.WriteHeader(201) + rw.Write([]byte(` + + 123456 + + `)) + } + if req.URL.String() == "/services/search/v2/jobs/123456" { + rw.Header().Set("Content-Type", 
"application/xml") + rw.WriteHeader(200) + rw.Write([]byte(` + + + DISPATCH + + DONE + + + `)) + } + if req.URL.String() == "/services/search/v2/jobs/123456/results?output_mode=json&offset=0&count=5" && req.URL.Query().Get("offset") == "0" { + rw.Header().Set("Content-Type", "application/json") + rw.WriteHeader(200) + rw.Write(splunkEventsResultsP1) + *callCount++ + } + if req.URL.String() == "/services/search/v2/jobs/123456/results?output_mode=json&offset=5&count=5" && req.URL.Query().Get("offset") == "5" { + rw.Header().Set("Content-Type", "application/json") + rw.WriteHeader(400) + rw.Write([]byte("error, bad request")) + } + })) +} + +var splunkEventsResultsP1 = []byte(`{ + "init_offset": 0, + "results": [ + { + "_raw": "Hello, world!", + "_time": "2024-11-14T13:02:31.000-05:00" + }, + { + "_raw": "Goodbye, world!", + "_time": "2024-11-14T13:02:30.000-05:00" + }, + { + "_raw": "lorem ipsum", + "_time": "2024-11-14T13:02:29.000-05:00" + }, + { + "_raw": "dolor sit amet", + "_time": "2024-11-14T13:02:28.000-05:00" + }, + { + "_raw": "consectetur adipiscing elit", + "_time": "2024-11-14T13:02:27.000-05:00" + } + ] +}`) diff --git a/receiver/splunksearchapireceiver/model.go b/receiver/splunksearchapireceiver/model.go index 6a2198eea..100fc4e8e 100644 --- a/receiver/splunksearchapireceiver/model.go +++ b/receiver/splunksearchapireceiver/model.go @@ -61,5 +61,6 @@ type SearchResults struct { // EventRecord struct stores the offset of the last event exported successfully type EventRecord struct { - Offset int `json:"offset"` + Offset int `json:"offset"` + Search string `json:"search"` } diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index 3b8d6ca96..647844fe4 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -52,6 +52,21 @@ type splunksearchapireceiver struct { checkpointRecord *EventRecord } +func newSSAPIReceiver( + logger *zap.Logger, + config 
*Config, + settings component.TelemetrySettings, + id component.ID, +) *splunksearchapireceiver { + return &splunksearchapireceiver{ + logger: logger, + config: config, + settings: settings, + id: id, + checkpointRecord: &EventRecord{}, + } +} + func (ssapir *splunksearchapireceiver) Start(ctx context.Context, host component.Host) error { ssapir.host = host var err error @@ -68,15 +83,7 @@ func (ssapir *splunksearchapireceiver) Start(ctx context.Context, host component } ssapir.storageClient = storageClient - // if a checkpoint already exists, use the offset from the checkpoint - if err = ssapir.loadCheckpoint(ctx); err != nil { - return fmt.Errorf("failed to load checkpoint: %w", err) - } - if ssapir.checkpointRecord.Offset != 0 { - ssapir.logger.Info("found offset checkpoint in storage extension", zap.Int("offset", ssapir.checkpointRecord.Offset)) - offset = ssapir.checkpointRecord.Offset - } - + ssapir.initCheckpoint(ctx) go ssapir.runQueries(ctx) return nil } @@ -250,6 +257,25 @@ func (ssapir *splunksearchapireceiver) getSplunkSearchResults(sid string, offset return resp, nil } +func (ssapir *splunksearchapireceiver) initCheckpoint(ctx context.Context) error { + // if a checkpoint already exists, use the offset from the checkpoint + if err := ssapir.loadCheckpoint(ctx); err != nil { + return fmt.Errorf("failed to load checkpoint: %w", err) + } + if ssapir.checkpointRecord.Offset != 0 { + // check if the search query in the checkpoint record matches any of the search queries in the config + for idx, search := range ssapir.config.Searches { + if search.Query == ssapir.checkpointRecord.Search { + ssapir.logger.Info("found offset checkpoint in storage extension", zap.Int("offset", ssapir.checkpointRecord.Offset), zap.String("search", ssapir.checkpointRecord.Search)) + // skip searches that have already been processed, use the offset from the checkpoint + ssapir.config.Searches = ssapir.config.Searches[idx:] + offset = ssapir.checkpointRecord.Offset + } + } + } + 
return nil +} + func (ssapir *splunksearchapireceiver) checkpoint(ctx context.Context) error { marshalBytes, err := json.Marshal(ssapir.checkpointRecord) if err != nil { From 61c87a7e406818a444ee74b5e5f831d9a9f7bbf3 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Thu, 14 Nov 2024 13:53:19 -0500 Subject: [PATCH 25/55] WIP --- receiver/splunksearchapireceiver/config.go | 3 +-- receiver/splunksearchapireceiver/config_test.go | 15 +++++++++++++++ receiver/splunksearchapireceiver/factory.go | 2 +- receiver/splunksearchapireceiver/receiver.go | 10 ++++++---- 4 files changed, 23 insertions(+), 7 deletions(-) diff --git a/receiver/splunksearchapireceiver/config.go b/receiver/splunksearchapireceiver/config.go index 04e2cd039..41825e346 100644 --- a/receiver/splunksearchapireceiver/config.go +++ b/receiver/splunksearchapireceiver/config.go @@ -16,7 +16,6 @@ package splunksearchapireceiver import ( "errors" - "fmt" "strings" "time" @@ -81,7 +80,7 @@ func (cfg *Config) Validate() error { } if strings.Contains(search.Query, "earliest=") || strings.Contains(search.Query, "latest=") { - return fmt.Errorf("time query parameters must be configured using only the \"earliest_time\" and \"latest_time\" configuration parameters") + return errors.New("time query parameters must be configured using only the 'earliest_time' and 'latest_time' configuration parameters") } if search.EarliestTime == "" { diff --git a/receiver/splunksearchapireceiver/config_test.go b/receiver/splunksearchapireceiver/config_test.go index 9e0741fa8..905b52e4d 100644 --- a/receiver/splunksearchapireceiver/config_test.go +++ b/receiver/splunksearchapireceiver/config_test.go @@ -215,6 +215,21 @@ func TestValidate(t *testing.T) { }, errExpected: false, }, + { + desc: "Query with ealiest and latest time", + endpoint: "http://localhost:8089", + username: "user", + password: "password", + searches: []Search{ + { + Query: "search index=_internal earliest=2024-10-30T04:00:00.000Z latest=2024-10-30T14:00:00.000Z", + 
EarliestTime: "2024-10-30T04:00:00.000Z", + LatestTime: "2024-10-30T14:00:00.000Z", + }, + }, + errExpected: true, + errText: "time query parameters must be configured using only the 'earliest_time' and 'latest_time' configuration parameters", + }, } for _, tc := range testCases { t.Run(tc.desc, func(t *testing.T) { diff --git a/receiver/splunksearchapireceiver/factory.go b/receiver/splunksearchapireceiver/factory.go index 0c04ff377..9fa2edfb0 100644 --- a/receiver/splunksearchapireceiver/factory.go +++ b/receiver/splunksearchapireceiver/factory.go @@ -31,7 +31,7 @@ var ( func createDefaultConfig() component.Config { return &Config{ ClientConfig: confighttp.NewDefaultClientConfig(), - JobPollInterval: 10 * time.Second, + JobPollInterval: 5 * time.Second, } } diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index 647844fe4..f79e0582e 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -102,8 +102,7 @@ func (ssapir *splunksearchapireceiver) Shutdown(ctx context.Context) error { func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { for _, search := range ssapir.config.Searches { // create search in Splunk - ssapir.logger.Info("creating search", zap.String("query", search.Query)) - searchID, err := ssapir.createSplunkSearch(search.Query) + searchID, err := ssapir.createSplunkSearch(search) if err != nil { ssapir.logger.Error("error creating search", zap.Error(err)) } @@ -224,8 +223,11 @@ func (ssapir *splunksearchapireceiver) pollSearchCompletion(ctx context.Context, } } -func (ssapir *splunksearchapireceiver) createSplunkSearch(search string) (string, error) { - resp, err := ssapir.createSearchJob(ssapir.config, search) +func (ssapir *splunksearchapireceiver) createSplunkSearch(search Search) (string, error) { + timeFormat := "%Y-%m-%dT%H:%M:%S" + searchQuery := fmt.Sprintf("%s starttime=\"%s\" endtime=\"%s\" timeformat=\"%s\"", 
search.Query, search.EarliestTime, search.LatestTime, timeFormat) + ssapir.logger.Info("creating search", zap.String("query", searchQuery)) + resp, err := ssapir.client.CreateSearchJob(searchQuery) if err != nil { return "", err } From 8b6a212994011e9d5f55e92f648f2a42d02cad1c Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Thu, 14 Nov 2024 14:39:44 -0500 Subject: [PATCH 26/55] encode req body --- receiver/splunksearchapireceiver/client.go | 3 ++- receiver/splunksearchapireceiver/config.go | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/receiver/splunksearchapireceiver/client.go b/receiver/splunksearchapireceiver/client.go index 72a157ee5..c54519f6b 100644 --- a/receiver/splunksearchapireceiver/client.go +++ b/receiver/splunksearchapireceiver/client.go @@ -23,6 +23,7 @@ import ( "fmt" "io" "net/http" + "net/url" "go.opentelemetry.io/collector/component" "go.uber.org/zap" @@ -59,7 +60,7 @@ func newSplunkSearchAPIClient(ctx context.Context, settings component.TelemetryS func (c defaultSplunkSearchAPIClient) CreateSearchJob(search string) (CreateJobResponse, error) { endpoint := fmt.Sprintf("%s/services/search/jobs", c.endpoint) - reqBody := fmt.Sprintf(`search=%s`, search) + reqBody := fmt.Sprintf(`search=%s`, url.QueryEscape(search)) req, err := http.NewRequest("POST", endpoint, bytes.NewBuffer([]byte(reqBody))) if err != nil { return CreateJobResponse{}, err diff --git a/receiver/splunksearchapireceiver/config.go b/receiver/splunksearchapireceiver/config.go index 41825e346..512b43dc3 100644 --- a/receiver/splunksearchapireceiver/config.go +++ b/receiver/splunksearchapireceiver/config.go @@ -70,7 +70,7 @@ func (cfg *Config) Validate() error { return errors.New("missing query in search") } - // query implicitly starts with "search" command + // query must start with "search" command if !strings.HasPrefix(search.Query, "search ") { return errNonStandaloneSearchQuery } From 1cb7a4d243bfc6157fdc06b7c0011bbefc53b4b0 Mon Sep 17 00:00:00 2001 From: 
Caleb Hurshman Date: Thu, 14 Nov 2024 16:03:37 -0500 Subject: [PATCH 27/55] stricter query validation --- receiver/splunksearchapireceiver/config.go | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/receiver/splunksearchapireceiver/config.go b/receiver/splunksearchapireceiver/config.go index 512b43dc3..345cd6ab9 100644 --- a/receiver/splunksearchapireceiver/config.go +++ b/receiver/splunksearchapireceiver/config.go @@ -79,7 +79,12 @@ func (cfg *Config) Validate() error { return errNonStandaloneSearchQuery } - if strings.Contains(search.Query, "earliest=") || strings.Contains(search.Query, "latest=") { + // ensure user query does not include time parameters + if strings.Contains(search.Query, "earliest=") || + strings.Contains(search.Query, "latest=") || + strings.Contains(search.Query, "starttime=") || + strings.Contains(search.Query, "endtime=") || + strings.Contains(search.Query, "timeformat=") { return errors.New("time query parameters must be configured using only the 'earliest_time' and 'latest_time' configuration parameters") } From 90b47e43cde4e3787acf951ad97f79230723c7b0 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Mon, 18 Nov 2024 16:20:40 -0500 Subject: [PATCH 28/55] storage config test --- .../splunksearchapireceiver/config_test.go | 36 ++++++++++++++++++- receiver/splunksearchapireceiver/go.mod | 3 +- 2 files changed, 37 insertions(+), 2 deletions(-) diff --git a/receiver/splunksearchapireceiver/config_test.go b/receiver/splunksearchapireceiver/config_test.go index 905b52e4d..90440234c 100644 --- a/receiver/splunksearchapireceiver/config_test.go +++ b/receiver/splunksearchapireceiver/config_test.go @@ -18,6 +18,7 @@ import ( "testing" "github.com/stretchr/testify/require" + "go.opentelemetry.io/collector/component" ) func TestValidate(t *testing.T) { @@ -26,6 +27,7 @@ func TestValidate(t *testing.T) { endpoint string username string password string + storage string searches []Search errExpected bool errText string @@ -34,6 
+36,7 @@ func TestValidate(t *testing.T) { desc: "Missing endpoint", username: "user", password: "password", + storage: "file_storage", searches: []Search{ { Query: "search index=_internal", @@ -48,6 +51,7 @@ func TestValidate(t *testing.T) { desc: "Missing username", endpoint: "http://localhost:8089", password: "password", + storage: "file_storage", searches: []Search{ { Query: "search index=_internal", @@ -62,6 +66,7 @@ func TestValidate(t *testing.T) { desc: "Missing password", endpoint: "http://localhost:8089", username: "user", + storage: "file_storage", searches: []Search{ { Query: "search index=_internal", @@ -72,11 +77,27 @@ func TestValidate(t *testing.T) { errExpected: true, errText: "missing Splunk password", }, + { + desc: "Missing storage", + endpoint: "http://localhost:8089", + username: "user", + password: "password", + searches: []Search{ + { + Query: "search index=_internal", + EarliestTime: "2024-10-30T04:00:00.000Z", + LatestTime: "2024-10-30T14:00:00.000Z", + }, + }, + errExpected: true, + errText: "storage configuration is required for this receiver", + }, { desc: "Missing searches", endpoint: "http://localhost:8089", username: "user", password: "password", + storage: "file_storage", errExpected: true, errText: "at least one search must be provided", }, @@ -85,6 +106,7 @@ func TestValidate(t *testing.T) { endpoint: "http://localhost:8089", username: "user", password: "password", + storage: "file_storage", searches: []Search{ { EarliestTime: "2024-10-30T04:00:00.000Z", @@ -99,6 +121,7 @@ func TestValidate(t *testing.T) { endpoint: "http://localhost:8089", username: "user", password: "password", + storage: "file_storage", searches: []Search{ { Query: "search index=_internal", @@ -113,6 +136,7 @@ func TestValidate(t *testing.T) { endpoint: "http://localhost:8089", username: "user", password: "password", + storage: "file_storage", searches: []Search{ { Query: "search index=_internal", @@ -128,6 +152,7 @@ func TestValidate(t *testing.T) { endpoint: 
"http://localhost:8089", username: "user", password: "password", + storage: "file_storage", searches: []Search{ { Query: "search index=_internal", @@ -142,6 +167,7 @@ func TestValidate(t *testing.T) { endpoint: "http://localhost:8089", username: "user", password: "password", + storage: "file_storage", searches: []Search{ { Query: "search index=_internal", @@ -157,6 +183,7 @@ func TestValidate(t *testing.T) { endpoint: "http://localhost:8089", username: "user", password: "password", + storage: "file_storage", searches: []Search{ { Query: "search index=_internal | stats count by sourcetype", @@ -172,6 +199,7 @@ func TestValidate(t *testing.T) { endpoint: "http://localhost:8089", username: "user", password: "password", + storage: "file_storage", searches: []Search{ { Query: "search index=_internal", @@ -186,6 +214,7 @@ func TestValidate(t *testing.T) { endpoint: "http://localhost:8089", username: "user", password: "password", + storage: "file_storage", searches: []Search{ { Query: "search index=_internal", @@ -205,6 +234,7 @@ func TestValidate(t *testing.T) { endpoint: "http://localhost:8089", username: "user", password: "password", + storage: "file_storage", searches: []Search{ { Query: "search index=_internal", @@ -216,10 +246,11 @@ func TestValidate(t *testing.T) { errExpected: false, }, { - desc: "Query with ealiest and latest time", + desc: "Query with earliest and latest time", endpoint: "http://localhost:8089", username: "user", password: "password", + storage: "file_storage", searches: []Search{ { Query: "search index=_internal earliest=2024-10-30T04:00:00.000Z latest=2024-10-30T14:00:00.000Z", @@ -238,6 +269,9 @@ func TestValidate(t *testing.T) { cfg.Username = tc.username cfg.Password = tc.password cfg.Searches = tc.searches + if tc.storage != "" { + cfg.StorageID = &component.ID{} + } err := cfg.Validate() if tc.errExpected { require.EqualError(t, err, tc.errText) diff --git a/receiver/splunksearchapireceiver/go.mod b/receiver/splunksearchapireceiver/go.mod 
index 73417f9db..105ca1a2b 100644 --- a/receiver/splunksearchapireceiver/go.mod +++ b/receiver/splunksearchapireceiver/go.mod @@ -6,6 +6,7 @@ require ( github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.113.0 github.com/stretchr/testify v1.9.0 go.opentelemetry.io/collector/component v0.113.0 + go.opentelemetry.io/collector/confmap v1.20.0 go.opentelemetry.io/collector/consumer v0.113.0 go.opentelemetry.io/collector/consumer/consumertest v0.112.0 go.opentelemetry.io/collector/pdata v1.19.0 @@ -30,7 +31,7 @@ require ( github.com/klauspost/compress v1.17.11 // indirect github.com/knadh/koanf/maps v0.1.1 // indirect github.com/knadh/koanf/providers/confmap v0.1.0 // indirect - github.com/knadh/koanf/v2 v2.1.1 // indirect + github.com/knadh/koanf/v2 v2.1.2 // indirect github.com/leodido/go-syslog/v4 v4.2.0 // indirect github.com/leodido/ragel-machinery v0.0.0-20190525184631-5f46317e436b // indirect github.com/magefile/mage v1.15.0 // indirect From c7f2d7f4a31093fd1a09f0f692bca5a0911ba782 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Mon, 18 Nov 2024 16:30:40 -0500 Subject: [PATCH 29/55] lint, tidy --- go.mod | 2 -- go.sum | 4 ---- receiver/splunksearchapireceiver/integration_test.go | 2 +- receiver/splunksearchapireceiver/receiver.go | 5 ++++- 4 files changed, 5 insertions(+), 8 deletions(-) diff --git a/go.mod b/go.mod index afc215ea8..82d9470eb 100644 --- a/go.mod +++ b/go.mod @@ -312,7 +312,6 @@ require ( github.com/briandowns/spinner v1.23.0 // indirect github.com/cihub/seelog v0.0.0-20170130134532-f561c5e57575 // indirect github.com/containerd/cgroups/v3 v3.0.3 // indirect - github.com/coreos/etcd v3.3.27+incompatible // indirect github.com/coreos/go-oidc/v3 v3.11.0 // indirect github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect github.com/distribution/reference v0.6.0 // indirect @@ -403,7 +402,6 @@ require ( go.elastic.co/apm/module/apmzap/v2 v2.6.0 // indirect go.elastic.co/apm/v2 v2.6.0 // indirect 
go.elastic.co/fastjson v1.3.0 // indirect - go.etcd.io/etcd v3.3.27+incompatible // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/collector v0.114.0 // indirect go.opentelemetry.io/collector/client v1.20.0 // indirect diff --git a/go.sum b/go.sum index 35f73b246..8722962d7 100644 --- a/go.sum +++ b/go.sum @@ -1133,8 +1133,6 @@ github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkE github.com/coreos/bbolt v1.3.3/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= -github.com/coreos/etcd v3.3.27+incompatible h1:QIudLb9KeBsE5zyYxd1mjzRSkzLg9Wf9QlRwFgd6oTA= -github.com/coreos/etcd v3.3.27+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= github.com/coreos/go-oidc/v3 v3.11.0 h1:Ia3MxdwpSw702YW0xgfmP1GVCMA9aEFWu12XUZ3/OtI= github.com/coreos/go-oidc/v3 v3.11.0/go.mod h1:gE3LgjOgFoHi9a4ce4/tJczr0Ai2/BoDhf0r5lltWI0= @@ -2621,8 +2619,6 @@ go.elastic.co/fastjson v1.3.0/go.mod h1:K9vDh7O0ODsVKV2B5e2XYLY277QZaCbB3tS1SnAR go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.11 h1:yGEzV1wPz2yVCLsD8ZAiGHhHVlczyC9d1rP43/VCRJ0= go.etcd.io/bbolt v1.3.11/go.mod h1:dksAq7YMXoljX0xu6VF5DMZGbhYYoLUalEiSySYAS4I= -go.etcd.io/etcd v3.3.27+incompatible h1:5hMrpf6REqTHV2LW2OclNpRtxI0k9ZplMemJsMSWju0= -go.etcd.io/etcd v3.3.27+incompatible/go.mod h1:yaeTdrJi5lOmYerz05bd8+V7KubZs8YSFZfzsF9A6aI= go.etcd.io/etcd/api/v3 v3.5.4/go.mod h1:5GB2vv4A4AOn3yk7MftYGHkUfGtDHnEraIjym4dYz5A= go.etcd.io/etcd/client/pkg/v3 v3.5.4/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= go.etcd.io/etcd/client/v3 v3.5.4/go.mod h1:ZaRkVgBZC+L+dLCjTcF1hRXpgZXQPOvnA/Ak/gq3kiY= diff --git 
a/receiver/splunksearchapireceiver/integration_test.go b/receiver/splunksearchapireceiver/integration_test.go index b765f34d2..3cf444257 100644 --- a/receiver/splunksearchapireceiver/integration_test.go +++ b/receiver/splunksearchapireceiver/integration_test.go @@ -40,7 +40,7 @@ func TestSplunkResultsPaginationFailure(t *testing.T) { EventBatchSize: 5, }, } - var callCount int = 0 + var callCount int server := newMockSplunkServer(&callCount) defer server.Close() settings := componenttest.NewNopTelemetrySettings() diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index f79e0582e..9c585e36d 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -83,7 +83,10 @@ func (ssapir *splunksearchapireceiver) Start(ctx context.Context, host component } ssapir.storageClient = storageClient - ssapir.initCheckpoint(ctx) + err = ssapir.initCheckpoint(ctx) + if err != nil { + return fmt.Errorf("failed to initialize checkpoint: %w", err) + } go ssapir.runQueries(ctx) return nil } From fbc9ee571fc2cd2a1d9c986ff5bfb9bc626fc64c Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Tue, 19 Nov 2024 10:52:23 -0500 Subject: [PATCH 30/55] return error on export fail --- receiver/splunksearchapireceiver/receiver.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index 9c585e36d..a6fae45c3 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -176,8 +176,8 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { // pass logs, wait for exporter to confirm successful export to GCP err = ssapir.logsConsumer.ConsumeLogs(ctx, logs) if err != nil { - // Error from down the pipeline, freak out - ssapir.logger.Error("error consuming logs", zap.Error(err)) + // error from down the pipeline, 
freak out + return fmt.Errorf("error consuming logs: %w", err) } // last batch of logs has been successfully exported exportedEvents += logs.ResourceLogs().Len() From 9b6d1ddbe8a1fce844cce0a36bb72ee3d860cb44 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Tue, 19 Nov 2024 11:23:07 -0500 Subject: [PATCH 31/55] tidy --- receiver/splunksearchapireceiver/go.mod | 1 - 1 file changed, 1 deletion(-) diff --git a/receiver/splunksearchapireceiver/go.mod b/receiver/splunksearchapireceiver/go.mod index 105ca1a2b..6913b9fa1 100644 --- a/receiver/splunksearchapireceiver/go.mod +++ b/receiver/splunksearchapireceiver/go.mod @@ -6,7 +6,6 @@ require ( github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.113.0 github.com/stretchr/testify v1.9.0 go.opentelemetry.io/collector/component v0.113.0 - go.opentelemetry.io/collector/confmap v1.20.0 go.opentelemetry.io/collector/consumer v0.113.0 go.opentelemetry.io/collector/consumer/consumertest v0.112.0 go.opentelemetry.io/collector/pdata v1.19.0 From 5c4d222cfb0e31a72b0730ef0007393ac31a9e07 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Wed, 20 Nov 2024 14:56:11 -0500 Subject: [PATCH 32/55] receiver tests --- receiver/splunksearchapireceiver/client.go | 19 +- .../splunksearchapireceiver/client_test.go | 58 ++--- receiver/splunksearchapireceiver/go.mod | 2 + receiver/splunksearchapireceiver/model.go | 15 +- receiver/splunksearchapireceiver/receiver.go | 33 +-- .../splunksearchapireceiver/receiver_test.go | 127 +++++++++++ .../testdata/logs/testPollJobStatus/input.xml | 212 ++++++++++++++++++ 7 files changed, 395 insertions(+), 71 deletions(-) create mode 100644 receiver/splunksearchapireceiver/receiver_test.go create mode 100644 receiver/splunksearchapireceiver/testdata/logs/testPollJobStatus/input.xml diff --git a/receiver/splunksearchapireceiver/client.go b/receiver/splunksearchapireceiver/client.go index c54519f6b..3b0ab6c77 100644 --- 
a/receiver/splunksearchapireceiver/client.go +++ b/receiver/splunksearchapireceiver/client.go @@ -24,6 +24,7 @@ import ( "io" "net/http" "net/url" + "strings" "go.opentelemetry.io/collector/component" "go.uber.org/zap" @@ -60,6 +61,10 @@ func newSplunkSearchAPIClient(ctx context.Context, settings component.TelemetryS func (c defaultSplunkSearchAPIClient) CreateSearchJob(search string) (CreateJobResponse, error) { endpoint := fmt.Sprintf("%s/services/search/jobs", c.endpoint) + if !strings.Contains(search, "starttime=") || !strings.Contains(search, "endtime=") || !strings.Contains(search, "timeformat=") { + return CreateJobResponse{}, fmt.Errorf("search query must contain starttime, endtime, and timeformat") + } + reqBody := fmt.Sprintf(`search=%s`, url.QueryEscape(search)) req, err := http.NewRequest("POST", endpoint, bytes.NewBuffer([]byte(reqBody))) if err != nil { @@ -90,33 +95,33 @@ func (c defaultSplunkSearchAPIClient) CreateSearchJob(search string) (CreateJobR return jobResponse, nil } -func (c defaultSplunkSearchAPIClient) GetJobStatus(sid string) (JobStatusResponse, error) { +func (c defaultSplunkSearchAPIClient) GetJobStatus(sid string) (SearchStatusResponse, error) { endpoint := fmt.Sprintf("%s/services/search/v2/jobs/%s", c.endpoint, sid) req, err := http.NewRequest("GET", endpoint, nil) if err != nil { - return JobStatusResponse{}, err + return SearchStatusResponse{}, err } req.SetBasicAuth(c.username, c.password) resp, err := c.client.Do(req) if err != nil { - return JobStatusResponse{}, err + return SearchStatusResponse{}, err } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { - return JobStatusResponse{}, fmt.Errorf("failed to get search job status: %d", resp.StatusCode) + return SearchStatusResponse{}, fmt.Errorf("failed to get search job status: %d", resp.StatusCode) } body, err := io.ReadAll(resp.Body) if err != nil { - return JobStatusResponse{}, fmt.Errorf("failed to read search job status response: %v", err) + return 
SearchStatusResponse{}, fmt.Errorf("failed to read search job status response: %v", err) } - var jobStatusResponse JobStatusResponse + var jobStatusResponse SearchStatusResponse err = xml.Unmarshal(body, &jobStatusResponse) if err != nil { - return JobStatusResponse{}, fmt.Errorf("failed to unmarshal search job status response: %v", err) + return SearchStatusResponse{}, fmt.Errorf("failed to unmarshal search job status response: %v", err) } return jobStatusResponse, nil diff --git a/receiver/splunksearchapireceiver/client_test.go b/receiver/splunksearchapireceiver/client_test.go index b0deb9396..1d4c3562a 100644 --- a/receiver/splunksearchapireceiver/client_test.go +++ b/receiver/splunksearchapireceiver/client_test.go @@ -25,7 +25,7 @@ import ( ) var ( - server = newMockSplunkServer() + server = newMockServer() testClient = defaultSplunkSearchAPIClient{ client: server.Client(), endpoint: server.URL, @@ -33,17 +33,22 @@ var ( ) func TestCreateSearchJob(t *testing.T) { - resp, err := testClient.CreateSearchJob("index=otel") + resp, err := testClient.CreateSearchJob("index=otel starttime=\"\" endtime=\"\" timeformat=\"\"") require.NoError(t, err) require.Equal(t, "123456", resp.SID) + // returns an error if the search doesn't have times + resp, err = testClient.CreateSearchJob("index=otel") + require.EqualError(t, err, "search query must contain starttime, endtime, and timeformat") + require.Empty(t, resp) + // returns an error if the response status isn't 201 - resp, err = testClient.CreateSearchJob("index=fail_to_create_job") + resp, err = testClient.CreateSearchJob("index=fail_to_create_job starttime=\"\" endtime=\"\" timeformat=\"\"") require.ErrorContains(t, err, "failed to create search job") require.Empty(t, resp) // returns an error if the response body can't be unmarshalled - resp, err = testClient.CreateSearchJob("index=fail_to_unmarshal") + resp, err = testClient.CreateSearchJob("index=fail_to_unmarshal starttime=\"\" endtime=\"\" timeformat=\"\"") 
require.ErrorContains(t, err, "failed to unmarshal search job create response") require.Empty(t, resp) @@ -84,12 +89,12 @@ func TestGetSearchResults(t *testing.T) { } // mock Splunk servers -func newMockSplunkServer() *httptest.Server { +func newMockServer() *httptest.Server { return httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { switch req.URL.String() { case "/services/search/jobs": body, _ := io.ReadAll(req.Body) - if strings.Contains(string(body), "index=otel") { + if strings.Contains(string(body), "search=index%3Dotel") { rw.Header().Set("Content-Type", "application/xml") rw.WriteHeader(http.StatusCreated) rw.Write([]byte(` @@ -98,13 +103,12 @@ func newMockSplunkServer() *httptest.Server { `)) } - if strings.Contains(string(body), "index=fail_to_create_job") { + if strings.Contains(string(body), "index%3Dfail_to_create_job") { rw.WriteHeader(http.StatusNotFound) } - if strings.Contains(string(body), "index=fail_to_unmarshal") { + if strings.Contains(string(body), "index%3Dfail_to_unmarshal") { rw.WriteHeader(http.StatusCreated) rw.Write([]byte(`invalid xml`)) - req.Body = &errorReader{} } case "/services/search/v2/jobs/123456": rw.Header().Set("Content-Type", "application/xml") @@ -136,39 +140,3 @@ func newMockSplunkServer() *httptest.Server { } })) } - -var splunkEventsResultsP1 = []byte(`{ - "init_offset": 0, - "results": [ - { - "_raw": "Hello, world!", - "_time": "2024-11-14T13:02:31.000-05:00" - }, - { - "_raw": "Goodbye, world!", - "_time": "2024-11-14T13:02:30.000-05:00" - }, - { - "_raw": "lorem ipsum", - "_time": "2024-11-14T13:02:29.000-05:00" - }, - { - "_raw": "dolor sit amet", - "_time": "2024-11-14T13:02:28.000-05:00" - }, - { - "_raw": "consectetur adipiscing elit", - "_time": "2024-11-14T13:02:27.000-05:00" - } - ] -}`) - -type errorReader struct{} - -func (e *errorReader) Read(p []byte) (n int, err error) { - return 0, io.ErrUnexpectedEOF -} - -func (e *errorReader) Close() error { - return nil -} diff 
--git a/receiver/splunksearchapireceiver/go.mod b/receiver/splunksearchapireceiver/go.mod index 6913b9fa1..f03f865ea 100644 --- a/receiver/splunksearchapireceiver/go.mod +++ b/receiver/splunksearchapireceiver/go.mod @@ -10,6 +10,7 @@ require ( go.opentelemetry.io/collector/consumer/consumertest v0.112.0 go.opentelemetry.io/collector/pdata v1.19.0 go.opentelemetry.io/collector/receiver v0.113.0 + go.opentelemetry.io/collector/receiver/receivertest v0.113.0 go.uber.org/zap v1.27.0 ) @@ -40,6 +41,7 @@ require ( github.com/pierrec/lz4/v4 v4.1.21 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/rs/cors v1.11.1 // indirect + github.com/stretchr/objx v0.5.2 // indirect github.com/valyala/fastjson v1.6.4 // indirect go.opentelemetry.io/collector/client v1.19.0 // indirect go.opentelemetry.io/collector/config/configauth v0.113.0 // indirect diff --git a/receiver/splunksearchapireceiver/model.go b/receiver/splunksearchapireceiver/model.go index 100fc4e8e..9f3a249bb 100644 --- a/receiver/splunksearchapireceiver/model.go +++ b/receiver/splunksearchapireceiver/model.go @@ -20,13 +20,16 @@ type CreateJobResponse struct { SID string `xml:"sid"` } -// JobStatusResponse struct to represent the XML response from Splunk job status endpoint +// SearchStatusResponse struct to represent the XML response from Splunk job status endpoint // https://docs.splunk.com/Documentation/Splunk/9.3.1/RESTREF/RESTsearch#search.2Fjobs.2F.7Bsearch_id.7D -type JobStatusResponse struct { - Content struct { - Type string `xml:"type,attr"` - Dict Dict `xml:"dict"` - } `xml:"content"` +type SearchStatusResponse struct { + Content Content `xml:"content"` +} + +// Content struct to represent elements +type Content struct { + Type string `xml:"type,attr"` + Dict Dict `xml:"dict"` } // Dict struct to represent elements diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index a6fae45c3..ac3fe5641 100644 --- 
a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -47,6 +47,7 @@ type splunksearchapireceiver struct { config *Config settings component.TelemetrySettings id component.ID + cancel context.CancelFunc client splunkSearchAPIClient storageClient storage.Client checkpointRecord *EventRecord @@ -62,6 +63,7 @@ func newSSAPIReceiver( logger: logger, config: config, settings: settings, + storageClient: storage.NewNopClient(), id: id, checkpointRecord: &EventRecord{}, } @@ -74,7 +76,10 @@ func (ssapir *splunksearchapireceiver) Start(ctx context.Context, host component if err != nil { return err } - ssapir.client = client + + // set cancel function + cancelCtx, cancel := context.WithCancel(ctx) + ssapir.cancel = cancel // create storage client storageClient, err := adapter.GetStorageClient(ctx, host, ssapir.config.StorageID, ssapir.id) @@ -83,16 +88,19 @@ func (ssapir *splunksearchapireceiver) Start(ctx context.Context, host component } ssapir.storageClient = storageClient - err = ssapir.initCheckpoint(ctx) + err = ssapir.initCheckpoint(cancelCtx) if err != nil { return fmt.Errorf("failed to initialize checkpoint: %w", err) } - go ssapir.runQueries(ctx) + go ssapir.runQueries(cancelCtx) return nil } func (ssapir *splunksearchapireceiver) Shutdown(ctx context.Context) error { ssapir.logger.Debug("shutting down logs receiver") + if ssapir.cancel != nil { + ssapir.cancel() + } err := ssapir.checkpoint(ctx) if err != nil { @@ -217,10 +225,11 @@ func (ssapir *splunksearchapireceiver) pollSearchCompletion(ctx context.Context, select { case <-t.C: ssapir.logger.Debug("polling for search completion") - done, err := ssapir.isSearchCompleted(searchID) + resp, err := ssapir.client.GetJobStatus(searchID) if err != nil { - return fmt.Errorf("error polling for search completion: %v", err) + return fmt.Errorf("error getting search job status: %v", err) } + done := ssapir.isSearchCompleted(resp) if done { ssapir.logger.Info("search 
completed") return nil @@ -243,21 +252,16 @@ func (ssapir *splunksearchapireceiver) createSplunkSearch(search Search) (string return resp.SID, nil } -func (ssapir *splunksearchapireceiver) isSearchCompleted(sid string) (bool, error) { - resp, err := ssapir.getJobStatus(ssapir.config, sid) - if err != nil { - return false, err - } - +func (ssapir *splunksearchapireceiver) isSearchCompleted(resp SearchStatusResponse) bool { for _, key := range resp.Content.Dict.Keys { if key.Name == "dispatchState" { if key.Value == "DONE" { - return true, nil + return true } break } } - return false, nil + return false } func (ssapir *splunksearchapireceiver) getSplunkSearchResults(sid string, offset int, batchSize int) (SearchResults, error) { @@ -288,6 +292,9 @@ func (ssapir *splunksearchapireceiver) initCheckpoint(ctx context.Context) error } func (ssapir *splunksearchapireceiver) checkpoint(ctx context.Context) error { + if ssapir.checkpointRecord == nil { + return nil + } marshalBytes, err := json.Marshal(ssapir.checkpointRecord) if err != nil { return fmt.Errorf("failed to write checkpoint: %w", err) diff --git a/receiver/splunksearchapireceiver/receiver_test.go b/receiver/splunksearchapireceiver/receiver_test.go new file mode 100644 index 000000000..88718d933 --- /dev/null +++ b/receiver/splunksearchapireceiver/receiver_test.go @@ -0,0 +1,127 @@ +// Copyright observIQ, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package splunksearchapireceiver + +import ( + "context" + "encoding/xml" + "os" + "path/filepath" + "testing" + "time" + + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" + "go.opentelemetry.io/collector/component" + "go.uber.org/zap" +) + +var ( + logger = zap.NewNop() + config = &Config{} + settings = component.TelemetrySettings{} + id = component.ID{} + ssapireceiver = newSSAPIReceiver(logger, config, settings, id) +) + +func TestPolling(t *testing.T) { + cfg := createDefaultConfig().(*Config) + cfg.JobPollInterval = 1 * time.Second + ssapireceiver.config = cfg + + client := &mockLogsClient{} + ssapireceiver.client = client + + file := filepath.Join("testdata", "logs", "testPollJobStatus", "input.xml") + client.On("GetJobStatus", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(client.loadTestStatusResponse(t, file), nil) + cancelCtx, cancel := context.WithCancel(context.Background()) + ssapireceiver.cancel = cancel + ssapireceiver.checkpointRecord = &EventRecord{} + + err := ssapireceiver.pollSearchCompletion(cancelCtx, "123456") + require.NoError(t, err) + + client.AssertCalled(t, "GetJobStatus", "123456") + + err = ssapireceiver.Shutdown(context.Background()) + require.NoError(t, err) +} + +func TestIsSearchCompleted(t *testing.T) { + jobResponse := SearchStatusResponse{ + Content: Content{ + Dict: Dict{ + Keys: []Key{ + { + Name: "dispatchState", + Value: "DONE", + }, + }, + }, + }, + } + + emptyResponse := SearchStatusResponse{} + + done := ssapireceiver.isSearchCompleted(jobResponse) + require.True(t, done) + + jobResponse.Content.Dict.Keys[0].Value = "RUNNING" + done = ssapireceiver.isSearchCompleted(jobResponse) + require.False(t, done) + + done = ssapireceiver.isSearchCompleted(emptyResponse) + require.False(t, done) +} + +func TestInitCheckpoint(t *testing.T) { + +} + +func TestCheckpoint(t *testing.T) { + t.Skip("Not implemented") +} + +func TestLoadCheckpoint(t *testing.T) { + t.Skip("Not implemented") 
+} + +type mockLogsClient struct { + mock.Mock +} + +func (m *mockLogsClient) loadTestStatusResponse(t *testing.T, file string) SearchStatusResponse { + logBytes, err := os.ReadFile(file) + require.NoError(t, err) + var resp SearchStatusResponse + err = xml.Unmarshal(logBytes, &resp) + require.NoError(t, err) + return resp +} + +func (m *mockLogsClient) GetJobStatus(searchID string) (SearchStatusResponse, error) { + args := m.Called(searchID) + return args.Get(0).(SearchStatusResponse), args.Error(1) +} + +func (m *mockLogsClient) CreateSearchJob(searchQuery string) (CreateJobResponse, error) { + args := m.Called(searchQuery) + return args.Get(0).(CreateJobResponse), args.Error(1) +} + +func (m *mockLogsClient) GetSearchResults(searchID string, offset int, batchSize int) (SearchResultsResponse, error) { + args := m.Called(searchID, offset, batchSize) + return args.Get(0).(SearchResultsResponse), args.Error(1) +} diff --git a/receiver/splunksearchapireceiver/testdata/logs/testPollJobStatus/input.xml b/receiver/splunksearchapireceiver/testdata/logs/testPollJobStatus/input.xml new file mode 100644 index 000000000..43037b0a3 --- /dev/null +++ b/receiver/splunksearchapireceiver/testdata/logs/testPollJobStatus/input.xml @@ -0,0 +1,212 @@ + + search index + https://localhost:8089/services/search/jobs/mysearch_02151949 + 2011-07-07T20:49:58.000-07:00 + + 2011-07-07T20:49:57.000-07:00 + + + + + + + + + admin + + + + 1969-12-31T16:00:00.000-08:00 + + 2174976 + DONE + 1.00000 + 0 + 2011-07-07T11:18:08.000-07:00 + 287 + 287 + 6 + 1 + 0 + search index + desc + 1 + 0 + 0 + 0 + 0 + 0 + 0 + 0 + 0 + 0 + index + + 1969-12-31T16:00:00.000-08:00 + 0 + 5 + litsearch index | fields keepcolorder=t "host" "index" "linecount" "source" "sourcetype" "splunk_server" + + 287 + 1 + 287 + 1.004000 + 287 + mysearch_02151949 + 0 + 516 + + + + + 0.004 + 4 + 287 + 287 + + + + + 0.089 + 4 + 0 + 287 + + + + + 0.002 + 2 + 287 + 287 + + + + + 0.005 + 4 + + + + + 0.002 + 2 + + + + + 0.002 + 2 + 287 + 287 
+ + + + + 0.083 + 2 + + + + + 0.004 + 4 + 287 + 287 + + + + + 0.004 + 4 + 287 + 287 + + + + + 0.059 + 1 + + + + + 0.037 + 1 + + + + + 0.036 + 1 + + + + + 0.092 + 5 + + + + + 0.110 + 1 + + + + + 0.089 + 4 + + + + + 0.359 + 5 + + + + + + + + + + mysearch_02151949 + search index + + + + + + + + + admin + + + + + admin + + + + + admin + true + global + search + true + + + + + mbp15.splunk.com + + + + + \ No newline at end of file From 5fc579c91ec87132d9d2d2f700e79f3b1a1f0231 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Wed, 20 Nov 2024 15:06:35 -0500 Subject: [PATCH 33/55] receiver tests --- receiver/splunksearchapireceiver/receiver.go | 2 + .../splunksearchapireceiver/receiver_test.go | 92 ++++++++++++++++++- 2 files changed, 92 insertions(+), 2 deletions(-) diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index ac3fe5641..e835608a4 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -285,8 +285,10 @@ func (ssapir *splunksearchapireceiver) initCheckpoint(ctx context.Context) error // skip searches that have already been processed, use the offset from the checkpoint ssapir.config.Searches = ssapir.config.Searches[idx:] offset = ssapir.checkpointRecord.Offset + return nil } } + ssapir.logger.Debug("while initializing checkpoint, no matching search query found, starting from the beginning") } return nil } diff --git a/receiver/splunksearchapireceiver/receiver_test.go b/receiver/splunksearchapireceiver/receiver_test.go index 88718d933..a5623a868 100644 --- a/receiver/splunksearchapireceiver/receiver_test.go +++ b/receiver/splunksearchapireceiver/receiver_test.go @@ -25,6 +25,7 @@ import ( "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/extension/experimental/storage" "go.uber.org/zap" ) @@ -87,15 +88,73 @@ func TestIsSearchCompleted(t *testing.T) { } 
func TestInitCheckpoint(t *testing.T) { + mockStorage := &mockStorage{} + searches := []Search{ + { + Query: "index=otel", + }, + { + Query: "index=otel2", + }, + { + Query: "index=otel3", + }, + { + Query: "index=otel4", + }, + { + Query: "index=otel5", + }, + } + ssapireceiver.config.Searches = searches + ssapireceiver.storageClient = mockStorage + err := ssapireceiver.initCheckpoint(context.Background()) + require.NoError(t, err) + require.Equal(t, 0, ssapireceiver.checkpointRecord.Offset) + mockStorage.Value = []byte(`{"offset":5,"search":"index=otel3"}`) + err = ssapireceiver.initCheckpoint(context.Background()) + require.NoError(t, err) + require.Equal(t, 5, ssapireceiver.checkpointRecord.Offset) + require.Equal(t, "index=otel3", ssapireceiver.checkpointRecord.Search) } func TestCheckpoint(t *testing.T) { - t.Skip("Not implemented") + mockStorage := &mockStorage{} + ssapireceiver.storageClient = mockStorage + mockStorage.On("Set", mock.Anything, eventStorageKey, mock.Anything).Return(nil) + err := ssapireceiver.checkpoint(context.Background()) + require.NoError(t, err) + mockStorage.AssertCalled(t, "Set", mock.Anything, eventStorageKey, []byte(`{"offset":0,"search":""}`)) + + ssapireceiver.checkpointRecord = &EventRecord{ + Offset: 5, + Search: "index=otel3", + } + + err = ssapireceiver.checkpoint(context.Background()) + require.NoError(t, err) + mockStorage.AssertCalled(t, "Set", mock.Anything, eventStorageKey, []byte(`{"offset":5,"search":"index=otel3"}`)) } func TestLoadCheckpoint(t *testing.T) { - t.Skip("Not implemented") + mockStorage := &mockStorage{} + ssapireceiver.storageClient = mockStorage + mockStorage.Value = []byte(`{"offset":5,"search":"index=otel3"}`) + err := ssapireceiver.loadCheckpoint(context.Background()) + require.NoError(t, err) + require.Equal(t, 5, ssapireceiver.checkpointRecord.Offset) + require.Equal(t, "index=otel3", ssapireceiver.checkpointRecord.Search) + + mockStorage.Value = []byte(`{"offset":10,"search":"index=otel4"}`) + err 
= ssapireceiver.loadCheckpoint(context.Background()) + require.NoError(t, err) + require.Equal(t, 10, ssapireceiver.checkpointRecord.Offset) + require.Equal(t, "index=otel4", ssapireceiver.checkpointRecord.Search) + + mockStorage.Value = []byte(`{}`) + err = ssapireceiver.loadCheckpoint(context.Background()) + require.NoError(t, err) } type mockLogsClient struct { @@ -125,3 +184,32 @@ func (m *mockLogsClient) GetSearchResults(searchID string, offset int, batchSize args := m.Called(searchID, offset, batchSize) return args.Get(0).(SearchResultsResponse), args.Error(1) } + +type mockStorage struct { + mock.Mock + Key string + Value []byte +} + +func (m *mockStorage) Get(ctx context.Context, key string) ([]byte, error) { + return []byte(m.Value), nil +} + +func (m *mockStorage) Set(ctx context.Context, key string, value []byte) error { + args := m.Called(ctx, key, value) + m.Key = key + m.Value = value + return args.Error(0) +} + +func (m *mockStorage) Batch(ctx context.Context, op ...storage.Operation) error { + return nil +} + +func (m *mockStorage) Close(ctx context.Context) error { + return nil +} + +func (m *mockStorage) Delete(ctx context.Context, key string) error { + return nil +} From 1145d00e8aea0161ff9180e4cc71547b82b3d740 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Wed, 20 Nov 2024 15:12:19 -0500 Subject: [PATCH 34/55] lint --- receiver/splunksearchapireceiver/receiver_test.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/receiver/splunksearchapireceiver/receiver_test.go b/receiver/splunksearchapireceiver/receiver_test.go index a5623a868..1a2c30778 100644 --- a/receiver/splunksearchapireceiver/receiver_test.go +++ b/receiver/splunksearchapireceiver/receiver_test.go @@ -191,7 +191,7 @@ type mockStorage struct { Value []byte } -func (m *mockStorage) Get(ctx context.Context, key string) ([]byte, error) { +func (m *mockStorage) Get(_ context.Context, _ string) ([]byte, error) { return []byte(m.Value), nil } @@ -202,14 +202,14 
@@ func (m *mockStorage) Set(ctx context.Context, key string, value []byte) error { return args.Error(0) } -func (m *mockStorage) Batch(ctx context.Context, op ...storage.Operation) error { +func (m *mockStorage) Batch(_ context.Context, _ ...storage.Operation) error { return nil } -func (m *mockStorage) Close(ctx context.Context) error { +func (m *mockStorage) Close(_ context.Context) error { return nil } -func (m *mockStorage) Delete(ctx context.Context, key string) error { +func (m *mockStorage) Delete(_ context.Context, _ string) error { return nil } From 60f8cd8a39bcf530eaa0e3a6132216ef81eb1de2 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Wed, 20 Nov 2024 15:42:20 -0500 Subject: [PATCH 35/55] fix TestCheckpoint --- receiver/splunksearchapireceiver/receiver_test.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/receiver/splunksearchapireceiver/receiver_test.go b/receiver/splunksearchapireceiver/receiver_test.go index 1a2c30778..ec531d631 100644 --- a/receiver/splunksearchapireceiver/receiver_test.go +++ b/receiver/splunksearchapireceiver/receiver_test.go @@ -123,6 +123,10 @@ func TestCheckpoint(t *testing.T) { mockStorage := &mockStorage{} ssapireceiver.storageClient = mockStorage mockStorage.On("Set", mock.Anything, eventStorageKey, mock.Anything).Return(nil) + ssapireceiver.checkpointRecord = &EventRecord{ + Offset: 0, + Search: "", + } err := ssapireceiver.checkpoint(context.Background()) require.NoError(t, err) mockStorage.AssertCalled(t, "Set", mock.Anything, eventStorageKey, []byte(`{"offset":0,"search":""}`)) From 9054f802c1845f0fdb396d7a383f033c597c1712 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Thu, 21 Nov 2024 10:13:36 -0500 Subject: [PATCH 36/55] rename structs --- receiver/splunksearchapireceiver/client.go | 20 +- .../splunksearchapireceiver/client_test.go | 20 +- receiver/splunksearchapireceiver/model.go | 10 +- receiver/splunksearchapireceiver/receiver.go | 2 +- .../splunksearchapireceiver/receiver_test.go | 24 +- 
.../{input.xml => input-done.xml} | 2 +- .../logs/testPollJobStatus/input-queued.xml | 212 ++++++++++++++++++ 7 files changed, 259 insertions(+), 31 deletions(-) rename receiver/splunksearchapireceiver/testdata/logs/testPollJobStatus/{input.xml => input-done.xml} (99%) create mode 100644 receiver/splunksearchapireceiver/testdata/logs/testPollJobStatus/input-queued.xml diff --git a/receiver/splunksearchapireceiver/client.go b/receiver/splunksearchapireceiver/client.go index 3b0ab6c77..0b6ec2ce8 100644 --- a/receiver/splunksearchapireceiver/client.go +++ b/receiver/splunksearchapireceiver/client.go @@ -32,8 +32,8 @@ import ( type splunkSearchAPIClient interface { CreateSearchJob(search string) (CreateJobResponse, error) - GetJobStatus(searchID string) (JobStatusResponse, error) - GetSearchResults(searchID string) (SearchResultsResponse, error) + GetJobStatus(searchID string) (SearchJobStatusResponse, error) + GetSearchResults(searchID string, offset int, batchSize int) (SearchResultsResponse, error) } type defaultSplunkSearchAPIClient struct { @@ -61,7 +61,7 @@ func newSplunkSearchAPIClient(ctx context.Context, settings component.TelemetryS func (c defaultSplunkSearchAPIClient) CreateSearchJob(search string) (CreateJobResponse, error) { endpoint := fmt.Sprintf("%s/services/search/jobs", c.endpoint) - if !strings.Contains(search, "starttime=") || !strings.Contains(search, "endtime=") || !strings.Contains(search, "timeformat=") { + if !strings.Contains(search, strings.ToLower("starttime=")) || !strings.Contains(search, strings.ToLower("endtime=")) || !strings.Contains(search, strings.ToLower("timeformat=")) { return CreateJobResponse{}, fmt.Errorf("search query must contain starttime, endtime, and timeformat") } @@ -95,33 +95,33 @@ func (c defaultSplunkSearchAPIClient) CreateSearchJob(search string) (CreateJobR return jobResponse, nil } -func (c defaultSplunkSearchAPIClient) GetJobStatus(sid string) (SearchStatusResponse, error) { +func (c defaultSplunkSearchAPIClient) 
GetJobStatus(sid string) (SearchJobStatusResponse, error) { endpoint := fmt.Sprintf("%s/services/search/v2/jobs/%s", c.endpoint, sid) req, err := http.NewRequest("GET", endpoint, nil) if err != nil { - return SearchStatusResponse{}, err + return SearchJobStatusResponse{}, err } req.SetBasicAuth(c.username, c.password) resp, err := c.client.Do(req) if err != nil { - return SearchStatusResponse{}, err + return SearchJobStatusResponse{}, err } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { - return SearchStatusResponse{}, fmt.Errorf("failed to get search job status: %d", resp.StatusCode) + return SearchJobStatusResponse{}, fmt.Errorf("failed to get search job status: %d", resp.StatusCode) } body, err := io.ReadAll(resp.Body) if err != nil { - return SearchStatusResponse{}, fmt.Errorf("failed to read search job status response: %v", err) + return SearchJobStatusResponse{}, fmt.Errorf("failed to read search job status response: %v", err) } - var jobStatusResponse SearchStatusResponse + var jobStatusResponse SearchJobStatusResponse err = xml.Unmarshal(body, &jobStatusResponse) if err != nil { - return SearchStatusResponse{}, fmt.Errorf("failed to unmarshal search job status response: %v", err) + return SearchJobStatusResponse{}, fmt.Errorf("failed to unmarshal search job status response: %v", err) } return jobStatusResponse, nil diff --git a/receiver/splunksearchapireceiver/client_test.go b/receiver/splunksearchapireceiver/client_test.go index 1d4c3562a..9e3edec18 100644 --- a/receiver/splunksearchapireceiver/client_test.go +++ b/receiver/splunksearchapireceiver/client_test.go @@ -24,15 +24,13 @@ import ( "github.com/stretchr/testify/require" ) -var ( - server = newMockServer() - testClient = defaultSplunkSearchAPIClient{ +func TestCreateSearchJob(t *testing.T) { + server := newMockServer() + testClient := defaultSplunkSearchAPIClient{ client: server.Client(), endpoint: server.URL, } -) -func TestCreateSearchJob(t *testing.T) { resp, err := 
testClient.CreateSearchJob("index=otel starttime=\"\" endtime=\"\" timeformat=\"\"") require.NoError(t, err) require.Equal(t, "123456", resp.SID) @@ -55,6 +53,12 @@ func TestCreateSearchJob(t *testing.T) { } func TestGetJobStatus(t *testing.T) { + server := newMockServer() + testClient := defaultSplunkSearchAPIClient{ + client: server.Client(), + endpoint: server.URL, + } + resp, err := testClient.GetJobStatus("123456") require.NoError(t, err) require.Equal(t, "DONE", resp.Content.Dict.Keys[0].Value) @@ -72,6 +76,12 @@ func TestGetJobStatus(t *testing.T) { } func TestGetSearchResults(t *testing.T) { + server := newMockServer() + testClient := defaultSplunkSearchAPIClient{ + client: server.Client(), + endpoint: server.URL, + } + resp, err := testClient.GetSearchResults("123456", 0, 5) require.NoError(t, err) require.Equal(t, 5, len(resp.Results)) diff --git a/receiver/splunksearchapireceiver/model.go b/receiver/splunksearchapireceiver/model.go index 9f3a249bb..0eeed5a3f 100644 --- a/receiver/splunksearchapireceiver/model.go +++ b/receiver/splunksearchapireceiver/model.go @@ -20,14 +20,14 @@ type CreateJobResponse struct { SID string `xml:"sid"` } -// SearchStatusResponse struct to represent the XML response from Splunk job status endpoint +// SearchJobStatusResponse struct to represent the XML response from Splunk job status endpoint // https://docs.splunk.com/Documentation/Splunk/9.3.1/RESTREF/RESTsearch#search.2Fjobs.2F.7Bsearch_id.7D -type SearchStatusResponse struct { - Content Content `xml:"content"` +type SearchJobStatusResponse struct { + Content SearchJobContent `xml:"content"` } -// Content struct to represent elements -type Content struct { +// SearchJobContent struct to represent elements +type SearchJobContent struct { Type string `xml:"type,attr"` Dict Dict `xml:"dict"` } diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index e835608a4..23d977187 100644 --- 
a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -252,7 +252,7 @@ func (ssapir *splunksearchapireceiver) createSplunkSearch(search Search) (string return resp.SID, nil } -func (ssapir *splunksearchapireceiver) isSearchCompleted(resp SearchStatusResponse) bool { +func (ssapir *splunksearchapireceiver) isSearchCompleted(resp SearchJobStatusResponse) bool { for _, key := range resp.Content.Dict.Keys { if key.Name == "dispatchState" { if key.Value == "DONE" { diff --git a/receiver/splunksearchapireceiver/receiver_test.go b/receiver/splunksearchapireceiver/receiver_test.go index ec531d631..84d4e7886 100644 --- a/receiver/splunksearchapireceiver/receiver_test.go +++ b/receiver/splunksearchapireceiver/receiver_test.go @@ -45,7 +45,7 @@ func TestPolling(t *testing.T) { client := &mockLogsClient{} ssapireceiver.client = client - file := filepath.Join("testdata", "logs", "testPollJobStatus", "input.xml") + file := filepath.Join("testdata", "logs", "testPollJobStatus", "input-done.xml") client.On("GetJobStatus", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(client.loadTestStatusResponse(t, file), nil) cancelCtx, cancel := context.WithCancel(context.Background()) ssapireceiver.cancel = cancel @@ -53,16 +53,22 @@ func TestPolling(t *testing.T) { err := ssapireceiver.pollSearchCompletion(cancelCtx, "123456") require.NoError(t, err) + client.AssertNumberOfCalls(t, "GetJobStatus", 1) - client.AssertCalled(t, "GetJobStatus", "123456") + // Test polling for a job that is still running + file = filepath.Join("testdata", "logs", "testPollJobStatus", "input-queued.xml") + client.On("GetJobStatus", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(client.loadTestStatusResponse(t, file), nil) + err = ssapireceiver.pollSearchCompletion(cancelCtx, "123456") + require.NoError(t, err) + client.AssertNumberOfCalls(t, "GetJobStatus", 2) err = ssapireceiver.Shutdown(context.Background()) require.NoError(t, 
err) } func TestIsSearchCompleted(t *testing.T) { - jobResponse := SearchStatusResponse{ - Content: Content{ + jobResponse := SearchJobStatusResponse{ + Content: SearchJobContent{ Dict: Dict{ Keys: []Key{ { @@ -74,7 +80,7 @@ func TestIsSearchCompleted(t *testing.T) { }, } - emptyResponse := SearchStatusResponse{} + emptyResponse := SearchJobStatusResponse{} done := ssapireceiver.isSearchCompleted(jobResponse) require.True(t, done) @@ -165,18 +171,18 @@ type mockLogsClient struct { mock.Mock } -func (m *mockLogsClient) loadTestStatusResponse(t *testing.T, file string) SearchStatusResponse { +func (m *mockLogsClient) loadTestStatusResponse(t *testing.T, file string) SearchJobStatusResponse { logBytes, err := os.ReadFile(file) require.NoError(t, err) - var resp SearchStatusResponse + var resp SearchJobStatusResponse err = xml.Unmarshal(logBytes, &resp) require.NoError(t, err) return resp } -func (m *mockLogsClient) GetJobStatus(searchID string) (SearchStatusResponse, error) { +func (m *mockLogsClient) GetJobStatus(searchID string) (SearchJobStatusResponse, error) { args := m.Called(searchID) - return args.Get(0).(SearchStatusResponse), args.Error(1) + return args.Get(0).(SearchJobStatusResponse), args.Error(1) } func (m *mockLogsClient) CreateSearchJob(searchQuery string) (CreateJobResponse, error) { diff --git a/receiver/splunksearchapireceiver/testdata/logs/testPollJobStatus/input.xml b/receiver/splunksearchapireceiver/testdata/logs/testPollJobStatus/input-done.xml similarity index 99% rename from receiver/splunksearchapireceiver/testdata/logs/testPollJobStatus/input.xml rename to receiver/splunksearchapireceiver/testdata/logs/testPollJobStatus/input-done.xml index 43037b0a3..049f0d90a 100644 --- a/receiver/splunksearchapireceiver/testdata/logs/testPollJobStatus/input.xml +++ b/receiver/splunksearchapireceiver/testdata/logs/testPollJobStatus/input-done.xml @@ -209,4 +209,4 @@ - \ No newline at end of file + diff --git 
a/receiver/splunksearchapireceiver/testdata/logs/testPollJobStatus/input-queued.xml b/receiver/splunksearchapireceiver/testdata/logs/testPollJobStatus/input-queued.xml new file mode 100644 index 000000000..17b01d572 --- /dev/null +++ b/receiver/splunksearchapireceiver/testdata/logs/testPollJobStatus/input-queued.xml @@ -0,0 +1,212 @@ + + search index + https://localhost:8089/services/search/jobs/mysearch_02151949 + 2011-07-07T20:49:58.000-07:00 + + 2011-07-07T20:49:57.000-07:00 + + + + + + + + + admin + + + + 1969-12-31T16:00:00.000-08:00 + + 2174976 + QUEUED + 1.00000 + 0 + 2011-07-07T11:18:08.000-07:00 + 287 + 287 + 6 + 1 + 0 + search index + desc + 1 + 0 + 0 + 0 + 0 + 0 + 0 + 0 + 0 + 0 + index + + 1969-12-31T16:00:00.000-08:00 + 0 + 5 + litsearch index | fields keepcolorder=t "host" "index" "linecount" "source" "sourcetype" "splunk_server" + + 287 + 1 + 287 + 1.004000 + 287 + mysearch_02151949 + 0 + 516 + + + + + 0.004 + 4 + 287 + 287 + + + + + 0.089 + 4 + 0 + 287 + + + + + 0.002 + 2 + 287 + 287 + + + + + 0.005 + 4 + + + + + 0.002 + 2 + + + + + 0.002 + 2 + 287 + 287 + + + + + 0.083 + 2 + + + + + 0.004 + 4 + 287 + 287 + + + + + 0.004 + 4 + 287 + 287 + + + + + 0.059 + 1 + + + + + 0.037 + 1 + + + + + 0.036 + 1 + + + + + 0.092 + 5 + + + + + 0.110 + 1 + + + + + 0.089 + 4 + + + + + 0.359 + 5 + + + + + + + + + + mysearch_02151949 + search index + + + + + + + + + admin + + + + + admin + + + + + admin + true + global + search + true + + + + + mbp15.splunk.com + + + + + From 950ecf30a168a5f97c7d8b17a0dd84c80631a380 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Tue, 3 Dec 2024 09:43:22 -0500 Subject: [PATCH 37/55] exporter fail test --- .../integration_test.go | 92 ++++++++++++++++++- 1 file changed, 90 insertions(+), 2 deletions(-) diff --git a/receiver/splunksearchapireceiver/integration_test.go b/receiver/splunksearchapireceiver/integration_test.go index 3cf444257..e5b2b1cb4 100644 --- a/receiver/splunksearchapireceiver/integration_test.go +++ 
b/receiver/splunksearchapireceiver/integration_test.go @@ -16,15 +16,19 @@ package splunksearchapireceiver import ( "context" + "errors" "net/http" "net/http/httptest" "testing" + "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/component/componenttest" + "go.opentelemetry.io/collector/consumer" "go.opentelemetry.io/collector/consumer/consumertest" "go.opentelemetry.io/collector/extension/experimental/storage" + "go.opentelemetry.io/collector/pdata/plog" "go.uber.org/zap" ) @@ -41,7 +45,7 @@ func TestSplunkResultsPaginationFailure(t *testing.T) { }, } var callCount int - server := newMockSplunkServer(&callCount) + server := newMockSplunkServerPagination(&callCount) defer server.Close() settings := componenttest.NewNopTelemetrySettings() ssapir := newSSAPIReceiver(zap.NewNop(), cfg, settings, component.NewID(typeStr)) @@ -58,7 +62,7 @@ func TestSplunkResultsPaginationFailure(t *testing.T) { require.Equal(t, 1, callCount) } -func newMockSplunkServer(callCount *int) *httptest.Server { +func newMockSplunkServerPagination(callCount *int) *httptest.Server { return httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { if req.URL.String() == "/services/search/jobs" { rw.Header().Set("Content-Type", "application/xml") @@ -96,6 +100,77 @@ func newMockSplunkServer(callCount *int) *httptest.Server { })) } +// Test the case where the GCP exporter returns an error +func TestExporterFailure(t *testing.T) { + factory := NewFactory() + cfg := factory.CreateDefaultConfig().(*Config) + cfg.Searches = []Search{ + { + Query: "search index=otel", + EarliestTime: "2024-11-14T00:00:00.000Z", + LatestTime: "2024-11-14T23:59:59.000Z", + EventBatchSize: 5, + }, + } + server := newMockSplunkServer() + defer server.Close() + settings := componenttest.NewNopTelemetrySettings() + ssapir := newSSAPIReceiver(zap.NewNop(), cfg, settings, component.NewID(typeStr)) + 
logsConsumer := &mockLogsConsumerExporterErr{} + logsConsumer.On("ConsumeLogs", mock.Anything, mock.Anything).Return(nil) + + ssapir.logsConsumer = logsConsumer + ssapir.client, _ = newSplunkSearchAPIClient(context.Background(), settings, *cfg, componenttest.NewNopHost()) + ssapir.client.(*defaultSplunkSearchAPIClient).client = server.Client() + ssapir.client.(*defaultSplunkSearchAPIClient).endpoint = server.URL + + ssapir.initCheckpoint(context.Background()) + err := ssapir.runQueries(context.Background()) + require.NoError(t, err) + require.Equal(t, 5, ssapir.checkpointRecord.Offset) + + // simulate 2nd batch of data failing + // the checkpoint should not be updated, and an error should be returned + logsConsumerErr := &mockLogsConsumerExporterErr{} + logsConsumerErr.On("ConsumeLogs", mock.Anything, mock.Anything).Return(errors.New("error exporting logs")) + ssapir.logsConsumer = logsConsumerErr + err = ssapir.runQueries(context.Background()) + require.EqualError(t, err, "error consuming logs: error exporting logs") + require.Equal(t, 5, ssapir.checkpointRecord.Offset) +} + +func newMockSplunkServer() *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + if req.URL.String() == "/services/search/jobs" { + rw.Header().Set("Content-Type", "application/xml") + rw.WriteHeader(201) + rw.Write([]byte(` + + 123456 + + `)) + } + if req.URL.String() == "/services/search/v2/jobs/123456" { + rw.Header().Set("Content-Type", "application/xml") + rw.WriteHeader(200) + rw.Write([]byte(` + + + DISPATCH + + DONE + + + `)) + } + if req.URL.String() == "/services/search/v2/jobs/123456/results?output_mode=json&offset=0&count=5" { + rw.Header().Set("Content-Type", "application/json") + rw.WriteHeader(200) + rw.Write(splunkEventsResultsP1) + } + })) +} + var splunkEventsResultsP1 = []byte(`{ "init_offset": 0, "results": [ @@ -121,3 +196,16 @@ var splunkEventsResultsP1 = []byte(`{ } ] }`) + +type mockLogsConsumerExporterErr 
struct { + mock.Mock +} + +func (m *mockLogsConsumerExporterErr) ConsumeLogs(ctx context.Context, logs plog.Logs) error { + args := m.Called(ctx, logs) + return args.Error(0) +} + +func (m *mockLogsConsumerExporterErr) Capabilities() consumer.Capabilities { + return consumer.Capabilities{MutatesData: false} +} From 3693b90233750ea7b64198dad6ec99e82aee0f1b Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Fri, 6 Dec 2024 16:13:18 -0500 Subject: [PATCH 38/55] fix search checkpointing --- receiver/splunksearchapireceiver/integration_test.go | 2 ++ receiver/splunksearchapireceiver/receiver.go | 4 ++++ 2 files changed, 6 insertions(+) diff --git a/receiver/splunksearchapireceiver/integration_test.go b/receiver/splunksearchapireceiver/integration_test.go index e5b2b1cb4..b7a440133 100644 --- a/receiver/splunksearchapireceiver/integration_test.go +++ b/receiver/splunksearchapireceiver/integration_test.go @@ -128,6 +128,7 @@ func TestExporterFailure(t *testing.T) { err := ssapir.runQueries(context.Background()) require.NoError(t, err) require.Equal(t, 5, ssapir.checkpointRecord.Offset) + require.Equal(t, "search index=otel", ssapir.checkpointRecord.Search) // simulate 2nd batch of data failing // the checkpoint should not be updated, and an error should be returned @@ -137,6 +138,7 @@ func TestExporterFailure(t *testing.T) { err = ssapir.runQueries(context.Background()) require.EqualError(t, err, "error consuming logs: error exporting logs") require.Equal(t, 5, ssapir.checkpointRecord.Offset) + require.Equal(t, "search index=otel", ssapir.checkpointRecord.Search) } func newMockSplunkServer() *httptest.Server { diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index 23d977187..cc58ad8c5 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -112,6 +112,9 @@ func (ssapir *splunksearchapireceiver) Shutdown(ctx context.Context) error { func (ssapir 
*splunksearchapireceiver) runQueries(ctx context.Context) error { for _, search := range ssapir.config.Searches { + // set current search query + ssapir.checkpointRecord.Search = search.Query + // create search in Splunk searchID, err := ssapir.createSplunkSearch(search) if err != nil { @@ -297,6 +300,7 @@ func (ssapir *splunksearchapireceiver) checkpoint(ctx context.Context) error { if ssapir.checkpointRecord == nil { return nil } + marshalBytes, err := json.Marshal(ssapir.checkpointRecord) if err != nil { return fmt.Errorf("failed to write checkpoint: %w", err) From 97628ced2616d9b10ad4820c1fc49e04c617b122 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Thu, 21 Nov 2024 16:12:53 -0500 Subject: [PATCH 39/55] auth token --- receiver/splunksearchapireceiver/client.go | 58 +++++++++++---- .../splunksearchapireceiver/client_test.go | 24 +++++++ receiver/splunksearchapireceiver/config.go | 28 ++++++-- .../splunksearchapireceiver/config_test.go | 72 ++++++++++++++++++- receiver/splunksearchapireceiver/receiver.go | 26 ++++--- 5 files changed, 175 insertions(+), 33 deletions(-) diff --git a/receiver/splunksearchapireceiver/client.go b/receiver/splunksearchapireceiver/client.go index 0b6ec2ce8..1e2519f5c 100644 --- a/receiver/splunksearchapireceiver/client.go +++ b/receiver/splunksearchapireceiver/client.go @@ -37,11 +37,13 @@ type splunkSearchAPIClient interface { } type defaultSplunkSearchAPIClient struct { - client *http.Client - endpoint string - logger *zap.Logger - username string - password string + client *http.Client + endpoint string + logger *zap.Logger + username string + password string + authToken string + tokenType string } func newSplunkSearchAPIClient(ctx context.Context, settings component.TelemetrySettings, conf Config, host component.Host) (*defaultSplunkSearchAPIClient, error) { @@ -49,12 +51,15 @@ func newSplunkSearchAPIClient(ctx context.Context, settings component.TelemetryS if err != nil { return nil, err } + return 
&defaultSplunkSearchAPIClient{ - client: client, - endpoint: conf.Endpoint, - logger: settings.Logger, - username: conf.Username, - password: conf.Password, + client: client, + endpoint: conf.Endpoint, + logger: settings.Logger, + username: conf.Username, + password: conf.Password, + authToken: conf.AuthToken, + tokenType: conf.TokenType, }, nil } @@ -70,7 +75,11 @@ func (c defaultSplunkSearchAPIClient) CreateSearchJob(search string) (CreateJobR if err != nil { return CreateJobResponse{}, err } - req.SetBasicAuth(c.username, c.password) + + err = c.SetSplunkRequestAuth(req) + if err != nil { + return CreateJobResponse{}, err + } resp, err := c.client.Do(req) if err != nil { @@ -102,7 +111,11 @@ func (c defaultSplunkSearchAPIClient) GetJobStatus(sid string) (SearchJobStatusR if err != nil { return SearchJobStatusResponse{}, err } - req.SetBasicAuth(c.username, c.password) + + err = c.SetSplunkRequestAuth(req) + if err != nil { + return JobStatusResponse{}, err + } resp, err := c.client.Do(req) if err != nil { @@ -133,7 +146,11 @@ func (c defaultSplunkSearchAPIClient) GetSearchResults(sid string) (SearchResult if err != nil { return SearchResultsResponse{}, err } - req.SetBasicAuth(c.username, c.password) + + err = c.SetSplunkRequestAuth(req) + if err != nil { + return SearchResultsResponse{}, err + } resp, err := c.client.Do(req) if err != nil { @@ -158,3 +175,18 @@ func (c defaultSplunkSearchAPIClient) GetSearchResults(sid string) (SearchResult return searchResults, nil } + +func (c defaultSplunkSearchAPIClient) SetSplunkRequestAuth(req *http.Request) error { + if c.authToken != "" { + if strings.EqualFold(c.tokenType, TokenTypeBearer) { + req.Header.Set("Authorization", "Bearer "+string(c.authToken)) + } else if strings.EqualFold(c.tokenType, TokenTypeSplunk) { + req.Header.Set("Authorization", "Splunk "+string(c.authToken)) + } else { + return fmt.Errorf("auth_token provided without a correct token type, valid token types are %v", []string{TokenTypeBearer, 
TokenTypeSplunk}) + } + } else { + req.SetBasicAuth(c.username, c.password) + } + return nil +} diff --git a/receiver/splunksearchapireceiver/client_test.go b/receiver/splunksearchapireceiver/client_test.go index 9e3edec18..bcf64087a 100644 --- a/receiver/splunksearchapireceiver/client_test.go +++ b/receiver/splunksearchapireceiver/client_test.go @@ -98,6 +98,30 @@ func TestGetSearchResults(t *testing.T) { require.Empty(t, resp) } +func TestSetSplunkRequestAuth(t *testing.T) { + client := defaultSplunkSearchAPIClient{ + username: "user", + password: "password", + } + req := httptest.NewRequest("GET", "http://localhost:8089", nil) + client.SetSplunkRequestAuth(req) + require.Equal(t, req.Header.Get("Authorization"), "Basic dXNlcjpwYXNzd29yZA==") + + client = defaultSplunkSearchAPIClient{ + authToken: "token", + tokenType: TokenTypeBearer, + } + client.SetSplunkRequestAuth(req) + require.Equal(t, req.Header.Get("Authorization"), "Bearer token") + + client = defaultSplunkSearchAPIClient{ + authToken: "token", + tokenType: TokenTypeSplunk, + } + client.SetSplunkRequestAuth(req) + require.Equal(t, req.Header.Get("Authorization"), "Splunk token") +} + // mock Splunk servers func newMockServer() *httptest.Server { return httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { diff --git a/receiver/splunksearchapireceiver/config.go b/receiver/splunksearchapireceiver/config.go index 345cd6ab9..51c1a2a4d 100644 --- a/receiver/splunksearchapireceiver/config.go +++ b/receiver/splunksearchapireceiver/config.go @@ -16,6 +16,7 @@ package splunksearchapireceiver import ( "errors" + "fmt" "strings" "time" @@ -25,13 +26,17 @@ import ( var ( errNonStandaloneSearchQuery = errors.New("only standalone search commands can be used for scraping data") + TokenTypeBearer = "Bearer" + TokenTypeSplunk = "Splunk" ) // Config struct to represent the configuration for the Splunk Search API receiver type Config struct { confighttp.ClientConfig `mapstructure:",squash"` 
- Username string `mapstructure:"splunk_username"` - Password string `mapstructure:"splunk_password"` + Username string `mapstructure:"splunk_username,omitempty"` + Password string `mapstructure:"splunk_password,omitempty"` + AuthToken string `mapstructure:"auth_token,omitempty"` + TokenType string `mapstructure:"token_type,omitempty"` Searches []Search `mapstructure:"searches"` JobPollInterval time.Duration `mapstructure:"job_poll_interval"` StorageID *component.ID `mapstructure:"storage"` @@ -51,12 +56,27 @@ func (cfg *Config) Validate() error { if cfg.Endpoint == "" { return errors.New("missing Splunk server endpoint") } - if cfg.Username == "" { + + if cfg.Username == "" && cfg.AuthToken == "" { return errors.New("missing Splunk username") } - if cfg.Password == "" { + + if cfg.Password == "" && cfg.AuthToken == "" { return errors.New("missing Splunk password") } + + if cfg.AuthToken != "" { + if cfg.TokenType == "" { + return errors.New("auth_token provided without a token type") + } + if !strings.EqualFold(cfg.TokenType, TokenTypeBearer) && !strings.EqualFold(cfg.TokenType, TokenTypeSplunk) { + return fmt.Errorf("auth_token provided without a correct token type, valid token types are %v", []string{TokenTypeBearer, TokenTypeSplunk}) + } + if cfg.Username != "" || cfg.Password != "" { + return errors.New("auth_token and username/password were both provided, only one can be provided to authenticate with Splunk") + } + } + if len(cfg.Searches) == 0 { return errors.New("at least one search must be provided") } diff --git a/receiver/splunksearchapireceiver/config_test.go b/receiver/splunksearchapireceiver/config_test.go index 90440234c..5816d952d 100644 --- a/receiver/splunksearchapireceiver/config_test.go +++ b/receiver/splunksearchapireceiver/config_test.go @@ -27,6 +27,8 @@ func TestValidate(t *testing.T) { endpoint string username string password string + authToken string + tokenType string storage string searches []Search errExpected bool @@ -48,7 +50,7 @@ 
func TestValidate(t *testing.T) { errText: "missing Splunk server endpoint", }, { - desc: "Missing username", + desc: "Missing username, no auth token", endpoint: "http://localhost:8089", password: "password", storage: "file_storage", @@ -63,7 +65,7 @@ func TestValidate(t *testing.T) { errText: "missing Splunk username", }, { - desc: "Missing password", + desc: "Missing password, no auth token", endpoint: "http://localhost:8089", username: "user", storage: "file_storage", @@ -77,6 +79,55 @@ func TestValidate(t *testing.T) { errExpected: true, errText: "missing Splunk password", }, + { + desc: "Auth token without token type", + endpoint: "http://localhost:8089", + authToken: "token", + storage: "file_storage", + searches: []Search{ + { + Query: "search index=_internal", + EarliestTime: "2024-10-30T04:00:00.000Z", + LatestTime: "2024-10-30T14:00:00.000Z", + }, + }, + errExpected: true, + errText: "auth_token provided without a token type", + }, + { + desc: "Auth token with invalid token type", + endpoint: "http://localhost:8089", + authToken: "token", + tokenType: "invalid", + storage: "file_storage", + searches: []Search{ + { + Query: "search index=_internal", + EarliestTime: "2024-10-30T04:00:00.000Z", + LatestTime: "2024-10-30T14:00:00.000Z", + }, + }, + errExpected: true, + errText: "auth_token provided without a correct token type, valid token types are [Bearer Splunk]", + }, + { + desc: "Auth token and username/password provided", + endpoint: "http://localhost:8089", + username: "user", + password: "password", + authToken: "token", + tokenType: "Bearer", + storage: "file_storage", + searches: []Search{ + { + Query: "search index=_internal", + EarliestTime: "2024-10-30T04:00:00.000Z", + LatestTime: "2024-10-30T14:00:00.000Z", + }, + }, + errExpected: true, + errText: "auth_token and username/password were both provided, only one can be provided to authenticate with Splunk", + }, { desc: "Missing storage", endpoint: "http://localhost:8089", @@ -209,6 +260,21 @@ 
func TestValidate(t *testing.T) { }, errExpected: false, }, + { + desc: "Valid config with auth token", + endpoint: "http://localhost:8089", + authToken: "token", + tokenType: "Bearer", + storage: "file_storage", + searches: []Search{ + { + Query: "search index=_internal", + EarliestTime: "2024-10-30T04:00:00.000Z", + LatestTime: "2024-10-30T14:00:00.000Z", + }, + }, + errExpected: false, + }, { desc: "Valid config with multiple searches", endpoint: "http://localhost:8089", @@ -268,6 +334,8 @@ func TestValidate(t *testing.T) { cfg.Endpoint = tc.endpoint cfg.Username = tc.username cfg.Password = tc.password + cfg.AuthToken = tc.authToken + cfg.TokenType = tc.tokenType cfg.Searches = tc.searches if tc.storage != "" { cfg.StorageID = &component.ID{} diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index cc58ad8c5..949819086 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -119,25 +119,23 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { searchID, err := ssapir.createSplunkSearch(search) if err != nil { ssapir.logger.Error("error creating search", zap.Error(err)) + return err } // wait for search to complete + if err = ssapir.pollSearchCompletion(ctx, searchID); err != nil { + ssapir.logger.Error("error polling for search completion", zap.Error(err)) + return err + } + for { - done, err := ssapir.isSearchCompleted(searchID) + ssapir.logger.Info("fetching search results") + results, err := ssapir.getSplunkSearchResults(searchID, offset, search.EventBatchSize) if err != nil { - ssapir.logger.Error("error checking search status", zap.Error(err)) - } - if done { - break + ssapir.logger.Error("error fetching search results", zap.Error(err)) + return err } - time.Sleep(2 * time.Second) - } - - // fetch search results - results, err := ssapir.getSplunkSearchResults(ssapir.config, searchID) - if err != nil { - 
ssapir.logger.Error("error fetching search results", zap.Error(err)) - } + ssapir.logger.Info("search results fetched", zap.Int("num_results", len(results.Results))) // parse time strings to time.Time earliestTime, err := time.Parse(time.RFC3339, search.EarliestTime) @@ -212,9 +210,9 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { } // if the number of results is less than the results per request, we have queried all pages for the search if len(results.Results) < search.EventBatchSize { + ssapir.logger.Debug("results less than batch size, stopping search result export") break } - } ssapir.logger.Info("search results exported", zap.String("query", search.Query), zap.Int("total results", exportedEvents)) } From ccacaa040e203296905f62644ef68a5268e85dcd Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Thu, 21 Nov 2024 16:15:46 -0500 Subject: [PATCH 40/55] lint --- receiver/splunksearchapireceiver/config.go | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/receiver/splunksearchapireceiver/config.go b/receiver/splunksearchapireceiver/config.go index 51c1a2a4d..071af30f8 100644 --- a/receiver/splunksearchapireceiver/config.go +++ b/receiver/splunksearchapireceiver/config.go @@ -26,8 +26,10 @@ import ( var ( errNonStandaloneSearchQuery = errors.New("only standalone search commands can be used for scraping data") - TokenTypeBearer = "Bearer" - TokenTypeSplunk = "Splunk" + // TokenTypeBearer is the token type for Bearer tokens + TokenTypeBearer = "Bearer" + // TokenTypeSplunk is the token type for Splunk tokens + TokenTypeSplunk = "Splunk" ) // Config struct to represent the configuration for the Splunk Search API receiver From 0c12f425aedae732e9c2d36f948d6e7bff54791f Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Tue, 26 Nov 2024 10:18:39 -0500 Subject: [PATCH 41/55] fix struct name --- receiver/splunksearchapireceiver/client.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/receiver/splunksearchapireceiver/client.go b/receiver/splunksearchapireceiver/client.go index 1e2519f5c..4139c0f38 100644 --- a/receiver/splunksearchapireceiver/client.go +++ b/receiver/splunksearchapireceiver/client.go @@ -114,7 +114,7 @@ func (c defaultSplunkSearchAPIClient) GetJobStatus(sid string) (SearchJobStatusR err = c.SetSplunkRequestAuth(req) if err != nil { - return JobStatusResponse{}, err + return SearchJobStatusResponse{}, err } resp, err := c.client.Do(req) From a904a886703dc668f4ff32c1b95811a626683e3a Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Mon, 2 Dec 2024 11:19:32 -0500 Subject: [PATCH 42/55] rm prints, fix error messages --- receiver/splunksearchapireceiver/config.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/receiver/splunksearchapireceiver/config.go b/receiver/splunksearchapireceiver/config.go index 071af30f8..9c15c31ca 100644 --- a/receiver/splunksearchapireceiver/config.go +++ b/receiver/splunksearchapireceiver/config.go @@ -60,11 +60,11 @@ func (cfg *Config) Validate() error { } if cfg.Username == "" && cfg.AuthToken == "" { - return errors.New("missing Splunk username") + return errors.New("missing Splunk username or auth token") } if cfg.Password == "" && cfg.AuthToken == "" { - return errors.New("missing Splunk password") + return errors.New("missing Splunk password or auth token") } if cfg.AuthToken != "" { From 981eeb431bbd79c1f661f6c951a30f191898814b Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Mon, 2 Dec 2024 11:40:02 -0500 Subject: [PATCH 43/55] fix tests --- receiver/splunksearchapireceiver/config_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/receiver/splunksearchapireceiver/config_test.go b/receiver/splunksearchapireceiver/config_test.go index 5816d952d..789825451 100644 --- a/receiver/splunksearchapireceiver/config_test.go +++ b/receiver/splunksearchapireceiver/config_test.go @@ -62,7 +62,7 @@ func TestValidate(t *testing.T) { }, }, errExpected: true, - 
errText: "missing Splunk username", + errText: "missing Splunk username or auth token", }, { desc: "Missing password, no auth token", @@ -77,7 +77,7 @@ func TestValidate(t *testing.T) { }, }, errExpected: true, - errText: "missing Splunk password", + errText: "missing Splunk password or auth token", }, { desc: "Auth token without token type", From 257e5a18c57fdd72faa9650e3f0793804aaf73e5 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Wed, 4 Dec 2024 10:01:49 -0500 Subject: [PATCH 44/55] default batch size --- receiver/splunksearchapireceiver/receiver.go | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index 949819086..651412095 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -114,6 +114,11 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { for _, search := range ssapir.config.Searches { // set current search query ssapir.checkpointRecord.Search = search.Query + + // set default event batch size (matches Splunk API default) + if search.EventBatchSize == 0 { + search.EventBatchSize = 100 + } // create search in Splunk searchID, err := ssapir.createSplunkSearch(search) @@ -216,6 +221,7 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { } ssapir.logger.Info("search results exported", zap.String("query", search.Query), zap.Int("total results", exportedEvents)) } + ssapir.logger.Debug("all search results exported", zap.Int("total results", exportedEvents)) return nil } From 2be2b6d4a080f4733c0232dba2396749042580b1 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Thu, 5 Dec 2024 10:20:39 -0500 Subject: [PATCH 45/55] log end of export --- receiver/splunksearchapireceiver/receiver.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go 
index 651412095..e4c619caf 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -221,7 +221,7 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { } ssapir.logger.Info("search results exported", zap.String("query", search.Query), zap.Int("total results", exportedEvents)) } - ssapir.logger.Debug("all search results exported", zap.Int("total results", exportedEvents)) + ssapir.logger.Info("all search results exported") return nil } @@ -280,6 +280,7 @@ func (ssapir *splunksearchapireceiver) getSplunkSearchResults(sid string, offset } func (ssapir *splunksearchapireceiver) initCheckpoint(ctx context.Context) error { + ssapir.logger.Info("initializing checkpoint") // if a checkpoint already exists, use the offset from the checkpoint if err := ssapir.loadCheckpoint(ctx); err != nil { return fmt.Errorf("failed to load checkpoint: %w", err) @@ -295,7 +296,7 @@ func (ssapir *splunksearchapireceiver) initCheckpoint(ctx context.Context) error return nil } } - ssapir.logger.Debug("while initializing checkpoint, no matching search query found, starting from the beginning") + ssapir.logger.Info("while initializing checkpoint, no matching search query found, starting from the beginning") } return nil } From ad3255e6b174d342dd32de808be7e374dfbe61dd Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Fri, 6 Dec 2024 11:18:28 -0500 Subject: [PATCH 46/55] readme --- receiver/splunksearchapireceiver/README.md | 61 +++++++++++++++++++- receiver/splunksearchapireceiver/receiver.go | 2 +- 2 files changed, 61 insertions(+), 2 deletions(-) diff --git a/receiver/splunksearchapireceiver/README.md b/receiver/splunksearchapireceiver/README.md index becbe410b..1cd3251c9 100644 --- a/receiver/splunksearchapireceiver/README.md +++ b/receiver/splunksearchapireceiver/README.md @@ -1 +1,60 @@ -# Splunk Search API Receiver \ No newline at end of file +# Splunk Search API Receiver +This receiver collects Splunk 
events using the [Splunk Search API](https://docs.splunk.com/Documentation/Splunk/9.3.1/RESTREF/RESTsearch). + +## Supported Pipelines +- Logs + +## Prerequisites +- Splunk admin credentials +- Configured storage extension + +## Configuration +| Field | Type | Default | Description | +|---------------------|----------|-------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| endpoint | string | `required` `(no default)` | The endpoint of the splunk instance to collect from. | +| splunk_username | string | `(no default)` | Specifies the username used to authenticate to Splunk using basic auth. | +| splunk_password | string | `(no default)` | Specifies the password used to authenticate to Splunk using basic auth. | +| auth_token | string | `(no default)` | Specifies the token used to authenticate to Splunk using token auth. | +| token_type | string | `(no default)` | Specifies the type of token used to authenticate to Splunk using token auth. Accepted values are "Bearer" or "Splunk". | +| job_poll_interval | duration | `5s` | The receiver uses an API call to determine if a search has completed. Specifies how long to wait between polling for search job completion. | +| searches.query | string | `required (no default)` | The Splunk search to run to retrieve the desired events. Queries must start with `search` and should not contain additional commands, nor any time fields (e.g. `earliesttime`) | +| searches.earliest_time | string | `required (no default)` | The earliest timestamp to collect logs. Only logs that occurred at or after this timestamp will be collected. Must be in ISO 8601 or RFC3339 format. | +| searches.latest_time | string | `required (no default)` | The latest timestamp to collect logs. 
Only logs that occurred at or before this timestamp will be collected. Must be in ISO 8601 or RFC3339 format. | +| searches.event_batch_size | int | `100` | The amount of events to query from Splunk for a single request. | +| storage | component | `required (no default)` | The component ID of a storage extension which can be used when polling for `logs`. The storage extension prevents duplication of data after an exporter error by remembering which events were previously exported. | + +### Example Configuration +```yaml +receivers: + splunksearchapi: + endpoint: "https://splunk-c4-0.example.localnet:8089" + tls: + insecure_skip_verify: true + splunk_username: "user" + splunk_password: "pass" + job_poll_interval: 5s + searches: + - query: 'search index=my_index' + earliest_time: "2024-11-01T01:00:00.000-05:00" + latest_time: "2024-11-30T23:59:59.999-05:00" + event_batch_size: 500 + storage: file_storage +exporters: + googlecloud: + project: "my-gcp-project" + log: + default_log_name: "splunk-events" + sending_queue: + enabled: false + +extensions: + file_storage: + directory: "./local/storage" + +service: + extensions: [file_storage] + pipelines: + logs: + receivers: [splunksearchapi] + exporters: [googlecloud] +``` \ No newline at end of file diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index e4c619caf..ae92ce5bf 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -280,7 +280,7 @@ func (ssapir *splunksearchapireceiver) getSplunkSearchResults(sid string, offset } func (ssapir *splunksearchapireceiver) initCheckpoint(ctx context.Context) error { - ssapir.logger.Info("initializing checkpoint") + ssapir.logger.Debug("initializing checkpoint") // if a checkpoint already exists, use the offset from the checkpoint if err := ssapir.loadCheckpoint(ctx); err != nil { return fmt.Errorf("failed to load checkpoint: %w", err) From 
bcc1600720c1323f4c6c3ad76dd694c40f146ffd Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Fri, 6 Dec 2024 14:19:20 -0500 Subject: [PATCH 47/55] how-to --- receiver/splunksearchapireceiver/README.md | 36 ++++++++++++---------- 1 file changed, 20 insertions(+), 16 deletions(-) diff --git a/receiver/splunksearchapireceiver/README.md b/receiver/splunksearchapireceiver/README.md index 1cd3251c9..b212f1dad 100644 --- a/receiver/splunksearchapireceiver/README.md +++ b/receiver/splunksearchapireceiver/README.md @@ -14,8 +14,8 @@ This receiver collects Splunk events using the [Splunk Search API](https://docs. | endpoint | string | `required` `(no default)` | The endpoint of the splunk instance to collect from. | | splunk_username | string | `(no default)` | Specifies the username used to authenticate to Splunk using basic auth. | | splunk_password | string | `(no default)` | Specifies the password used to authenticate to Splunk using basic auth. | -| auth_token | string | `(no default)` | Specifies the token used to authenticate to Splunk using token auth. | -| token_type | string | `(no default)` | Specifies the type of token used to authenticate to Splunk using token auth. Accepted values are "Bearer" or "Splunk". | +| auth_token | string | `(no default)` | Specifies the token used to authenticate to Splunk using token auth. Mutually exclusive with basic auth using `splunk_username` and `splunk_password`. | +| token_type | string | `(no default)` | Specifies the type of token used to authenticate to Splunk using `auth_token`. Accepted values are "Bearer" or "Splunk". | | job_poll_interval | duration | `5s` | The receiver uses an API call to determine if a search has completed. Specifies how long to wait between polling for search job completion. | | searches.query | string | `required (no default)` | The Splunk search to run to retrieve the desired events. Queries must start with `search` and should not contain additional commands, nor any time fields (e.g. 
`earliesttime`) | | searches.earliest_time | string | `required (no default)` | The earliest timestamp to collect logs. Only logs that occurred at or after this timestamp will be collected. Must be in ISO 8601 or RFC3339 format. | @@ -39,22 +39,26 @@ receivers: latest_time: "2024-11-30T23:59:59.999-05:00" event_batch_size: 500 storage: file_storage -exporters: - googlecloud: - project: "my-gcp-project" - log: - default_log_name: "splunk-events" - sending_queue: - enabled: false extensions: file_storage: directory: "./local/storage" +``` + +## How To + +### Migrate historical events to Google Cloud Logging +1. Identify the Splunk index to migrate events from. Create a Splunk search to capture the events from that index. This will be the `searches.query` you pass to the receiver. + - Example: `search index=my_index1` + - Note: queries must begin with the explicit `search` command, and must not include additional commands, nor any time fields (e.g. `earliesttime`) +2. Determine the timeframe you want to migrate events from, and set the `searches.earliest_time` and `searches.latest_time` config fields accordingly. + - To migrate events from December 2024, EST (UTC-5): + - `earliest_time: "2024-12-01T00:00:00.000-05:00"` + - `latest_time: "2024-12-31T23:59:59.999-05:00"` + - Note: By default, GCL will not accept logs with a timestamp older than 30 days. Contact Google to modify this rule. +3. Repeat steps 1 & 2 for each index you wish to collect from +3. Configure a storage extension to store checkpointing data for the receiver. +4. Configure the rest of the receiver fields according to your Splunk environment. +5. Add a `googlecloud` exporter to your config. Configure the exporter to send to a GCP project where your service account has Logging Admin role. To check the permissions of service accounts in your project, go to the [IAM page](https://console.cloud.google.com/iam-admin/iam). +6. Disable the `sending_queue` field on the GCP exporter. 
The sending queue introduces an asynchronous step to the pipeline, which will jeopardize the receiver's ability to checkpoint correctly and recover from errors. For this same reason, avoid using any asynchronous processors (e.g., batch processor). -service: - extensions: [file_storage] - pipelines: - logs: - receivers: [splunksearchapi] - exporters: [googlecloud] -``` \ No newline at end of file From 70dcbde3470c3a2dc497fd4bdf92f21ed1775d3e Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Fri, 6 Dec 2024 14:29:45 -0500 Subject: [PATCH 48/55] how-to example config --- receiver/splunksearchapireceiver/README.md | 36 ++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/receiver/splunksearchapireceiver/README.md b/receiver/splunksearchapireceiver/README.md index b212f1dad..b7f9501e9 100644 --- a/receiver/splunksearchapireceiver/README.md +++ b/receiver/splunksearchapireceiver/README.md @@ -62,3 +62,39 @@ extensions: 5. Add a `googlecloud` exporter to your config. Configure the exporter to send to a GCP project where your service account has Logging Admin role. To check the permissions of service accounts in your project, go to the [IAM page](https://console.cloud.google.com/iam-admin/iam). 6. Disable the `sending_queue` field on the GCP exporter. The sending queue introduces an asynchronous step to the pipeline, which will jeopardize the receiver's ability to checkpoint correctly and recover from errors. For this same reason, avoid using any asynchronous processors (e.g., batch processor). 
+After following these steps, your configuration should look something like this: +```yaml +receivers: + splunksearchapi: + endpoint: "https://splunk-c4-0.example.localnet:8089" + tls: + insecure_skip_verify: true + splunk_username: "user" + splunk_password: "pass" + job_poll_interval: 5s + searches: + - query: 'search index=my_index' + earliest_time: "2024-11-01T01:00:00.000-05:00" + latest_time: "2024-11-30T23:59:59.999-05:00" + event_batch_size: 500 + storage: file_storage +exporters: + googlecloud: + project: "my-gcp-project" + log: + default_log_name: "splunk-events" + sending_queue: + enabled: false + +extensions: + file_storage: + directory: "./local/storage" + +service: + extensions: [file_storage] + pipelines: + logs: + receivers: [splunksearchapi] + exporters: [googlecloud] +``` +You are now ready to migrate events from Splunk to Google Cloud Logging. \ No newline at end of file From cb2e7f7a4b996da6bf4ff686de19652275ce0abd Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Fri, 6 Dec 2024 14:31:43 -0500 Subject: [PATCH 49/55] change how-to conf values --- receiver/splunksearchapireceiver/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/receiver/splunksearchapireceiver/README.md b/receiver/splunksearchapireceiver/README.md index b7f9501e9..91afecf7c 100644 --- a/receiver/splunksearchapireceiver/README.md +++ b/receiver/splunksearchapireceiver/README.md @@ -49,7 +49,7 @@ extensions: ### Migrate historical events to Google Cloud Logging 1. Identify the Splunk index to migrate events from. Create a Splunk search to capture the events from that index. This will be the `searches.query` you pass to the receiver. - - Example: `search index=my_index1` + - Example: `search index=my_index` - Note: queries must begin with the explicit `search` command, and must not include additional commands, nor any time fields (e.g. `earliesttime`) 2. 
Determine the timeframe you want to migrate events from, and set the `searches.earliest_time` and `searches.latest_time` config fields accordingly. - To migrate events from December 2024, EST (UTC-5): @@ -74,8 +74,8 @@ receivers: job_poll_interval: 5s searches: - query: 'search index=my_index' - earliest_time: "2024-11-01T01:00:00.000-05:00" - latest_time: "2024-11-30T23:59:59.999-05:00" + earliest_time: "2024-12-01T00:00:00.000-05:00" + latest_time: "2024-12-31T23:59:59.999-05:00" event_batch_size: 500 storage: file_storage exporters: From 699510b4307c2ec9fac0d90f14c2f2bcb74d1acc Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Mon, 9 Dec 2024 13:40:25 -0500 Subject: [PATCH 50/55] change test batch size --- receiver/splunksearchapireceiver/integration_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/receiver/splunksearchapireceiver/integration_test.go b/receiver/splunksearchapireceiver/integration_test.go index b7a440133..8611188e3 100644 --- a/receiver/splunksearchapireceiver/integration_test.go +++ b/receiver/splunksearchapireceiver/integration_test.go @@ -109,7 +109,7 @@ func TestExporterFailure(t *testing.T) { Query: "search index=otel", EarliestTime: "2024-11-14T00:00:00.000Z", LatestTime: "2024-11-14T23:59:59.000Z", - EventBatchSize: 5, + EventBatchSize: 10, }, } server := newMockSplunkServer() @@ -165,7 +165,7 @@ func newMockSplunkServer() *httptest.Server { `)) } - if req.URL.String() == "/services/search/v2/jobs/123456/results?output_mode=json&offset=0&count=5" { + if req.URL.String() == "/services/search/v2/jobs/123456/results?output_mode=json&offset=0&count=10" { rw.Header().Set("Content-Type", "application/json") rw.WriteHeader(200) rw.Write(splunkEventsResultsP1) From 1f81928feaa44f470caa71ecfdeac493d9dd1227 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Mon, 9 Dec 2024 15:55:56 -0500 Subject: [PATCH 51/55] fix test case --- .../integration_test.go | 39 ++++++++++++------- 1 file changed, 24 insertions(+), 15 
deletions(-) diff --git a/receiver/splunksearchapireceiver/integration_test.go b/receiver/splunksearchapireceiver/integration_test.go index 8611188e3..868c60b75 100644 --- a/receiver/splunksearchapireceiver/integration_test.go +++ b/receiver/splunksearchapireceiver/integration_test.go @@ -58,7 +58,7 @@ func TestSplunkResultsPaginationFailure(t *testing.T) { ssapir.initCheckpoint(context.Background()) ssapir.runQueries(context.Background()) - require.Equal(t, 5, ssapir.checkpointRecord.Offset) + require.Equal(t, 3, ssapir.checkpointRecord.Offset) require.Equal(t, 1, callCount) } @@ -72,8 +72,7 @@ func newMockSplunkServerPagination(callCount *int) *httptest.Server { 123456 `)) - } - if req.URL.String() == "/services/search/v2/jobs/123456" { + } else if req.URL.String() == "/services/search/v2/jobs/123456" { rw.Header().Set("Content-Type", "application/xml") rw.WriteHeader(200) rw.Write([]byte(` @@ -85,14 +84,12 @@ func newMockSplunkServerPagination(callCount *int) *httptest.Server { `)) - } - if req.URL.String() == "/services/search/v2/jobs/123456/results?output_mode=json&offset=0&count=5" && req.URL.Query().Get("offset") == "0" { + } else if req.URL.String() == "/services/search/v2/jobs/123456/results?output_mode=json&offset=0&count=5" && req.URL.Query().Get("offset") == "0" { rw.Header().Set("Content-Type", "application/json") rw.WriteHeader(200) rw.Write(splunkEventsResultsP1) *callCount++ - } - if req.URL.String() == "/services/search/v2/jobs/123456/results?output_mode=json&offset=5&count=5" && req.URL.Query().Get("offset") == "5" { + } else if req.URL.String() == "/services/search/v2/jobs/123456/results?output_mode=json&offset=5&count=5" && req.URL.Query().Get("offset") == "5" { rw.Header().Set("Content-Type", "application/json") rw.WriteHeader(400) rw.Write([]byte("error, bad request")) @@ -109,7 +106,7 @@ func TestExporterFailure(t *testing.T) { Query: "search index=otel", EarliestTime: "2024-11-14T00:00:00.000Z", LatestTime: "2024-11-14T23:59:59.000Z", - 
EventBatchSize: 10, + EventBatchSize: 3, }, } server := newMockSplunkServer() @@ -130,14 +127,18 @@ func TestExporterFailure(t *testing.T) { require.Equal(t, 5, ssapir.checkpointRecord.Offset) require.Equal(t, "search index=otel", ssapir.checkpointRecord.Search) - // simulate 2nd batch of data failing + // simulate data failing // the checkpoint should not be updated, and an error should be returned + ssapir.checkpointRecord.Offset = 0 + offset = 0 logsConsumerErr := &mockLogsConsumerExporterErr{} logsConsumerErr.On("ConsumeLogs", mock.Anything, mock.Anything).Return(errors.New("error exporting logs")) + ssapir.logsConsumer = logsConsumerErr + ssapir.initCheckpoint(context.Background()) err = ssapir.runQueries(context.Background()) require.EqualError(t, err, "error consuming logs: error exporting logs") - require.Equal(t, 5, ssapir.checkpointRecord.Offset) + require.Equal(t, 0, ssapir.checkpointRecord.Offset) require.Equal(t, "search index=otel", ssapir.checkpointRecord.Search) } @@ -151,8 +152,7 @@ func newMockSplunkServer() *httptest.Server { 123456 `)) - } - if req.URL.String() == "/services/search/v2/jobs/123456" { + } else if req.URL.String() == "/services/search/v2/jobs/123456" { rw.Header().Set("Content-Type", "application/xml") rw.WriteHeader(200) rw.Write([]byte(` @@ -164,11 +164,14 @@ func newMockSplunkServer() *httptest.Server { `)) - } - if req.URL.String() == "/services/search/v2/jobs/123456/results?output_mode=json&offset=0&count=10" { + } else if req.URL.String() == "/services/search/v2/jobs/123456/results?output_mode=json&offset=0&count=3" { rw.Header().Set("Content-Type", "application/json") rw.WriteHeader(200) rw.Write(splunkEventsResultsP1) + } else if req.URL.String() == "/services/search/v2/jobs/123456/results?output_mode=json&offset=3&count=3" { + rw.Header().Set("Content-Type", "application/json") + rw.WriteHeader(200) + rw.Write(splunkEventsResultsP2) } })) } @@ -187,7 +190,13 @@ var splunkEventsResultsP1 = []byte(`{ { "_raw": "lorem ipsum", 
"_time": "2024-11-14T13:02:29.000-05:00" - }, + } + ] +}`) + +var splunkEventsResultsP2 = []byte(`{ + "init_offset": 3, + "results": [ { "_raw": "dolor sit amet", "_time": "2024-11-14T13:02:28.000-05:00" From dac6a6bb1e413e25202606d04230c89a305c78d3 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Mon, 9 Dec 2024 16:10:53 -0500 Subject: [PATCH 52/55] fix client test --- receiver/splunksearchapireceiver/client_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/receiver/splunksearchapireceiver/client_test.go b/receiver/splunksearchapireceiver/client_test.go index bcf64087a..0dfe2d8ed 100644 --- a/receiver/splunksearchapireceiver/client_test.go +++ b/receiver/splunksearchapireceiver/client_test.go @@ -84,7 +84,7 @@ func TestGetSearchResults(t *testing.T) { resp, err := testClient.GetSearchResults("123456", 0, 5) require.NoError(t, err) - require.Equal(t, 5, len(resp.Results)) + require.Equal(t, 3, len(resp.Results)) require.Equal(t, "Hello, world!", resp.Results[0].Raw) // returns an error if the response status isn't 200 From 1f91ded7a00f818e7229f757a5c812b1cc77ea30 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Tue, 10 Dec 2024 11:03:22 -0500 Subject: [PATCH 53/55] fix rebase errors --- receiver/splunksearchapireceiver/client.go | 20 ++-- receiver/splunksearchapireceiver/go.mod | 16 +++- receiver/splunksearchapireceiver/go.sum | 91 +++++++++++++++---- receiver/splunksearchapireceiver/receiver.go | 12 +-- .../splunksearchapireceiver/receiver_test.go | 4 +- 5 files changed, 101 insertions(+), 42 deletions(-) diff --git a/receiver/splunksearchapireceiver/client.go b/receiver/splunksearchapireceiver/client.go index 4139c0f38..7b612e4c9 100644 --- a/receiver/splunksearchapireceiver/client.go +++ b/receiver/splunksearchapireceiver/client.go @@ -33,7 +33,7 @@ import ( type splunkSearchAPIClient interface { CreateSearchJob(search string) (CreateJobResponse, error) GetJobStatus(searchID string) (SearchJobStatusResponse, error) - 
GetSearchResults(searchID string, offset int, batchSize int) (SearchResultsResponse, error) + GetSearchResults(searchID string, offset int, batchSize int) (SearchResults, error) } type defaultSplunkSearchAPIClient struct { @@ -140,37 +140,37 @@ func (c defaultSplunkSearchAPIClient) GetJobStatus(sid string) (SearchJobStatusR return jobStatusResponse, nil } -func (c defaultSplunkSearchAPIClient) GetSearchResults(sid string) (SearchResultsResponse, error) { - endpoint := fmt.Sprintf("%s/services/search/v2/jobs/%s/results?output_mode=json", c.endpoint, sid) +func (c defaultSplunkSearchAPIClient) GetSearchResults(sid string, offset int, batchSize int) (SearchResults, error) { + endpoint := fmt.Sprintf("%s/services/search/v2/jobs/%s/results?output_mode=json&offset=%d&count=%d", c.endpoint, sid, offset, batchSize) req, err := http.NewRequest("GET", endpoint, nil) if err != nil { - return SearchResultsResponse{}, err + return SearchResults{}, err } err = c.SetSplunkRequestAuth(req) if err != nil { - return SearchResultsResponse{}, err + return SearchResults{}, err } resp, err := c.client.Do(req) if err != nil { - return SearchResultsResponse{}, err + return SearchResults{}, err } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { - return SearchResultsResponse{}, fmt.Errorf("failed to get search job results: %d", resp.StatusCode) + return SearchResults{}, fmt.Errorf("failed to get search job results: %d", resp.StatusCode) } - var searchResults SearchResultsResponse + var searchResults SearchResults body, err := io.ReadAll(resp.Body) if err != nil { - return SearchResultsResponse{}, fmt.Errorf("failed to read search job results response: %v", err) + return SearchResults{}, fmt.Errorf("failed to read search job results response: %v", err) } // fmt.Println("Body: ", string(body)) err = json.Unmarshal(body, &searchResults) if err != nil { - return SearchResultsResponse{}, fmt.Errorf("failed to unmarshal search job results response: %v", err) + return 
SearchResults{}, fmt.Errorf("failed to unmarshal search job results response: %v", err) } return searchResults, nil diff --git a/receiver/splunksearchapireceiver/go.mod b/receiver/splunksearchapireceiver/go.mod index f03f865ea..181b5657d 100644 --- a/receiver/splunksearchapireceiver/go.mod +++ b/receiver/splunksearchapireceiver/go.mod @@ -7,7 +7,8 @@ require ( github.com/stretchr/testify v1.9.0 go.opentelemetry.io/collector/component v0.113.0 go.opentelemetry.io/collector/consumer v0.113.0 - go.opentelemetry.io/collector/consumer/consumertest v0.112.0 + go.opentelemetry.io/collector/consumer/consumertest v0.113.0 + go.opentelemetry.io/collector/extension/experimental/storage v0.113.0 go.opentelemetry.io/collector/pdata v1.19.0 go.opentelemetry.io/collector/receiver v0.113.0 go.opentelemetry.io/collector/receiver/receivertest v0.113.0 @@ -28,6 +29,7 @@ require ( github.com/goccy/go-json v0.10.3 // indirect github.com/golang/snappy v0.0.4 // indirect github.com/google/uuid v1.6.0 // indirect + github.com/hashicorp/go-version v1.7.0 // indirect github.com/klauspost/compress v1.17.11 // indirect github.com/knadh/koanf/maps v0.1.1 // indirect github.com/knadh/koanf/providers/confmap v0.1.0 // indirect @@ -49,15 +51,19 @@ require ( go.opentelemetry.io/collector/config/configopaque v1.19.0 // indirect go.opentelemetry.io/collector/config/configtls v1.19.0 // indirect go.opentelemetry.io/collector/config/internal v0.113.0 // indirect - go.opentelemetry.io/collector/consumer/consumererror v0.112.0 // indirect - go.opentelemetry.io/collector/consumer/consumerprofiles v0.112.0 // indirect + go.opentelemetry.io/collector/confmap v1.19.0 // indirect + go.opentelemetry.io/collector/consumer/consumererror v0.113.0 // indirect + go.opentelemetry.io/collector/consumer/consumerprofiles v0.113.0 // indirect go.opentelemetry.io/collector/extension v0.113.0 // indirect go.opentelemetry.io/collector/extension/auth v0.113.0 // indirect - go.opentelemetry.io/collector/pdata/pprofile 
v0.112.0 // indirect - go.opentelemetry.io/collector/receiver/receiverprofiles v0.112.0 // indirect + go.opentelemetry.io/collector/featuregate v1.19.0 // indirect + go.opentelemetry.io/collector/pdata/pprofile v0.113.0 // indirect + go.opentelemetry.io/collector/receiver/receiverprofiles v0.113.0 // indirect + go.opentelemetry.io/collector/semconv v0.113.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.56.0 // indirect go.opentelemetry.io/otel/sdk v1.31.0 // indirect go.opentelemetry.io/otel/sdk/metric v1.31.0 // indirect + gonum.org/v1/gonum v0.15.1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/receiver/splunksearchapireceiver/go.sum b/receiver/splunksearchapireceiver/go.sum index 5680b4a21..36d4d0b22 100644 --- a/receiver/splunksearchapireceiver/go.sum +++ b/receiver/splunksearchapireceiver/go.sum @@ -1,6 +1,14 @@ +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/elastic/lunes v0.1.0 h1:amRtLPjwkWtzDF/RKzcEPMvSsSseLDLW+bnhfNSLRe4= +github.com/elastic/lunes v0.1.0/go.mod h1:xGphYIt3XdZRtyWosHQTErsQTd4OP1p9wsbVoHelrd4= +github.com/expr-lang/expr v1.16.9 h1:WUAzmR0JNI9JCiF0/ewwHB1gmcGw5wW7nWt8gc6PpCI= +github.com/expr-lang/expr v1.16.9/go.mod h1:8/vRC7+7HBzESEqt5kKpYXxrxkr31SaO8r40VO/1IT4= github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4/go.mod 
h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M= @@ -10,30 +18,61 @@ github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss= +github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= +github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA= +github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= +github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/json-iterator/go v1.1.12/go.mod 
h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc= github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0= +github.com/knadh/koanf/maps v0.1.1 h1:G5TjmUh2D7G2YWf5SQQqSiHRJEjaicvU0KpypqB3NIs= +github.com/knadh/koanf/maps v0.1.1/go.mod h1:npD/QZY3V6ghQDdcQzl1W4ICNVTkohC8E73eI2xW4yI= +github.com/knadh/koanf/providers/confmap v0.1.0 h1:gOkxhHkemwG4LezxxN8DMOFopOPghxRVp7JbIvdvqzU= +github.com/knadh/koanf/providers/confmap v0.1.0/go.mod h1:2uLhxQzJnyHKfxG927awZC7+fyHFdQkd697K4MdLnIU= +github.com/knadh/koanf/v2 v2.1.2 h1:I2rtLRqXRy1p01m/utEtpZSSA6dcJbgGVuE27kW2PzQ= +github.com/knadh/koanf/v2 v2.1.2/go.mod h1:Gphfaen0q1Fc1HTgJgSTC4oRX9R2R5ErYMZJy8fLJBo= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/leodido/go-syslog/v4 v4.2.0 h1:A7vpbYxsO4e2E8udaurkLlxP5LDpDbmPMsGnuhb7jVk= +github.com/leodido/go-syslog/v4 v4.2.0/go.mod h1:eJ8rUfDN5OS6dOkCOBYlg2a+hbAg6pJa99QXXgMrd98= +github.com/leodido/ragel-machinery v0.0.0-20190525184631-5f46317e436b h1:11UHH39z1RhZ5dc4y4r/4koJo6IYFgTRMe/LlwRTEw0= +github.com/leodido/ragel-machinery v0.0.0-20190525184631-5f46317e436b/go.mod h1:WZxr2/6a/Ar9bMDc2rN/LJrE/hF6bXE4LPyDSIxwAfg= +github.com/magefile/mage v1.15.0 h1:BvGheCMAsG3bWUDbZ8AyXXpCNwU9u5CB6sM+HNb9HYg= +github.com/magefile/mage v1.15.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A= +github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= +github.com/mitchellh/copystructure 
v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= +github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= +github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/storage v0.113.0 h1:ERdOiTmsDruI/s5oEgN45NsZW2roWXmO0u2aceR4GuM= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/storage v0.113.0/go.mod h1:RkClsQhl8hdAg874Ot4kaG92s+6dW0Dvlt5HRxhsavc= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/common v0.113.0 h1:7A8MgFPYRQWq1RkFBktq01CW+eTYhiGML0IxQNv2uaM= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/common v0.113.0/go.mod h1:E1pc7mDXH+5s7RyXw291h8lz2dhzPzaDrAHqP1Lawvw= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.113.0 h1:EZ/ZNsovNcQq+wwAbTAWNY+6BHnv24NxvVoC6eYmtg8= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.113.0/go.mod h1:u21dEQ9yQ0JyLMSrKLWWzHG/lHSlteNfa/EQ7Vqcle4= +github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.113.0 h1:G8w+wg4nnqBqe297fBWnjJ5Tg2OYDVEMsdWA9/3ozxQ= +github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.113.0/go.mod h1:m3hDVsXPQzQfeji3+hn7NYJPHDRlHhQRNd5T7N5wZqc= github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ= github.com/pierrec/lz4/v4 v4.1.21/go.mod 
h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= @@ -43,9 +82,13 @@ github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99 github.com/rs/cors v1.11.1 h1:eU3gRzXLRK57F5rKMGMZURNdIG4EoAmX8k94r9wXWHA= github.com/rs/cors v1.11.1/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/valyala/fastjson v1.6.4 h1:uAUNq9Z6ymTgGhcm0UynUAB6tlbakBrz6CQFax3BXVQ= +github.com/valyala/fastjson v1.6.4/go.mod h1:CLCAqky6SMuOcxStkYQvblddUtoRxhYMGLrsQns1aXY= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= go.opentelemetry.io/collector/client v1.19.0 h1:TUal8WV1agTrZStgE7BJ8ZC0IHLGtrfgO9ogU9t1mv8= @@ -66,30 +109,40 @@ go.opentelemetry.io/collector/config/configtls v1.19.0 h1:GQ/cF1hgNqHVBq2oSSrOFX go.opentelemetry.io/collector/config/configtls v1.19.0/go.mod h1:1hyqnYB3JqEUlk1ME/s9HYz4oCRcxQCRxsJitFFT/cA= go.opentelemetry.io/collector/config/internal v0.113.0 h1:9RAzH8v7ItFT1npHpvP0SvUzBHcZDliCGRo9Spp6v7c= go.opentelemetry.io/collector/config/internal v0.113.0/go.mod h1:yC7E4h1Uj0SubxcFImh6OvBHFTjMh99+A5PuyIgDWqc= +go.opentelemetry.io/collector/confmap v1.19.0 h1:TQ0lZpAKqgsE0EKk+u4JA+uBbPYeFRmWP3GH43w40CY= +go.opentelemetry.io/collector/confmap v1.19.0/go.mod h1:GgNu1ElPGmLn9govqIfjaopvdspw4PJ9KeDtWC4E2Q4= 
go.opentelemetry.io/collector/consumer v0.113.0 h1:KJSiK5vSIY9dgPxwKfQ3gOgKtQsqc+7IB7mGhUAL5c8= go.opentelemetry.io/collector/consumer v0.113.0/go.mod h1:zHMlXYFaJlZoLCBR6UwWoyXZ/adcO1u2ydqUal3VmYU= -go.opentelemetry.io/collector/consumer/consumererror v0.112.0 h1:dCqWEi3Yws5V5oGhCSOwxCHK6tYya5UzfzXmSLMHZ8E= -go.opentelemetry.io/collector/consumer/consumererror v0.112.0/go.mod h1:X9RJt5caDnwxoG++GhQHvlmDi2TMWEr6S/XRhZTSmOI= -go.opentelemetry.io/collector/consumer/consumerprofiles v0.112.0 h1:ym+QxemlbWwfMSUto1hRTfcZeYbj2q8FpMzjk8O+X60= -go.opentelemetry.io/collector/consumer/consumerprofiles v0.112.0/go.mod h1:4PjDUpURFh85R6NLEHrEf/uZjpk4LAYmmOrqu+iZsyE= -go.opentelemetry.io/collector/consumer/consumertest v0.112.0 h1:pGvNH+H4rMygUOql6ynVQim6UFdimTiJ0HRfQL6v0GE= -go.opentelemetry.io/collector/consumer/consumertest v0.112.0/go.mod h1:rfVo0tYt/BaLWw3IaQKVQafjUlMsA5qTkvsSOfFrr9c= +go.opentelemetry.io/collector/consumer/consumererror v0.113.0 h1:Hd2N7n9RKbnKRaVrdw6fPBoQko5zZIgCxwVxkL6SAIE= +go.opentelemetry.io/collector/consumer/consumererror v0.113.0/go.mod h1:o0MAGFdzcr7LFTUQ6iivPPhbVmn2ZVIYm3FPXk2+JUo= +go.opentelemetry.io/collector/consumer/consumerprofiles v0.113.0 h1:RftAcQUY5UOfbEK4s16jnORqTx16y9+PxA1lQwt98cQ= +go.opentelemetry.io/collector/consumer/consumerprofiles v0.113.0/go.mod h1:ZuHrQ4pWguh6dw0DgTfcUtdY/T+cnOJJNP6LMbm5Y5A= +go.opentelemetry.io/collector/consumer/consumertest v0.113.0 h1:ua2AjNx3DUA8qElXNkggB4w3VDL/rBKBvryOQkhumH8= +go.opentelemetry.io/collector/consumer/consumertest v0.113.0/go.mod h1:vK8o4ZTZSiG3rVyqxZcCNmT/cvEfx34ig7V65L9+6Rg= go.opentelemetry.io/collector/extension v0.113.0 h1:Vp/YSL8ZCkJQrP1lf2Bm5yaTvcp6ROO3AnfuSL3GEXM= go.opentelemetry.io/collector/extension v0.113.0/go.mod h1:Pwp0TNqdHeER4V1I6H6oCvrto/riiOAqs3737BWCnjw= go.opentelemetry.io/collector/extension/auth v0.113.0 h1:4ggRy1vepOabUiCWfU+6M9P/ftXojMUNAvBpeLihYj8= go.opentelemetry.io/collector/extension/auth v0.113.0/go.mod h1:VbvAm2YZAqePkWgwn0m0vBaq3aC49CxPVwHmrJ24aeQ= 
+go.opentelemetry.io/collector/extension/experimental/storage v0.113.0 h1:Qq4IaB6bMUrf/bWoPZ5ESWywCt+vDi8I/ChYejIEPcc= +go.opentelemetry.io/collector/extension/experimental/storage v0.113.0/go.mod h1:BRmo+A7f06u/rhyLauU/Vogk+QRN0y1j2VVVgMGWrfQ= +go.opentelemetry.io/collector/featuregate v1.19.0 h1:ASea2sU+tdpKI3RxIJC/pufDAfwAmrvcQ4EmTHVu0B0= +go.opentelemetry.io/collector/featuregate v1.19.0/go.mod h1:47xrISO71vJ83LSMm8+yIDsUbKktUp48Ovt7RR6VbRs= go.opentelemetry.io/collector/pdata v1.19.0 h1:jmnU5R8TOCbwRr4B8sjdRxM7L5WnEKlQWX1dtLYxIbE= go.opentelemetry.io/collector/pdata v1.19.0/go.mod h1:Ox1YVLe87cZDB/TL30i4SUz1cA5s6AM6SpFMfY61ICs= -go.opentelemetry.io/collector/pdata/pprofile v0.112.0 h1:t+LYorcMqZ3sDz5/jp3xU2l5lIhIXuIOOGO4Ef9CG2c= -go.opentelemetry.io/collector/pdata/pprofile v0.112.0/go.mod h1:F2aTCoDzIaxEUK1g92LZvMwradySFMo3ZsAnBIpOdUg= -go.opentelemetry.io/collector/pdata/testdata v0.112.0 h1:7jJzNvRE+CpYrwHbAYwPiN9a/hqmVRlRADJNeDJTvYI= -go.opentelemetry.io/collector/pdata/testdata v0.112.0/go.mod h1:9kO148Qp12B93SSUE52s0QGGV8Nf9RFN2G/PnZx3l+w= -go.opentelemetry.io/collector/pipeline v0.112.0 h1:jqKDdb8k53OLPibvxzX6fmMec0ZHAtqe4p2+cuHclEI= -go.opentelemetry.io/collector/pipeline v0.112.0/go.mod h1:4vOvjVsoYTHVGTbfFwqfnQOSV2K3RKUHofh3jNRc2Mg= -go.opentelemetry.io/collector/receiver v0.112.0 h1:gdTBDOPGKMZlZghtN5A7ZLNlNwCHWYcoJQeIiXvyGEQ= -go.opentelemetry.io/collector/receiver v0.112.0/go.mod h1:3QmfSUiyFzRTnHUqF8fyEvQpU5q/xuwS43jGt8JXEEA= -go.opentelemetry.io/collector/receiver/receiverprofiles v0.112.0 h1:SShkZsWRsFss3iWZa9JwMC7h4gD5RbWDhUcz1/9dXSs= -go.opentelemetry.io/collector/receiver/receiverprofiles v0.112.0/go.mod h1:615smszDXiz4YWwXslxlAjX7FzOVDU7Bk6xARFk+zpk= +go.opentelemetry.io/collector/pdata/pprofile v0.113.0 h1:VRf4p0VhfuaR+Epy/nMIlu/9t39WU9CUgHVUvpuGxfU= +go.opentelemetry.io/collector/pdata/pprofile v0.113.0/go.mod h1:5aDejksdXh5PdJN/OhpzATGT3kbNL0RMmw2Q0Q6E/o0= +go.opentelemetry.io/collector/pdata/testdata v0.113.0 
h1:vRfn85jicO2F4eOTgsWtzmU/K3E/uZUtM1HEefvvJD8= +go.opentelemetry.io/collector/pdata/testdata v0.113.0/go.mod h1:sR+6eR+YEJhYZu9StbqzeWcCmHpfBAgX/qjP82HY9Gw= +go.opentelemetry.io/collector/pipeline v0.113.0 h1:vSRzRe3717jV0btCNPhVkhg2lu0uFxcm2VO+vhad/eE= +go.opentelemetry.io/collector/pipeline v0.113.0/go.mod h1:4vOvjVsoYTHVGTbfFwqfnQOSV2K3RKUHofh3jNRc2Mg= +go.opentelemetry.io/collector/receiver v0.113.0 h1:vraAbkPy8Pz9x5X39gV+j9t6x23PNsY2aJ6gQMugRbQ= +go.opentelemetry.io/collector/receiver v0.113.0/go.mod h1:IUa8/lNw8Qh4L5Q3jOeRWKW0ebQPoNcfhytxN5Puq2A= +go.opentelemetry.io/collector/receiver/receiverprofiles v0.113.0 h1:uVxuzjGe2t1sbwahSBowVHYnGzpzn8brmfn8z1UHvQg= +go.opentelemetry.io/collector/receiver/receiverprofiles v0.113.0/go.mod h1:khKDkzYJR2x2OPUqGSmoSncdINT9lUE5IThiHPDbqZk= +go.opentelemetry.io/collector/receiver/receivertest v0.113.0 h1:0vOvz3S4Q/KwcNCS9C7zPo0uxD6RSWktG88yGdxfV6g= +go.opentelemetry.io/collector/receiver/receivertest v0.113.0/go.mod h1:sRq5ctm5UE/0Ar562wnCVQ1zbAie/D127D1WbtbEuEc= +go.opentelemetry.io/collector/semconv v0.113.0 h1:twenSI7M7MJMJKW8D6a/GXxPZTPbama/weywBtV2iFw= +go.opentelemetry.io/collector/semconv v0.113.0/go.mod h1:zCJ5njhWpejR+A40kiEoeFm1xq1uzyZwMnRNX6/D82A= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.56.0 h1:UP6IpuHFkUgOQL9FFQFrZ+5LiwhhYRbi7VZSIx6Nj5s= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.56.0/go.mod h1:qxuZLtbq5QDtdeSHsS7bcf6EH6uO6jUAgk764zd3rhM= go.opentelemetry.io/otel v1.31.0 h1:NsJcKPIW0D0H3NgzPDHmo0WW6SptzPdqg/L1zsIm2hY= @@ -111,6 +164,8 @@ go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/exp 
v0.0.0-20240506185415-9bf2ced13842 h1:vr/HnozRka3pE4EsMEg1lgkXJkTFJCVUX+S/ZT6wYzM= +golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842/go.mod h1:XtvwrStGgqGPLc4cjQfWqZHG1YFdYs6swckp8vpsjnc= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= @@ -139,6 +194,8 @@ golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gonum.org/v1/gonum v0.15.1 h1:FNy7N6OUZVUaWG9pTiD+jlhdQ3lMP+/LcTpJ6+a8sQ0= +gonum.org/v1/gonum v0.15.1/go.mod h1:eZTZuRFrzu5pcyjN5wJhcIhnUdNijYxX1T2IcrOGY0o= google.golang.org/genproto/googleapis/rpc v0.0.0-20240822170219-fc7c04adadcd h1:6TEm2ZxXoQmFWFlt1vNxvVOa1Q0dXFQD1m/rYjXmS0E= google.golang.org/genproto/googleapis/rpc v0.0.0-20240822170219-fc7c04adadcd/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU= google.golang.org/grpc v1.67.1 h1:zWnc1Vrcno+lHZCOofnIMvycFcc0QRGIzm9dhnDX68E= @@ -148,5 +205,7 @@ google.golang.org/protobuf v1.35.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojt gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.1 
h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index ae92ce5bf..f95b788fb 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -18,7 +18,6 @@ import ( "context" "encoding/json" "fmt" - "net/http" "time" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/adapter" @@ -114,7 +113,7 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { for _, search := range ssapir.config.Searches { // set current search query ssapir.checkpointRecord.Search = search.Query - + // set default event batch size (matches Splunk API default) if search.EventBatchSize == 0 { search.EventBatchSize = 100 @@ -190,14 +189,9 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { // pass logs, wait for exporter to confirm successful export to GCP err = ssapir.logsConsumer.ConsumeLogs(ctx, logs) if err != nil { -<<<<<<< HEAD - // Error from down the pipeline, freak out - ssapir.logger.Error("error consuming logs", zap.Error(err)) -======= // error from down the pipeline, freak out return fmt.Errorf("error consuming logs: %w", err) ->>>>>>> 2153d9e0 (return error on export fail) } // last batch of logs has been successfully exported exportedEvents += logs.ResourceLogs().Len() @@ -272,9 +266,9 @@ func (ssapir *splunksearchapireceiver) isSearchCompleted(resp SearchJobStatusRes } func (ssapir *splunksearchapireceiver) getSplunkSearchResults(sid string, offset int, batchSize int) (SearchResults, error) { - resp, err := ssapir.getSearchResults(sid, offset, batchSize) + resp, err := ssapir.client.GetSearchResults(sid, offset, batchSize) if err != nil { - return SearchResultsResponse{}, err + return SearchResults{}, err } return resp, nil } diff --git 
a/receiver/splunksearchapireceiver/receiver_test.go b/receiver/splunksearchapireceiver/receiver_test.go index 84d4e7886..5e4d52066 100644 --- a/receiver/splunksearchapireceiver/receiver_test.go +++ b/receiver/splunksearchapireceiver/receiver_test.go @@ -190,9 +190,9 @@ func (m *mockLogsClient) CreateSearchJob(searchQuery string) (CreateJobResponse, return args.Get(0).(CreateJobResponse), args.Error(1) } -func (m *mockLogsClient) GetSearchResults(searchID string, offset int, batchSize int) (SearchResultsResponse, error) { +func (m *mockLogsClient) GetSearchResults(searchID string, offset int, batchSize int) (SearchResults, error) { args := m.Called(searchID, offset, batchSize) - return args.Get(0).(SearchResultsResponse), args.Error(1) + return args.Get(0).(SearchResults), args.Error(1) } type mockStorage struct { From dad623cd0e42f9e0a50c268724085d2b8a15192d Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Tue, 10 Dec 2024 11:19:05 -0500 Subject: [PATCH 54/55] tidy --- go.mod | 1 + 1 file changed, 1 insertion(+) diff --git a/go.mod b/go.mod index 82d9470eb..a5837ce17 100644 --- a/go.mod +++ b/go.mod @@ -198,6 +198,7 @@ require ( ) require ( + github.com/observiq/bindplane-agent/receiver/splunksearchapireceiver v0.0.0-00010101000000-000000000000 github.com/open-telemetry/opentelemetry-collector-contrib/confmap/provider/aesprovider v0.114.0 github.com/open-telemetry/opentelemetry-collector-contrib/processor/intervalprocessor v0.114.0 go.opentelemetry.io/collector/extension/extensiontest v0.114.0 From 7dea9f40c5e02d968758eabf4779957b04568353 Mon Sep 17 00:00:00 2001 From: Caleb Hurshman Date: Wed, 11 Dec 2024 15:14:31 -0500 Subject: [PATCH 55/55] initial feedback --- .gitignore | 1 - exporter/chronicleexporter/marshal_test.go | 1 - go.mod | 2 +- receiver/splunksearchapireceiver/client.go | 83 +++++------- .../splunksearchapireceiver/client_test.go | 18 +-- receiver/splunksearchapireceiver/config.go | 10 +- .../splunksearchapireceiver/config_test.go | 4 +- 
receiver/splunksearchapireceiver/go.mod | 59 ++++---- receiver/splunksearchapireceiver/go.sum | 126 +++++++++--------- .../integration_test.go | 10 +- receiver/splunksearchapireceiver/receiver.go | 42 +++--- 11 files changed, 164 insertions(+), 192 deletions(-) diff --git a/.gitignore b/.gitignore index 174981ba4..5221bfd9c 100644 --- a/.gitignore +++ b/.gitignore @@ -21,7 +21,6 @@ opentelemetry-java-contrib-jmx-metrics.jar VERSION.txt release_deps /tmp -/local # OpAmp Files collector*.yaml diff --git a/exporter/chronicleexporter/marshal_test.go b/exporter/chronicleexporter/marshal_test.go index ec68d91da..fb5694239 100644 --- a/exporter/chronicleexporter/marshal_test.go +++ b/exporter/chronicleexporter/marshal_test.go @@ -462,7 +462,6 @@ func TestProtoMarshaler_MarshalRawLogsForHTTP(t *testing.T) { cfg: Config{ CustomerID: uuid.New().String(), LogType: "WINEVTLOG", - IngestionLabels: map[string]string{`chronicle_ingestion_label["key1"]`: "value1", `chronicle_ingestion_label["key2"]`: "value2"}, RawLogField: "attributes", OverrideLogType: false, }, diff --git a/go.mod b/go.mod index a5837ce17..784101875 100644 --- a/go.mod +++ b/go.mod @@ -198,7 +198,7 @@ require ( ) require ( - github.com/observiq/bindplane-agent/receiver/splunksearchapireceiver v0.0.0-00010101000000-000000000000 + github.com/observiq/bindplane-agent/receiver/splunksearchapireceiver v1.67.0 github.com/open-telemetry/opentelemetry-collector-contrib/confmap/provider/aesprovider v0.114.0 github.com/open-telemetry/opentelemetry-collector-contrib/processor/intervalprocessor v0.114.0 go.opentelemetry.io/collector/extension/extensiontest v0.114.0 diff --git a/receiver/splunksearchapireceiver/client.go b/receiver/splunksearchapireceiver/client.go index 7b612e4c9..3560d1b63 100644 --- a/receiver/splunksearchapireceiver/client.go +++ b/receiver/splunksearchapireceiver/client.go @@ -46,7 +46,7 @@ type defaultSplunkSearchAPIClient struct { tokenType string } -func newSplunkSearchAPIClient(ctx context.Context, 
settings component.TelemetrySettings, conf Config, host component.Host) (*defaultSplunkSearchAPIClient, error) { +func newDefaultSplunkSearchAPIClient(ctx context.Context, settings component.TelemetrySettings, conf Config, host component.Host) (*defaultSplunkSearchAPIClient, error) { client, err := conf.ClientConfig.ToClient(ctx, host, settings) if err != nil { return nil, err @@ -63,7 +63,7 @@ func newSplunkSearchAPIClient(ctx context.Context, settings component.TelemetryS }, nil } -func (c defaultSplunkSearchAPIClient) CreateSearchJob(search string) (CreateJobResponse, error) { +func (c *defaultSplunkSearchAPIClient) CreateSearchJob(search string) (CreateJobResponse, error) { endpoint := fmt.Sprintf("%s/services/search/jobs", c.endpoint) if !strings.Contains(search, strings.ToLower("starttime=")) || !strings.Contains(search, strings.ToLower("endtime=")) || !strings.Contains(search, strings.ToLower("timeformat=")) { @@ -71,119 +71,102 @@ func (c defaultSplunkSearchAPIClient) CreateSearchJob(search string) (CreateJobR } reqBody := fmt.Sprintf(`search=%s`, url.QueryEscape(search)) - req, err := http.NewRequest("POST", endpoint, bytes.NewBuffer([]byte(reqBody))) - if err != nil { - return CreateJobResponse{}, err - } - - err = c.SetSplunkRequestAuth(req) - if err != nil { - return CreateJobResponse{}, err - } - - resp, err := c.client.Do(req) + resp, err := c.doSplunkRequest("POST", endpoint, bytes.NewBuffer([]byte(reqBody))) if err != nil { return CreateJobResponse{}, err } defer resp.Body.Close() if resp.StatusCode != http.StatusCreated { - return CreateJobResponse{}, fmt.Errorf("failed to create search job: %d", resp.StatusCode) + return CreateJobResponse{}, fmt.Errorf("create search job: %d", resp.StatusCode) } var jobResponse CreateJobResponse body, err := io.ReadAll(resp.Body) if err != nil { - return CreateJobResponse{}, fmt.Errorf("failed to read search job create response: %v", err) + return CreateJobResponse{}, fmt.Errorf("read search job create response: 
%w", err) } err = xml.Unmarshal(body, &jobResponse) if err != nil { - return CreateJobResponse{}, fmt.Errorf("failed to unmarshal search job create response: %v", err) + return CreateJobResponse{}, fmt.Errorf("unmarshal search job create response: %w", err) } return jobResponse, nil } -func (c defaultSplunkSearchAPIClient) GetJobStatus(sid string) (SearchJobStatusResponse, error) { +func (c *defaultSplunkSearchAPIClient) GetJobStatus(sid string) (SearchJobStatusResponse, error) { endpoint := fmt.Sprintf("%s/services/search/v2/jobs/%s", c.endpoint, sid) - req, err := http.NewRequest("GET", endpoint, nil) - if err != nil { - return SearchJobStatusResponse{}, err - } - - err = c.SetSplunkRequestAuth(req) - if err != nil { - return SearchJobStatusResponse{}, err - } - - resp, err := c.client.Do(req) + resp, err := c.doSplunkRequest("GET", endpoint, nil) if err != nil { return SearchJobStatusResponse{}, err } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { - return SearchJobStatusResponse{}, fmt.Errorf("failed to get search job status: %d", resp.StatusCode) + return SearchJobStatusResponse{}, fmt.Errorf("get search job status: %d", resp.StatusCode) } body, err := io.ReadAll(resp.Body) if err != nil { - return SearchJobStatusResponse{}, fmt.Errorf("failed to read search job status response: %v", err) + return SearchJobStatusResponse{}, fmt.Errorf("read search job status response: %w", err) } var jobStatusResponse SearchJobStatusResponse err = xml.Unmarshal(body, &jobStatusResponse) if err != nil { - return SearchJobStatusResponse{}, fmt.Errorf("failed to unmarshal search job status response: %v", err) + return SearchJobStatusResponse{}, fmt.Errorf("unmarshal search job status response: %w", err) } return jobStatusResponse, nil } -func (c defaultSplunkSearchAPIClient) GetSearchResults(sid string, offset int, batchSize int) (SearchResults, error) { +func (c *defaultSplunkSearchAPIClient) GetSearchResults(sid string, offset int, batchSize int) (SearchResults, 
error) { endpoint := fmt.Sprintf("%s/services/search/v2/jobs/%s/results?output_mode=json&offset=%d&count=%d", c.endpoint, sid, offset, batchSize) - req, err := http.NewRequest("GET", endpoint, nil) - if err != nil { - return SearchResults{}, err - } - - err = c.SetSplunkRequestAuth(req) - if err != nil { - return SearchResults{}, err - } - - resp, err := c.client.Do(req) + resp, err := c.doSplunkRequest("GET", endpoint, nil) if err != nil { return SearchResults{}, err } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { - return SearchResults{}, fmt.Errorf("failed to get search job results: %d", resp.StatusCode) + return SearchResults{}, fmt.Errorf("get search job results: %d", resp.StatusCode) } var searchResults SearchResults body, err := io.ReadAll(resp.Body) if err != nil { - return SearchResults{}, fmt.Errorf("failed to read search job results response: %v", err) + return SearchResults{}, fmt.Errorf("read search job results response: %w", err) } - // fmt.Println("Body: ", string(body)) err = json.Unmarshal(body, &searchResults) if err != nil { - return SearchResults{}, fmt.Errorf("failed to unmarshal search job results response: %v", err) + return SearchResults{}, fmt.Errorf("unmarshal search job results response: %w", err) } return searchResults, nil } -func (c defaultSplunkSearchAPIClient) SetSplunkRequestAuth(req *http.Request) error { +func (c *defaultSplunkSearchAPIClient) doSplunkRequest(method, endpoint string, body io.Reader) (*http.Response, error) { + req, err := http.NewRequest(method, endpoint, body) + if err != nil { + return nil, fmt.Errorf("new http request: %w", err) + } + err = c.setSplunkRequestAuth(req) + if err != nil { + return nil, fmt.Errorf("set splunk request auth: %w", err) + } + resp, err := c.client.Do(req) + if err != nil { + return nil, fmt.Errorf("client do request: %w", err) + } + return resp, nil +} + +func (c *defaultSplunkSearchAPIClient) setSplunkRequestAuth(req *http.Request) error { if c.authToken != "" { if 
strings.EqualFold(c.tokenType, TokenTypeBearer) { req.Header.Set("Authorization", "Bearer "+string(c.authToken)) } else if strings.EqualFold(c.tokenType, TokenTypeSplunk) { req.Header.Set("Authorization", "Splunk "+string(c.authToken)) - } else { - return fmt.Errorf("auth_token provided without a correct token type, valid token types are %v", []string{TokenTypeBearer, TokenTypeSplunk}) } } else { req.SetBasicAuth(c.username, c.password) diff --git a/receiver/splunksearchapireceiver/client_test.go b/receiver/splunksearchapireceiver/client_test.go index 0dfe2d8ed..c7266f951 100644 --- a/receiver/splunksearchapireceiver/client_test.go +++ b/receiver/splunksearchapireceiver/client_test.go @@ -42,12 +42,12 @@ func TestCreateSearchJob(t *testing.T) { // returns an error if the response status isn't 201 resp, err = testClient.CreateSearchJob("index=fail_to_create_job starttime=\"\" endtime=\"\" timeformat=\"\"") - require.ErrorContains(t, err, "failed to create search job") + require.ErrorContains(t, err, "create search job") require.Empty(t, resp) // returns an error if the response body can't be unmarshalled resp, err = testClient.CreateSearchJob("index=fail_to_unmarshal starttime=\"\" endtime=\"\" timeformat=\"\"") - require.ErrorContains(t, err, "failed to unmarshal search job create response") + require.ErrorContains(t, err, "unmarshal search job create response") require.Empty(t, resp) } @@ -66,12 +66,12 @@ func TestGetJobStatus(t *testing.T) { // returns an error if the response status isn't 200 resp, err = testClient.GetJobStatus("654321") - require.ErrorContains(t, err, "failed to get search job status") + require.ErrorContains(t, err, "get search job status") require.Empty(t, resp) // returns an error if the response body can't be unmarshalled resp, err = testClient.GetJobStatus("098765") - require.ErrorContains(t, err, "failed to unmarshal search job status response") + require.ErrorContains(t, err, "unmarshal search job status response") require.Empty(t, resp) 
} @@ -89,12 +89,12 @@ func TestGetSearchResults(t *testing.T) { // returns an error if the response status isn't 200 resp, err = testClient.GetSearchResults("654321", 0, 5) - require.ErrorContains(t, err, "failed to get search job results") + require.ErrorContains(t, err, "get search job results") require.Empty(t, resp) // returns an error if the response body can't be unmarshalled resp, err = testClient.GetSearchResults("098765", 0, 5) - require.ErrorContains(t, err, "failed to unmarshal search job results response") + require.ErrorContains(t, err, "unmarshal search job results response") require.Empty(t, resp) } @@ -104,21 +104,21 @@ func TestSetSplunkRequestAuth(t *testing.T) { password: "password", } req := httptest.NewRequest("GET", "http://localhost:8089", nil) - client.SetSplunkRequestAuth(req) + client.setSplunkRequestAuth(req) require.Equal(t, req.Header.Get("Authorization"), "Basic dXNlcjpwYXNzd29yZA==") client = defaultSplunkSearchAPIClient{ authToken: "token", tokenType: TokenTypeBearer, } - client.SetSplunkRequestAuth(req) + client.setSplunkRequestAuth(req) require.Equal(t, req.Header.Get("Authorization"), "Bearer token") client = defaultSplunkSearchAPIClient{ authToken: "token", tokenType: TokenTypeSplunk, } - client.SetSplunkRequestAuth(req) + client.setSplunkRequestAuth(req) require.Equal(t, req.Header.Get("Authorization"), "Splunk token") } diff --git a/receiver/splunksearchapireceiver/config.go b/receiver/splunksearchapireceiver/config.go index 9c15c31ca..13a8b15be 100644 --- a/receiver/splunksearchapireceiver/config.go +++ b/receiver/splunksearchapireceiver/config.go @@ -59,14 +59,6 @@ func (cfg *Config) Validate() error { return errors.New("missing Splunk server endpoint") } - if cfg.Username == "" && cfg.AuthToken == "" { - return errors.New("missing Splunk username or auth token") - } - - if cfg.Password == "" && cfg.AuthToken == "" { - return errors.New("missing Splunk password or auth token") - } - if cfg.AuthToken != "" { if cfg.TokenType 
== "" { return errors.New("auth_token provided without a token type") @@ -77,6 +69,8 @@ func (cfg *Config) Validate() error { if cfg.Username != "" || cfg.Password != "" { return errors.New("auth_token and username/password were both provided, only one can be provided to authenticate with Splunk") } + } else if cfg.Username == "" || cfg.Password == "" { + return errors.New("missing Splunk basic auth credentials, need username and password") } if len(cfg.Searches) == 0 { diff --git a/receiver/splunksearchapireceiver/config_test.go b/receiver/splunksearchapireceiver/config_test.go index 789825451..e228b77bf 100644 --- a/receiver/splunksearchapireceiver/config_test.go +++ b/receiver/splunksearchapireceiver/config_test.go @@ -62,7 +62,7 @@ func TestValidate(t *testing.T) { }, }, errExpected: true, - errText: "missing Splunk username or auth token", + errText: "missing Splunk basic auth credentials, need username and password", }, { desc: "Missing password, no auth token", @@ -77,7 +77,7 @@ func TestValidate(t *testing.T) { }, }, errExpected: true, - errText: "missing Splunk password or auth token", + errText: "missing Splunk basic auth credentials, need username and password", }, { desc: "Auth token without token type", diff --git a/receiver/splunksearchapireceiver/go.mod b/receiver/splunksearchapireceiver/go.mod index 181b5657d..b03ace39d 100644 --- a/receiver/splunksearchapireceiver/go.mod +++ b/receiver/splunksearchapireceiver/go.mod @@ -1,17 +1,18 @@ module github.com/open-telemetry/opentelemtry-collector-contrib/receiver/splunksearchapireceiver -go 1.22.5 +go 1.22.7 require ( - github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.113.0 + github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.114.0 github.com/stretchr/testify v1.9.0 - go.opentelemetry.io/collector/component v0.113.0 - go.opentelemetry.io/collector/consumer v0.113.0 - go.opentelemetry.io/collector/consumer/consumertest v0.113.0 - 
go.opentelemetry.io/collector/extension/experimental/storage v0.113.0 - go.opentelemetry.io/collector/pdata v1.19.0 - go.opentelemetry.io/collector/receiver v0.113.0 - go.opentelemetry.io/collector/receiver/receivertest v0.113.0 + go.opentelemetry.io/collector/component v0.114.0 + go.opentelemetry.io/collector/component/componenttest v0.114.0 + go.opentelemetry.io/collector/consumer v0.114.0 + go.opentelemetry.io/collector/consumer/consumertest v0.114.0 + go.opentelemetry.io/collector/extension/experimental/storage v0.114.0 + go.opentelemetry.io/collector/pdata v1.20.0 + go.opentelemetry.io/collector/receiver v0.114.0 + go.opentelemetry.io/collector/receiver/receivertest v0.114.0 go.uber.org/zap v1.27.0 ) @@ -39,30 +40,30 @@ require ( github.com/magefile/mage v1.15.0 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect - github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.113.0 // indirect + github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.114.0 // indirect github.com/pierrec/lz4/v4 v4.1.21 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/rs/cors v1.11.1 // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/valyala/fastjson v1.6.4 // indirect - go.opentelemetry.io/collector/client v1.19.0 // indirect + go.opentelemetry.io/collector/client v1.20.0 // indirect go.opentelemetry.io/collector/config/configauth v0.113.0 // indirect go.opentelemetry.io/collector/config/configcompression v1.19.0 // indirect - go.opentelemetry.io/collector/config/configopaque v1.19.0 // indirect - go.opentelemetry.io/collector/config/configtls v1.19.0 // indirect + go.opentelemetry.io/collector/config/configopaque v1.20.0 // indirect + go.opentelemetry.io/collector/config/configtls v1.20.0 // indirect go.opentelemetry.io/collector/config/internal v0.113.0 // indirect - go.opentelemetry.io/collector/confmap v1.19.0 // indirect - 
go.opentelemetry.io/collector/consumer/consumererror v0.113.0 // indirect - go.opentelemetry.io/collector/consumer/consumerprofiles v0.113.0 // indirect - go.opentelemetry.io/collector/extension v0.113.0 // indirect + go.opentelemetry.io/collector/confmap v1.20.0 // indirect + go.opentelemetry.io/collector/consumer/consumererror v0.114.0 // indirect + go.opentelemetry.io/collector/consumer/consumerprofiles v0.114.0 // indirect + go.opentelemetry.io/collector/extension v0.114.0 // indirect go.opentelemetry.io/collector/extension/auth v0.113.0 // indirect - go.opentelemetry.io/collector/featuregate v1.19.0 // indirect - go.opentelemetry.io/collector/pdata/pprofile v0.113.0 // indirect - go.opentelemetry.io/collector/receiver/receiverprofiles v0.113.0 // indirect - go.opentelemetry.io/collector/semconv v0.113.0 // indirect + go.opentelemetry.io/collector/featuregate v1.20.0 // indirect + go.opentelemetry.io/collector/pdata/pprofile v0.114.0 // indirect + go.opentelemetry.io/collector/receiver/receiverprofiles v0.114.0 // indirect + go.opentelemetry.io/collector/semconv v0.114.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.56.0 // indirect - go.opentelemetry.io/otel/sdk v1.31.0 // indirect - go.opentelemetry.io/otel/sdk/metric v1.31.0 // indirect + go.opentelemetry.io/otel/sdk v1.32.0 // indirect + go.opentelemetry.io/otel/sdk/metric v1.32.0 // indirect gonum.org/v1/gonum v0.15.1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) @@ -73,14 +74,14 @@ require ( github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect go.opentelemetry.io/collector/config/confighttp v0.113.0 - go.opentelemetry.io/collector/config/configtelemetry v0.113.0 // indirect - go.opentelemetry.io/collector/pipeline v0.113.0 // indirect - go.opentelemetry.io/otel v1.31.0 // indirect - go.opentelemetry.io/otel/metric v1.31.0 // indirect - go.opentelemetry.io/otel/trace v1.31.0 // indirect + 
go.opentelemetry.io/collector/config/configtelemetry v0.114.0 // indirect + go.opentelemetry.io/collector/pipeline v0.114.0 // indirect + go.opentelemetry.io/otel v1.32.0 // indirect + go.opentelemetry.io/otel/metric v1.32.0 // indirect + go.opentelemetry.io/otel/trace v1.32.0 // indirect go.uber.org/multierr v1.11.0 // indirect golang.org/x/net v0.30.0 // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/sys v0.27.0 // indirect golang.org/x/text v0.19.0 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20240822170219-fc7c04adadcd // indirect google.golang.org/grpc v1.67.1 // indirect diff --git a/receiver/splunksearchapireceiver/go.sum b/receiver/splunksearchapireceiver/go.sum index 36d4d0b22..3446cd783 100644 --- a/receiver/splunksearchapireceiver/go.sum +++ b/receiver/splunksearchapireceiver/go.sum @@ -65,14 +65,14 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= -github.com/open-telemetry/opentelemetry-collector-contrib/extension/storage v0.113.0 h1:ERdOiTmsDruI/s5oEgN45NsZW2roWXmO0u2aceR4GuM= -github.com/open-telemetry/opentelemetry-collector-contrib/extension/storage v0.113.0/go.mod h1:RkClsQhl8hdAg874Ot4kaG92s+6dW0Dvlt5HRxhsavc= -github.com/open-telemetry/opentelemetry-collector-contrib/internal/common v0.113.0 h1:7A8MgFPYRQWq1RkFBktq01CW+eTYhiGML0IxQNv2uaM= -github.com/open-telemetry/opentelemetry-collector-contrib/internal/common v0.113.0/go.mod h1:E1pc7mDXH+5s7RyXw291h8lz2dhzPzaDrAHqP1Lawvw= -github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.113.0 h1:EZ/ZNsovNcQq+wwAbTAWNY+6BHnv24NxvVoC6eYmtg8= 
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.113.0/go.mod h1:u21dEQ9yQ0JyLMSrKLWWzHG/lHSlteNfa/EQ7Vqcle4= -github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.113.0 h1:G8w+wg4nnqBqe297fBWnjJ5Tg2OYDVEMsdWA9/3ozxQ= -github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.113.0/go.mod h1:m3hDVsXPQzQfeji3+hn7NYJPHDRlHhQRNd5T7N5wZqc= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/storage v0.114.0 h1:mchuc816TxLpmsGvFbtGA3KBVx91vAXi7vJnlvsQdiU= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/storage v0.114.0/go.mod h1:vgCMUWPVrfjNux9P9G053fRqGFF6BS3xtxNFZZdFTCM= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/common v0.114.0 h1:0LbaoE7Aof8J4CVQ5kYv1QbuL3usTxLRSMFisDNBX9U= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/common v0.114.0/go.mod h1:ByoGXMLeHE/k5ELO3EITitVmvq3bh4Z/GVwWZZxrQ5s= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.114.0 h1:d2wCLlENxH4I2axQWaogivx/5ZIjDYgn9MIf6sFxlJ4= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.114.0/go.mod h1:Psyligv8GKL9WI3TraW3BLwkOX4TRxaaa1BBQQyICzA= +github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.114.0 h1:Xr3Hvm9cxOSQX94tLX1yX63uvuvtglJICrOz9YcxiuI= +github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.114.0/go.mod h1:cgIgmEg66RhVtAv4JkIhHdy70kn2EtVhrH8CtyvhfuI= github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ= github.com/pierrec/lz4/v4 v4.1.21/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= @@ -91,70 +91,72 @@ github.com/valyala/fastjson v1.6.4 h1:uAUNq9Z6ymTgGhcm0UynUAB6tlbakBrz6CQFax3BXV github.com/valyala/fastjson v1.6.4/go.mod h1:CLCAqky6SMuOcxStkYQvblddUtoRxhYMGLrsQns1aXY= github.com/yuin/goldmark v1.1.27/go.mod 
h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -go.opentelemetry.io/collector/client v1.19.0 h1:TUal8WV1agTrZStgE7BJ8ZC0IHLGtrfgO9ogU9t1mv8= -go.opentelemetry.io/collector/client v1.19.0/go.mod h1:jgiXMEM6l8L2QEyf2I/M47Zd8+G7e4z+6H8q5SkHOlQ= -go.opentelemetry.io/collector/component v0.113.0 h1:/nx+RvZgxUEXP+YcTj69rEtuSEGkfaCyp/ad5zQGLjU= -go.opentelemetry.io/collector/component v0.113.0/go.mod h1:2T779hIGHU9i7xbXbV3q1/JnRw2FyzUYXW2vq47A6EU= +go.opentelemetry.io/collector/client v1.20.0 h1:o60wPcj5nLtaRenF+1E5p4QXFS3TDL6vHlw+GOon3rg= +go.opentelemetry.io/collector/client v1.20.0/go.mod h1:6aqkszco9FaLWCxyJEVam6PP7cUa8mPRIXeS5eZGj0U= +go.opentelemetry.io/collector/component v0.114.0 h1:SVGbm5LvHGSTEDv7p92oPuBgK5tuiWR82I9+LL4TtBE= +go.opentelemetry.io/collector/component v0.114.0/go.mod h1:MLxtjZ6UVHjDxSdhGLuJfHBHvfl1iT/Y7IaQPD24Eww= +go.opentelemetry.io/collector/component/componenttest v0.114.0 h1:GM4FTTlfeXoVm6sZYBHImwlRN8ayh2oAfUhvaFj7Zo8= +go.opentelemetry.io/collector/component/componenttest v0.114.0/go.mod h1:ZZEJMtbJtoVC/3/9R1HzERq+cYQRxuMFQrPCpfZ4Xos= go.opentelemetry.io/collector/config/configauth v0.113.0 h1:CBz43fGpN41MwLdwe3mw/XVSIDvGRMT8aaaPuqKukTU= go.opentelemetry.io/collector/config/configauth v0.113.0/go.mod h1:Q8SlxrIvL3FJO51hXa4n9ARvox04lK8mmpjf4b3UNAU= go.opentelemetry.io/collector/config/configcompression v1.19.0 h1:bTSjTLhnPXX1NSFM6GzguEM/NBe8QUPsXHc9kMOAJzE= go.opentelemetry.io/collector/config/configcompression v1.19.0/go.mod h1:pnxkFCLUZLKWzYJvfSwZnPrnm0twX14CYj2ADth5xiU= go.opentelemetry.io/collector/config/confighttp v0.113.0 h1:a6iO0y1ZM5CPDvwbryzU+GpqAtAQ3eSfNseoAUogw7c= go.opentelemetry.io/collector/config/confighttp v0.113.0/go.mod h1:JZ9EwoiWMIrXt5v+d/q54TeUhPdAoLDimSEqTtddW6E= -go.opentelemetry.io/collector/config/configopaque v1.19.0 h1:7uvntQeAAtqCaeiS2dDGrT1wLPhWvDlEsD3SliA/koQ= -go.opentelemetry.io/collector/config/configopaque v1.19.0/go.mod 
h1:6zlLIyOoRpJJ+0bEKrlZOZon3rOp5Jrz9fMdR4twOS4= -go.opentelemetry.io/collector/config/configtelemetry v0.113.0 h1:hweTRrVddnUeA3k7HzRY4oUR9lRdMa7of3mHNUS5YyA= -go.opentelemetry.io/collector/config/configtelemetry v0.113.0/go.mod h1:R0MBUxjSMVMIhljuDHWIygzzJWQyZHXXWIgQNxcFwhc= -go.opentelemetry.io/collector/config/configtls v1.19.0 h1:GQ/cF1hgNqHVBq2oSSrOFXxVCyMDyd5kq4R/RMEbL98= -go.opentelemetry.io/collector/config/configtls v1.19.0/go.mod h1:1hyqnYB3JqEUlk1ME/s9HYz4oCRcxQCRxsJitFFT/cA= +go.opentelemetry.io/collector/config/configopaque v1.20.0 h1:2I48zKiyyyYqjm7y0B9OLp24ku2ZSX3nCHG0r5FdWOQ= +go.opentelemetry.io/collector/config/configopaque v1.20.0/go.mod h1:6zlLIyOoRpJJ+0bEKrlZOZon3rOp5Jrz9fMdR4twOS4= +go.opentelemetry.io/collector/config/configtelemetry v0.114.0 h1:kjLeyrumge6wsX6ZIkicdNOlBXaEyW2PI2ZdVXz/rzY= +go.opentelemetry.io/collector/config/configtelemetry v0.114.0/go.mod h1:R0MBUxjSMVMIhljuDHWIygzzJWQyZHXXWIgQNxcFwhc= +go.opentelemetry.io/collector/config/configtls v1.20.0 h1:hNlJdwfyY5Qe54RLJ41lfLqKTn9ypkR7sk7JNCcSe2U= +go.opentelemetry.io/collector/config/configtls v1.20.0/go.mod h1:sav/txSHguadTYlSSK+BJO2ljJeYEtRoBahgzWAguYg= go.opentelemetry.io/collector/config/internal v0.113.0 h1:9RAzH8v7ItFT1npHpvP0SvUzBHcZDliCGRo9Spp6v7c= go.opentelemetry.io/collector/config/internal v0.113.0/go.mod h1:yC7E4h1Uj0SubxcFImh6OvBHFTjMh99+A5PuyIgDWqc= -go.opentelemetry.io/collector/confmap v1.19.0 h1:TQ0lZpAKqgsE0EKk+u4JA+uBbPYeFRmWP3GH43w40CY= -go.opentelemetry.io/collector/confmap v1.19.0/go.mod h1:GgNu1ElPGmLn9govqIfjaopvdspw4PJ9KeDtWC4E2Q4= -go.opentelemetry.io/collector/consumer v0.113.0 h1:KJSiK5vSIY9dgPxwKfQ3gOgKtQsqc+7IB7mGhUAL5c8= -go.opentelemetry.io/collector/consumer v0.113.0/go.mod h1:zHMlXYFaJlZoLCBR6UwWoyXZ/adcO1u2ydqUal3VmYU= -go.opentelemetry.io/collector/consumer/consumererror v0.113.0 h1:Hd2N7n9RKbnKRaVrdw6fPBoQko5zZIgCxwVxkL6SAIE= -go.opentelemetry.io/collector/consumer/consumererror v0.113.0/go.mod h1:o0MAGFdzcr7LFTUQ6iivPPhbVmn2ZVIYm3FPXk2+JUo= 
-go.opentelemetry.io/collector/consumer/consumerprofiles v0.113.0 h1:RftAcQUY5UOfbEK4s16jnORqTx16y9+PxA1lQwt98cQ= -go.opentelemetry.io/collector/consumer/consumerprofiles v0.113.0/go.mod h1:ZuHrQ4pWguh6dw0DgTfcUtdY/T+cnOJJNP6LMbm5Y5A= -go.opentelemetry.io/collector/consumer/consumertest v0.113.0 h1:ua2AjNx3DUA8qElXNkggB4w3VDL/rBKBvryOQkhumH8= -go.opentelemetry.io/collector/consumer/consumertest v0.113.0/go.mod h1:vK8o4ZTZSiG3rVyqxZcCNmT/cvEfx34ig7V65L9+6Rg= -go.opentelemetry.io/collector/extension v0.113.0 h1:Vp/YSL8ZCkJQrP1lf2Bm5yaTvcp6ROO3AnfuSL3GEXM= -go.opentelemetry.io/collector/extension v0.113.0/go.mod h1:Pwp0TNqdHeER4V1I6H6oCvrto/riiOAqs3737BWCnjw= +go.opentelemetry.io/collector/confmap v1.20.0 h1:ARfOwmkKxFOud1njl03yAHQ30+uenlzqCO6LBYamDTE= +go.opentelemetry.io/collector/confmap v1.20.0/go.mod h1:DMpd9Ay/ffls3JoQBQ73vWeRsz1rNuLbwjo6WtjSQus= +go.opentelemetry.io/collector/consumer v0.114.0 h1:1zVaHvfIZowGwZRitRBRo3i+RP2StlU+GClYiofSw0Q= +go.opentelemetry.io/collector/consumer v0.114.0/go.mod h1:d+Mrzt9hsH1ub3zmwSlnQVPLeTYir4Mgo7CrWfnncN4= +go.opentelemetry.io/collector/consumer/consumererror v0.114.0 h1:r2YiELfWerb40FHD23V04gNjIkLUcjEKGxI4Vtm2iO4= +go.opentelemetry.io/collector/consumer/consumererror v0.114.0/go.mod h1:MzIrLQ5jptO2egypolhlAbZsWZr29WC4FhSxQjnxcvg= +go.opentelemetry.io/collector/consumer/consumerprofiles v0.114.0 h1:5pXYy3E6UK5Huu3aQbsYL8B6E6MyWx4fvXXDn+oXZaA= +go.opentelemetry.io/collector/consumer/consumerprofiles v0.114.0/go.mod h1:PMq3f54KcJQO4v1tue0QxQScu7REFVADlXxXSAYMiN0= +go.opentelemetry.io/collector/consumer/consumertest v0.114.0 h1:isaTwJK5DOy8Bs7GuLq23ejfgj8gLIo5dOUvkRnLF4g= +go.opentelemetry.io/collector/consumer/consumertest v0.114.0/go.mod h1:GNeLPkfRPdh06n/Rv1UKa/cAtCKjN0a7ADyHjIj4HFE= +go.opentelemetry.io/collector/extension v0.114.0 h1:9Qb92y8hD2WDC5aMDoj4JNQN+/5BQYJWPUPzLXX+iGw= +go.opentelemetry.io/collector/extension v0.114.0/go.mod h1:Yk2/1ptVgfTr12t+22v93nYJpioP14pURv2YercSzU0= 
go.opentelemetry.io/collector/extension/auth v0.113.0 h1:4ggRy1vepOabUiCWfU+6M9P/ftXojMUNAvBpeLihYj8= go.opentelemetry.io/collector/extension/auth v0.113.0/go.mod h1:VbvAm2YZAqePkWgwn0m0vBaq3aC49CxPVwHmrJ24aeQ= -go.opentelemetry.io/collector/extension/experimental/storage v0.113.0 h1:Qq4IaB6bMUrf/bWoPZ5ESWywCt+vDi8I/ChYejIEPcc= -go.opentelemetry.io/collector/extension/experimental/storage v0.113.0/go.mod h1:BRmo+A7f06u/rhyLauU/Vogk+QRN0y1j2VVVgMGWrfQ= -go.opentelemetry.io/collector/featuregate v1.19.0 h1:ASea2sU+tdpKI3RxIJC/pufDAfwAmrvcQ4EmTHVu0B0= -go.opentelemetry.io/collector/featuregate v1.19.0/go.mod h1:47xrISO71vJ83LSMm8+yIDsUbKktUp48Ovt7RR6VbRs= -go.opentelemetry.io/collector/pdata v1.19.0 h1:jmnU5R8TOCbwRr4B8sjdRxM7L5WnEKlQWX1dtLYxIbE= -go.opentelemetry.io/collector/pdata v1.19.0/go.mod h1:Ox1YVLe87cZDB/TL30i4SUz1cA5s6AM6SpFMfY61ICs= -go.opentelemetry.io/collector/pdata/pprofile v0.113.0 h1:VRf4p0VhfuaR+Epy/nMIlu/9t39WU9CUgHVUvpuGxfU= -go.opentelemetry.io/collector/pdata/pprofile v0.113.0/go.mod h1:5aDejksdXh5PdJN/OhpzATGT3kbNL0RMmw2Q0Q6E/o0= -go.opentelemetry.io/collector/pdata/testdata v0.113.0 h1:vRfn85jicO2F4eOTgsWtzmU/K3E/uZUtM1HEefvvJD8= -go.opentelemetry.io/collector/pdata/testdata v0.113.0/go.mod h1:sR+6eR+YEJhYZu9StbqzeWcCmHpfBAgX/qjP82HY9Gw= -go.opentelemetry.io/collector/pipeline v0.113.0 h1:vSRzRe3717jV0btCNPhVkhg2lu0uFxcm2VO+vhad/eE= -go.opentelemetry.io/collector/pipeline v0.113.0/go.mod h1:4vOvjVsoYTHVGTbfFwqfnQOSV2K3RKUHofh3jNRc2Mg= -go.opentelemetry.io/collector/receiver v0.113.0 h1:vraAbkPy8Pz9x5X39gV+j9t6x23PNsY2aJ6gQMugRbQ= -go.opentelemetry.io/collector/receiver v0.113.0/go.mod h1:IUa8/lNw8Qh4L5Q3jOeRWKW0ebQPoNcfhytxN5Puq2A= -go.opentelemetry.io/collector/receiver/receiverprofiles v0.113.0 h1:uVxuzjGe2t1sbwahSBowVHYnGzpzn8brmfn8z1UHvQg= -go.opentelemetry.io/collector/receiver/receiverprofiles v0.113.0/go.mod h1:khKDkzYJR2x2OPUqGSmoSncdINT9lUE5IThiHPDbqZk= -go.opentelemetry.io/collector/receiver/receivertest v0.113.0 
h1:0vOvz3S4Q/KwcNCS9C7zPo0uxD6RSWktG88yGdxfV6g= -go.opentelemetry.io/collector/receiver/receivertest v0.113.0/go.mod h1:sRq5ctm5UE/0Ar562wnCVQ1zbAie/D127D1WbtbEuEc= -go.opentelemetry.io/collector/semconv v0.113.0 h1:twenSI7M7MJMJKW8D6a/GXxPZTPbama/weywBtV2iFw= -go.opentelemetry.io/collector/semconv v0.113.0/go.mod h1:zCJ5njhWpejR+A40kiEoeFm1xq1uzyZwMnRNX6/D82A= +go.opentelemetry.io/collector/extension/experimental/storage v0.114.0 h1:hLyX9UvmY0t6iBnk3CqvyNck2U0QjPACekj7pDRx2hA= +go.opentelemetry.io/collector/extension/experimental/storage v0.114.0/go.mod h1:WqYRQVJjJLE1rm+y/ks1wPdPRGWePEvE1VO07xm2J2k= +go.opentelemetry.io/collector/featuregate v1.20.0 h1:Mi7nMy/q52eruI+6jWnMKUOeM55XvwoPnGcdB1++O8c= +go.opentelemetry.io/collector/featuregate v1.20.0/go.mod h1:47xrISO71vJ83LSMm8+yIDsUbKktUp48Ovt7RR6VbRs= +go.opentelemetry.io/collector/pdata v1.20.0 h1:ePcwt4bdtISP0loHaE+C9xYoU2ZkIvWv89Fob16o9SM= +go.opentelemetry.io/collector/pdata v1.20.0/go.mod h1:Ox1YVLe87cZDB/TL30i4SUz1cA5s6AM6SpFMfY61ICs= +go.opentelemetry.io/collector/pdata/pprofile v0.114.0 h1:pUNfTzsI/JUTiE+DScDM4lsrPoxnVNLI2fbTxR/oapo= +go.opentelemetry.io/collector/pdata/pprofile v0.114.0/go.mod h1:4aNcj6WM1n1uXyFSXlhVs4ibrERgNYsTbzcYI2zGhxA= +go.opentelemetry.io/collector/pdata/testdata v0.114.0 h1:+AzszWSL1i4K6meQ8rU0JDDW55SYCXa6FVqfDixhhTo= +go.opentelemetry.io/collector/pdata/testdata v0.114.0/go.mod h1:bv8XFdCTZxG2MQB5l9dKxSxf5zBrcodwO6JOy1+AxXM= +go.opentelemetry.io/collector/pipeline v0.114.0 h1:v3YOhc5z0tD6QbO5n/pnftpIeroihM2ks9Z2yKPCcwY= +go.opentelemetry.io/collector/pipeline v0.114.0/go.mod h1:4vOvjVsoYTHVGTbfFwqfnQOSV2K3RKUHofh3jNRc2Mg= +go.opentelemetry.io/collector/receiver v0.114.0 h1:90SAnXAjNq7/k52/pFmmb06Cf1YauoPYtbio4aOXafY= +go.opentelemetry.io/collector/receiver v0.114.0/go.mod h1:KUGT0/D953LXbGH/D3lLPU8yrU3HfWnUqpt4W4hSOnE= +go.opentelemetry.io/collector/receiver/receiverprofiles v0.114.0 h1:ibhEfGpvNB3yrtpl2jYFabrunMk1hurxvMYpM0b1Ck4= 
+go.opentelemetry.io/collector/receiver/receiverprofiles v0.114.0/go.mod h1:UZyRfaasw+NLvN10AN8IQnmj5tQ3BOUH1uP2ctpO9f0= +go.opentelemetry.io/collector/receiver/receivertest v0.114.0 h1:D+Kh9t2n4asTnM+TiSxbrKlUemLZandWntj17BJWWb0= +go.opentelemetry.io/collector/receiver/receivertest v0.114.0/go.mod h1:mNSHQ13vFmqD+VAcRzLjStFBejbcWUn2Mp0pAd7Op+U= +go.opentelemetry.io/collector/semconv v0.114.0 h1:/eKcCJwZepQUtEuFuxa0thx2XIOvhFpaf214ZG1a11k= +go.opentelemetry.io/collector/semconv v0.114.0/go.mod h1:zCJ5njhWpejR+A40kiEoeFm1xq1uzyZwMnRNX6/D82A= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.56.0 h1:UP6IpuHFkUgOQL9FFQFrZ+5LiwhhYRbi7VZSIx6Nj5s= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.56.0/go.mod h1:qxuZLtbq5QDtdeSHsS7bcf6EH6uO6jUAgk764zd3rhM= -go.opentelemetry.io/otel v1.31.0 h1:NsJcKPIW0D0H3NgzPDHmo0WW6SptzPdqg/L1zsIm2hY= -go.opentelemetry.io/otel v1.31.0/go.mod h1:O0C14Yl9FgkjqcCZAsE053C13OaddMYr/hz6clDkEJE= -go.opentelemetry.io/otel/metric v1.31.0 h1:FSErL0ATQAmYHUIzSezZibnyVlft1ybhy4ozRPcF2fE= -go.opentelemetry.io/otel/metric v1.31.0/go.mod h1:C3dEloVbLuYoX41KpmAhOqNriGbA+qqH6PQ5E5mUfnY= -go.opentelemetry.io/otel/sdk v1.31.0 h1:xLY3abVHYZ5HSfOg3l2E5LUj2Cwva5Y7yGxnSW9H5Gk= -go.opentelemetry.io/otel/sdk v1.31.0/go.mod h1:TfRbMdhvxIIr/B2N2LQW2S5v9m3gOQ/08KsbbO5BPT0= -go.opentelemetry.io/otel/sdk/metric v1.31.0 h1:i9hxxLJF/9kkvfHppyLL55aW7iIJz4JjxTeYusH7zMc= -go.opentelemetry.io/otel/sdk/metric v1.31.0/go.mod h1:CRInTMVvNhUKgSAMbKyTMxqOBC0zgyxzW55lZzX43Y8= -go.opentelemetry.io/otel/trace v1.31.0 h1:ffjsj1aRouKewfr85U2aGagJ46+MvodynlQ1HYdmJys= -go.opentelemetry.io/otel/trace v1.31.0/go.mod h1:TXZkRk7SM2ZQLtR6eoAWQFIHPvzQ06FJAsO1tJg480A= +go.opentelemetry.io/otel v1.32.0 h1:WnBN+Xjcteh0zdk01SVqV55d/m62NJLJdIyb4y/WO5U= +go.opentelemetry.io/otel v1.32.0/go.mod h1:00DCVSB0RQcnzlwyTfqtxSm+DRr9hpYrHjNGiBHVQIg= +go.opentelemetry.io/otel/metric v1.32.0 h1:xV2umtmNcThh2/a/aCP+h64Xx5wsj8qqnkYZktzNa0M= +go.opentelemetry.io/otel/metric 
v1.32.0/go.mod h1:jH7CIbbK6SH2V2wE16W05BHCtIDzauciCRLoc/SyMv8= +go.opentelemetry.io/otel/sdk v1.32.0 h1:RNxepc9vK59A8XsgZQouW8ue8Gkb4jpWtJm9ge5lEG4= +go.opentelemetry.io/otel/sdk v1.32.0/go.mod h1:LqgegDBjKMmb2GC6/PrTnteJG39I8/vJCAP9LlJXEjU= +go.opentelemetry.io/otel/sdk/metric v1.32.0 h1:rZvFnvmvawYb0alrYkjraqJq0Z4ZUJAiyYCU9snn1CU= +go.opentelemetry.io/otel/sdk/metric v1.32.0/go.mod h1:PWeZlq0zt9YkYAp3gjKZ0eicRYvOh1Gd+X99x6GHpCQ= +go.opentelemetry.io/otel/trace v1.32.0 h1:WIC9mYrXf8TmY/EXuULKc8hR17vE+Hjv2cssQDe03fM= +go.opentelemetry.io/otel/trace v1.32.0/go.mod h1:+i4rkvCraA+tG6AzwloGaCtkx53Fa+L+V8e9a7YvhT8= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= @@ -180,8 +182,8 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM= diff --git a/receiver/splunksearchapireceiver/integration_test.go b/receiver/splunksearchapireceiver/integration_test.go index 868c60b75..624190e03 100644 --- 
a/receiver/splunksearchapireceiver/integration_test.go +++ b/receiver/splunksearchapireceiver/integration_test.go @@ -49,7 +49,7 @@ func TestSplunkResultsPaginationFailure(t *testing.T) { defer server.Close() settings := componenttest.NewNopTelemetrySettings() ssapir := newSSAPIReceiver(zap.NewNop(), cfg, settings, component.NewID(typeStr)) - ssapir.client, _ = newSplunkSearchAPIClient(context.Background(), settings, *cfg, componenttest.NewNopHost()) + ssapir.client, _ = newDefaultSplunkSearchAPIClient(context.Background(), settings, *cfg, componenttest.NewNopHost()) ssapir.client.(*defaultSplunkSearchAPIClient).client = server.Client() ssapir.client.(*defaultSplunkSearchAPIClient).endpoint = server.URL ssapir.logsConsumer = &consumertest.LogsSink{} @@ -117,13 +117,12 @@ func TestExporterFailure(t *testing.T) { logsConsumer.On("ConsumeLogs", mock.Anything, mock.Anything).Return(nil) ssapir.logsConsumer = logsConsumer - ssapir.client, _ = newSplunkSearchAPIClient(context.Background(), settings, *cfg, componenttest.NewNopHost()) + ssapir.client, _ = newDefaultSplunkSearchAPIClient(context.Background(), settings, *cfg, componenttest.NewNopHost()) ssapir.client.(*defaultSplunkSearchAPIClient).client = server.Client() ssapir.client.(*defaultSplunkSearchAPIClient).endpoint = server.URL ssapir.initCheckpoint(context.Background()) - err := ssapir.runQueries(context.Background()) - require.NoError(t, err) + ssapir.runQueries(context.Background()) require.Equal(t, 5, ssapir.checkpointRecord.Offset) require.Equal(t, "search index=otel", ssapir.checkpointRecord.Search) @@ -136,8 +135,7 @@ func TestExporterFailure(t *testing.T) { ssapir.logsConsumer = logsConsumerErr ssapir.initCheckpoint(context.Background()) - err = ssapir.runQueries(context.Background()) - require.EqualError(t, err, "error consuming logs: error exporting logs") + ssapir.runQueries(context.Background()) require.Equal(t, 0, ssapir.checkpointRecord.Offset) require.Equal(t, "search index=otel", 
ssapir.checkpointRecord.Search) } diff --git a/receiver/splunksearchapireceiver/receiver.go b/receiver/splunksearchapireceiver/receiver.go index f95b788fb..5e756ea9e 100644 --- a/receiver/splunksearchapireceiver/receiver.go +++ b/receiver/splunksearchapireceiver/receiver.go @@ -30,7 +30,8 @@ import ( ) const ( - eventStorageKey = "last_event_offset" + eventStorageKey = "last_event_offset" + splunkDefaultEventBatchSize = 100 ) var ( @@ -40,7 +41,6 @@ var ( ) type splunksearchapireceiver struct { - host component.Host logger *zap.Logger logsConsumer consumer.Logs config *Config @@ -69,9 +69,8 @@ func newSSAPIReceiver( } func (ssapir *splunksearchapireceiver) Start(ctx context.Context, host component.Host) error { - ssapir.host = host var err error - ssapir.client, err = newSplunkSearchAPIClient(ctx, ssapir.settings, *ssapir.config, ssapir.host) + ssapir.client, err = newDefaultSplunkSearchAPIClient(ctx, ssapir.settings, *ssapir.config, host) if err != nil { return err } @@ -101,35 +100,34 @@ func (ssapir *splunksearchapireceiver) Shutdown(ctx context.Context) error { ssapir.cancel() } - err := ssapir.checkpoint(ctx) - if err != nil { + if err := ssapir.checkpoint(ctx); err != nil { ssapir.logger.Error("failed checkpoint", zap.Error(err)) } return ssapir.storageClient.Close(ctx) } -func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { +func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) { for _, search := range ssapir.config.Searches { // set current search query ssapir.checkpointRecord.Search = search.Query // set default event batch size (matches Splunk API default) if search.EventBatchSize == 0 { - search.EventBatchSize = 100 + search.EventBatchSize = splunkDefaultEventBatchSize } // create search in Splunk searchID, err := ssapir.createSplunkSearch(search) if err != nil { ssapir.logger.Error("error creating search", zap.Error(err)) - return err + return } // wait for search to complete if err = 
ssapir.pollSearchCompletion(ctx, searchID); err != nil { ssapir.logger.Error("error polling for search completion", zap.Error(err)) - return err + return } for { @@ -137,7 +135,6 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { results, err := ssapir.getSplunkSearchResults(searchID, offset, search.EventBatchSize) if err != nil { ssapir.logger.Error("error fetching search results", zap.Error(err)) - return err } ssapir.logger.Info("search results fetched", zap.Int("num_results", len(results.Results))) @@ -158,39 +155,39 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { limitReached = true break } - // convert log timestamp to ISO8601 (UTC() makes RFC3339 into ISO8601) + // convert log timestamp to ISO 8601 (UTC() makes RFC 3339 into ISO 8601) logTimestamp, err := time.Parse(time.RFC3339, splunkLog.Time) if err != nil { ssapir.logger.Error("error parsing log timestamp", zap.Error(err)) break } + if logTimestamp.UTC().Before(earliestTime) { + ssapir.logger.Info("skipping log entry - timestamp before earliestTime", zap.Time("time", logTimestamp.UTC()), zap.Time("earliestTime", earliestTime.UTC())) + break + } if logTimestamp.UTC().After(latestTime.UTC()) { ssapir.logger.Info("skipping log entry - timestamp after latestTime", zap.Time("time", logTimestamp.UTC()), zap.Time("latestTime", latestTime.UTC())) // logger will only log up to 10 times for a given code block, known weird behavior continue } - if logTimestamp.UTC().Before(earliestTime) { - ssapir.logger.Info("skipping log entry - timestamp before earliestTime", zap.Time("time", logTimestamp.UTC()), zap.Time("earliestTime", earliestTime.UTC())) - break - } log := logs.ResourceLogs().AppendEmpty().ScopeLogs().AppendEmpty().LogRecords().AppendEmpty() - // convert time to timestamp timestamp := pcommon.NewTimestampFromTime(logTimestamp.UTC()) log.SetTimestamp(timestamp) log.Body().SetStr(splunkLog.Raw) + } - if logs.ResourceLogs().Len() == 0 { - 
ssapir.logger.Info("search returned no logs within the given time range") - return nil - } + if logs.ResourceLogs().Len() == 0 { + ssapir.logger.Info("search returned no logs within the given time range") + break } // pass logs, wait for exporter to confirm successful export to GCP err = ssapir.logsConsumer.ConsumeLogs(ctx, logs) if err != nil { // error from down the pipeline, freak out - return fmt.Errorf("error consuming logs: %w", err) + ssapir.logger.Error("error exporting logs", zap.Error(err)) + return } // last batch of logs has been successfully exported @@ -216,7 +213,6 @@ func (ssapir *splunksearchapireceiver) runQueries(ctx context.Context) error { ssapir.logger.Info("search results exported", zap.String("query", search.Query), zap.Int("total results", exportedEvents)) } ssapir.logger.Info("all search results exported") - return nil } func (ssapir *splunksearchapireceiver) pollSearchCompletion(ctx context.Context, searchID string) error {