diff --git a/agent/agent.go b/agent/agent.go index ff038ff13..0482b2244 100644 --- a/agent/agent.go +++ b/agent/agent.go @@ -61,7 +61,10 @@ func (r *Agent) Validate(rcp recipe.Recipe) (errs []error) { if ext, err := r.extractorFactory.Get(rcp.Source.Name); err != nil { errs = append(errs, err) } else { - if err = ext.Validate(rcp.Source.Config); err != nil { + if err = ext.Validate(plugins.Config{ + URNScope: rcp.Source.Scope, + RawConfig: rcp.Source.Config, + }); err != nil { errs = append(errs, r.enrichInvalidConfigError(err, rcp.Source.Name, plugins.PluginTypeExtractor)) } } @@ -72,7 +75,7 @@ func (r *Agent) Validate(rcp recipe.Recipe) (errs []error) { errs = append(errs, err) continue } - if err = sink.Validate(s.Config); err != nil { + if err = sink.Validate(plugins.Config{RawConfig: s.Config}); err != nil { errs = append(errs, r.enrichInvalidConfigError(err, s.Name, plugins.PluginTypeSink)) } } @@ -83,7 +86,7 @@ func (r *Agent) Validate(rcp recipe.Recipe) (errs []error) { errs = append(errs, err) continue } - if err = procc.Validate(p.Config); err != nil { + if err = procc.Validate(plugins.Config{RawConfig: p.Config}); err != nil { errs = append(errs, r.enrichInvalidConfigError(err, p.Name, plugins.PluginTypeProcessor)) } } @@ -197,7 +200,7 @@ func (r *Agent) setupExtractor(ctx context.Context, sr recipe.PluginRecipe, str err = errors.Wrapf(err, "could not find extractor \"%s\"", sr.Name) return } - if err = extractor.Init(ctx, sr.Config); err != nil { + if err = extractor.Init(ctx, recipeToPluginConfig(sr)); err != nil { err = errors.Wrapf(err, "could not initiate extractor \"%s\"", sr.Name) return } @@ -217,7 +220,7 @@ func (r *Agent) setupProcessor(ctx context.Context, pr recipe.PluginRecipe, str if proc, err = r.processorFactory.Get(pr.Name); err != nil { return errors.Wrapf(err, "could not find processor \"%s\"", pr.Name) } - if err = proc.Init(ctx, pr.Config); err != nil { + if err = proc.Init(ctx, recipeToPluginConfig(pr)); err != nil { return errors.Wrapf(err, "could not initiate processor \"%s\"", pr.Name) } @@ -240,7 +243,7 @@ func (r *Agent) setupSink(ctx context.Context, sr recipe.PluginRecipe, stream *s if sink, err = r.sinkFactory.Get(sr.Name); err != nil { return errors.Wrapf(err, "could not find sink \"%s\"", sr.Name) } - if err = sink.Init(ctx, sr.Config); err != nil { + if err = sink.Init(ctx, recipeToPluginConfig(sr)); err != nil { return errors.Wrapf(err, "could not initiate sink \"%s\"", sr.Name) } retryNotification := func(e error, d time.Duration) { diff --git a/agent/agent_test.go b/agent/agent_test.go index fd27c4f8c..c3335886c 100644 --- a/agent/agent_test.go +++ b/agent/agent_test.go @@ -86,7 +86,7 @@ func TestAgentRun(t *testing.T) { t.Run("should return error if processor could not be found", func(t *testing.T) { extr := mocks.NewExtractor() - extr.On("Init", mockCtx, validRecipe.Source.Config).Return(nil).Once() + extr.On("Init", mockCtx, buildPluginConfig(validRecipe.Source)).Return(nil).Once() defer extr.AssertExpectations(t) ef := registry.NewExtractorFactory() if err := ef.Register("test-extractor", newExtractor(extr)); err != nil { @@ -117,7 +117,7 @@ func TestAgentRun(t *testing.T) { t.Run("should return error if sink could not be found", func(t *testing.T) { extr := mocks.NewExtractor() - extr.On("Init", mockCtx, validRecipe.Source.Config).Return(nil).Once() + extr.On("Init", mockCtx, buildPluginConfig(validRecipe.Source)).Return(nil).Once() defer extr.AssertExpectations(t) ef := registry.NewExtractorFactory() if err := 
ef.Register("test-extractor", newExtractor(extr)); err != nil { @@ -125,7 +125,7 @@ func TestAgentRun(t *testing.T) { } proc := mocks.NewProcessor() - proc.On("Init", mockCtx, validRecipe.Processors[0].Config).Return(nil).Once() + proc.On("Init", mockCtx, buildPluginConfig(validRecipe.Processors[0])).Return(nil).Once() defer proc.AssertExpectations(t) pf := registry.NewProcessorFactory() if err := pf.Register("test-processor", newProcessor(proc)); err != nil { @@ -150,7 +150,7 @@ func TestAgentRun(t *testing.T) { t.Run("should return error when initiating extractor fails", func(t *testing.T) { extr := mocks.NewExtractor() - extr.On("Init", mockCtx, validRecipe.Source.Config).Return(errors.New("some error")).Once() + extr.On("Init", mockCtx, buildPluginConfig(validRecipe.Source)).Return(errors.New("some error")).Once() defer extr.AssertExpectations(t) ef := registry.NewExtractorFactory() if err := ef.Register("test-extractor", newExtractor(extr)); err != nil { @@ -187,7 +187,7 @@ func TestAgentRun(t *testing.T) { t.Run("should return error when initiating processor fails", func(t *testing.T) { extr := mocks.NewExtractor() - extr.On("Init", mockCtx, validRecipe.Source.Config).Return(nil).Once() + extr.On("Init", mockCtx, buildPluginConfig(validRecipe.Source)).Return(nil).Once() defer extr.AssertExpectations(t) ef := registry.NewExtractorFactory() if err := ef.Register("test-extractor", newExtractor(extr)); err != nil { @@ -195,7 +195,7 @@ func TestAgentRun(t *testing.T) { } proc := mocks.NewProcessor() - proc.On("Init", mockCtx, validRecipe.Processors[0].Config).Return(errors.New("some error")).Once() + proc.On("Init", mockCtx, buildPluginConfig(validRecipe.Processors[0])).Return(errors.New("some error")).Once() defer proc.AssertExpectations(t) pf := registry.NewProcessorFactory() if err := pf.Register("test-processor", newProcessor(proc)); err != nil { @@ -226,7 +226,7 @@ func TestAgentRun(t *testing.T) { t.Run("should return error when initiating sink fails", func(t *testing.T) { extr := mocks.NewExtractor() - extr.On("Init", mockCtx, validRecipe.Source.Config).Return(nil).Once() + extr.On("Init", mockCtx, buildPluginConfig(validRecipe.Source)).Return(nil).Once() defer extr.AssertExpectations(t) ef := registry.NewExtractorFactory() if err := ef.Register("test-extractor", newExtractor(extr)); err != nil { @@ -234,7 +234,7 @@ func TestAgentRun(t *testing.T) { } proc := mocks.NewProcessor() - proc.On("Init", mockCtx, validRecipe.Processors[0].Config).Return(nil).Once() + proc.On("Init", mockCtx, buildPluginConfig(validRecipe.Processors[0])).Return(nil).Once() defer proc.AssertExpectations(t) pf := registry.NewProcessorFactory() if err := pf.Register("test-processor", newProcessor(proc)); err != nil { @@ -242,7 +242,7 @@ func TestAgentRun(t *testing.T) { } sink := mocks.NewSink() - sink.On("Init", mockCtx, validRecipe.Sinks[0].Config).Return(errors.New("some error")).Once() + sink.On("Init", mockCtx, buildPluginConfig(validRecipe.Sinks[0])).Return(errors.New("some error")).Once() defer sink.AssertExpectations(t) sf := registry.NewSinkFactory() if err := sf.Register("test-sink", newSink(sink)); err != nil { @@ -267,7 +267,7 @@ func TestAgentRun(t *testing.T) { t.Run("should return error when extracting fails", func(t *testing.T) { extr := mocks.NewExtractor() - extr.On("Init", mockCtx, validRecipe.Source.Config).Return(nil).Once() + extr.On("Init", mockCtx, buildPluginConfig(validRecipe.Source)).Return(nil).Once() extr.On("Extract", mockCtx, 
mock.AnythingOfType("plugins.Emit")).Return(errors.New("some error")).Once() ef := registry.NewExtractorFactory() if err := ef.Register("test-extractor", newExtractor(extr)); err != nil { @@ -275,7 +275,7 @@ func TestAgentRun(t *testing.T) { } proc := mocks.NewProcessor() - proc.On("Init", mockCtx, validRecipe.Processors[0].Config).Return(nil).Once() + proc.On("Init", mockCtx, buildPluginConfig(validRecipe.Processors[0])).Return(nil).Once() defer proc.AssertExpectations(t) pf := registry.NewProcessorFactory() if err := pf.Register("test-processor", newProcessor(proc)); err != nil { @@ -283,7 +283,7 @@ func TestAgentRun(t *testing.T) { } sink := mocks.NewSink() - sink.On("Init", mockCtx, validRecipe.Sinks[0].Config).Return(nil).Once() + sink.On("Init", mockCtx, buildPluginConfig(validRecipe.Sinks[0])).Return(nil).Once() sink.On("Close").Return(nil) defer sink.AssertExpectations(t) sf := registry.NewSinkFactory() @@ -309,14 +309,14 @@ func TestAgentRun(t *testing.T) { t.Run("should return error when extractor panicing", func(t *testing.T) { extr := new(panicExtractor) - extr.On("Init", mockCtx, validRecipe.Source.Config).Return(nil).Once() + extr.On("Init", mockCtx, buildPluginConfig(validRecipe.Source)).Return(nil).Once() ef := registry.NewExtractorFactory() if err := ef.Register("test-extractor", newExtractor(extr)); err != nil { t.Fatal(err) } proc := mocks.NewProcessor() - proc.On("Init", mockCtx, validRecipe.Processors[0].Config).Return(nil).Once() + proc.On("Init", mockCtx, buildPluginConfig(validRecipe.Processors[0])).Return(nil).Once() defer proc.AssertExpectations(t) pf := registry.NewProcessorFactory() if err := pf.Register("test-processor", newProcessor(proc)); err != nil { @@ -324,7 +324,7 @@ func TestAgentRun(t *testing.T) { } sink := mocks.NewSink() - sink.On("Init", mockCtx, validRecipe.Sinks[0].Config).Return(nil).Once() + sink.On("Init", mockCtx, buildPluginConfig(validRecipe.Sinks[0])).Return(nil).Once() sink.On("Close").Return(nil) defer sink.AssertExpectations(t) sf := registry.NewSinkFactory() @@ -357,7 +357,7 @@ func TestAgentRun(t *testing.T) { extr := mocks.NewExtractor() extr.SetEmit(data) - extr.On("Init", mockCtx, validRecipe.Source.Config).Return(nil).Once() + extr.On("Init", mockCtx, buildPluginConfig(validRecipe.Source)).Return(nil).Once() extr.On("Extract", mockCtx, mock.AnythingOfType("plugins.Emit")).Return(nil).Once() ef := registry.NewExtractorFactory() if err := ef.Register("test-extractor", newExtractor(extr)); err != nil { @@ -365,7 +365,7 @@ func TestAgentRun(t *testing.T) { } proc := mocks.NewProcessor() - proc.On("Init", mockCtx, validRecipe.Processors[0].Config).Return(nil).Once() + proc.On("Init", mockCtx, buildPluginConfig(validRecipe.Processors[0])).Return(nil).Once() proc.On("Process", mockCtx, data[0]).Return(data[0], errors.New("some error")).Once() defer proc.AssertExpectations(t) pf := registry.NewProcessorFactory() @@ -374,7 +374,7 @@ func TestAgentRun(t *testing.T) { } sink := mocks.NewSink() - sink.On("Init", mockCtx, validRecipe.Sinks[0].Config).Return(nil).Once() + sink.On("Init", mockCtx, buildPluginConfig(validRecipe.Sinks[0])).Return(nil).Once() sink.On("Close").Return(nil) defer sink.AssertExpectations(t) sf := registry.NewSinkFactory() @@ -407,7 +407,7 @@ func TestAgentRun(t *testing.T) { extr := mocks.NewExtractor() extr.SetEmit(data) - extr.On("Init", mockCtx, validRecipe.Source.Config).Return(nil).Once() + extr.On("Init", mockCtx, buildPluginConfig(validRecipe.Source)).Return(nil).Once() extr.On("Extract", mockCtx, 
mock.AnythingOfType("plugins.Emit")).Return(nil).Once() ef := registry.NewExtractorFactory() if err := ef.Register("test-extractor", newExtractor(extr)); err != nil { @@ -415,7 +415,7 @@ func TestAgentRun(t *testing.T) { } proc := new(panicProcessor) - proc.On("Init", mockCtx, validRecipe.Processors[0].Config).Return(nil).Once() + proc.On("Init", mockCtx, buildPluginConfig(validRecipe.Processors[0])).Return(nil).Once() defer proc.AssertExpectations(t) pf := registry.NewProcessorFactory() if err := pf.Register("test-processor", newProcessor(proc)); err != nil { @@ -423,7 +423,7 @@ func TestAgentRun(t *testing.T) { } sink := mocks.NewSink() - sink.On("Init", mockCtx, validRecipe.Sinks[0].Config).Return(nil).Once() + sink.On("Init", mockCtx, buildPluginConfig(validRecipe.Sinks[0])).Return(nil).Once() sink.On("Close").Return(nil) defer sink.AssertExpectations(t) sf := registry.NewSinkFactory() @@ -456,7 +456,7 @@ func TestAgentRun(t *testing.T) { extr := mocks.NewExtractor() extr.SetEmit(data) - extr.On("Init", mockCtx, validRecipe.Source.Config).Return(nil).Once() + extr.On("Init", mockCtx, buildPluginConfig(validRecipe.Source)).Return(nil).Once() extr.On("Extract", mockCtx, mock.AnythingOfType("plugins.Emit")).Return(nil) ef := registry.NewExtractorFactory() if err := ef.Register("test-extractor", newExtractor(extr)); err != nil { @@ -464,7 +464,7 @@ func TestAgentRun(t *testing.T) { } proc := mocks.NewProcessor() - proc.On("Init", mockCtx, validRecipe.Processors[0].Config).Return(nil).Once() + proc.On("Init", mockCtx, buildPluginConfig(validRecipe.Processors[0])).Return(nil).Once() proc.On("Process", mockCtx, data[0]).Return(data[0], nil) defer proc.AssertExpectations(t) pf := registry.NewProcessorFactory() @@ -473,7 +473,7 @@ func TestAgentRun(t *testing.T) { } sink := mocks.NewSink() - sink.On("Init", mockCtx, validRecipe.Sinks[0].Config).Return(nil).Once() + sink.On("Init", mockCtx, buildPluginConfig(validRecipe.Sinks[0])).Return(nil).Once() sink.On("Sink", mockCtx, data).Return(errors.New("some error")) sink.On("Close").Return(nil) defer sink.AssertExpectations(t) @@ -508,7 +508,7 @@ func TestAgentRun(t *testing.T) { extr := mocks.NewExtractor() extr.SetEmit(data) - extr.On("Init", mockCtx, validRecipe.Source.Config).Return(nil).Once() + extr.On("Init", mockCtx, buildPluginConfig(validRecipe.Source)).Return(nil).Once() extr.On("Extract", mockCtx, mock.AnythingOfType("plugins.Emit")).Return(nil) ef := registry.NewExtractorFactory() if err := ef.Register("test-extractor", newExtractor(extr)); err != nil { @@ -516,7 +516,7 @@ func TestAgentRun(t *testing.T) { } proc := mocks.NewProcessor() - proc.On("Init", mockCtx, validRecipe.Processors[0].Config).Return(nil).Once() + proc.On("Init", mockCtx, buildPluginConfig(validRecipe.Processors[0])).Return(nil).Once() proc.On("Process", mockCtx, data[0]).Return(data[0], nil) defer proc.AssertExpectations(t) pf := registry.NewProcessorFactory() @@ -525,7 +525,7 @@ func TestAgentRun(t *testing.T) { } sink := mocks.NewSink() - sink.On("Init", mockCtx, validRecipe.Sinks[0].Config).Return(nil).Once() + sink.On("Init", mockCtx, buildPluginConfig(validRecipe.Sinks[0])).Return(nil).Once() sink.On("Sink", mockCtx, data).Return(errors.New("some error")) sink.On("Close").Return(nil) defer sink.AssertExpectations(t) @@ -561,7 +561,7 @@ func TestAgentRun(t *testing.T) { extr := mocks.NewExtractor() extr.SetEmit(data) - extr.On("Init", mockCtx, validRecipe.Source.Config).Return(nil).Once() + extr.On("Init", mockCtx, 
buildPluginConfig(validRecipe.Source)).Return(nil).Once() extr.On("Extract", mockCtx, mock.AnythingOfType("plugins.Emit")).Return(nil) ef := registry.NewExtractorFactory() if err := ef.Register("test-extractor", newExtractor(extr)); err != nil { @@ -569,7 +569,7 @@ func TestAgentRun(t *testing.T) { } proc := mocks.NewProcessor() - proc.On("Init", mockCtx, validRecipe.Processors[0].Config).Return(nil).Once() + proc.On("Init", mockCtx, buildPluginConfig(validRecipe.Processors[0])).Return(nil).Once() proc.On("Process", mockCtx, data[0]).Return(data[0], nil) defer proc.AssertExpectations(t) pf := registry.NewProcessorFactory() @@ -578,7 +578,7 @@ func TestAgentRun(t *testing.T) { } sink := mocks.NewSink() - sink.On("Init", mockCtx, validRecipe.Sinks[0].Config).Return(nil).Once() + sink.On("Init", mockCtx, buildPluginConfig(validRecipe.Sinks[0])).Return(nil).Once() sink.On("Sink", mockCtx, data).Return(nil) sink.On("Close").Return(nil) defer sink.AssertExpectations(t) @@ -619,7 +619,7 @@ func TestAgentRun(t *testing.T) { extr := mocks.NewExtractor() extr.SetEmit(data) - extr.On("Init", mockCtx, validRecipe.Source.Config).Return(nil).Once() + extr.On("Init", mockCtx, buildPluginConfig(validRecipe.Source)).Return(nil).Once() extr.On("Extract", mockCtx, mock.AnythingOfType("plugins.Emit")).Return(nil) ef := registry.NewExtractorFactory() if err := ef.Register("test-extractor", newExtractor(extr)); err != nil { @@ -627,7 +627,7 @@ func TestAgentRun(t *testing.T) { } proc := mocks.NewProcessor() - proc.On("Init", mockCtx, validRecipe.Processors[0].Config).Return(nil).Once() + proc.On("Init", mockCtx, buildPluginConfig(validRecipe.Processors[0])).Return(nil).Once() proc.On("Process", mockCtx, data[0]).Return(data[0], nil) defer proc.AssertExpectations(t) pf := registry.NewProcessorFactory() @@ -636,7 +636,7 @@ func TestAgentRun(t *testing.T) { } sink := mocks.NewSink() - sink.On("Init", mockCtx, validRecipe.Sinks[0].Config).Return(nil).Once() + sink.On("Init", mockCtx, buildPluginConfig(validRecipe.Sinks[0])).Return(nil).Once() sink.On("Sink", mockCtx, data).Return(nil) sink.On("Close").Return(nil) defer sink.AssertExpectations(t) @@ -674,7 +674,7 @@ func TestAgentRun(t *testing.T) { extr := mocks.NewExtractor() extr.SetEmit(data) - extr.On("Init", mockCtx, validRecipe.Source.Config).Return(nil).Once() + extr.On("Init", mockCtx, buildPluginConfig(validRecipe.Source)).Return(nil).Once() extr.On("Extract", mockCtx, mock.AnythingOfType("plugins.Emit")).Return(nil) ef := registry.NewExtractorFactory() if err := ef.Register("test-extractor", newExtractor(extr)); err != nil { @@ -682,7 +682,7 @@ func TestAgentRun(t *testing.T) { } proc := mocks.NewProcessor() - proc.On("Init", mockCtx, validRecipe.Processors[0].Config).Return(nil).Once() + proc.On("Init", mockCtx, buildPluginConfig(validRecipe.Processors[0])).Return(nil).Once() proc.On("Process", mockCtx, data[0]).Return(data[0], nil) defer proc.AssertExpectations(t) pf := registry.NewProcessorFactory() @@ -691,7 +691,7 @@ func TestAgentRun(t *testing.T) { } sink := mocks.NewSink() - sink.On("Init", mockCtx, validRecipe.Sinks[0].Config).Return(nil).Once() + sink.On("Init", mockCtx, buildPluginConfig(validRecipe.Sinks[0])).Return(nil).Once() sink.On("Sink", mockCtx, data).Return(plugins.NewRetryError(err)).Once() sink.On("Sink", mockCtx, data).Return(nil) sink.On("Close").Return(nil) @@ -733,7 +733,7 @@ func TestAgentRunMultiple(t *testing.T) { } extr := mocks.NewExtractor() extr.SetEmit(data) - extr.On("Init", mockCtx, 
validRecipe.Source.Config).Return(nil) + extr.On("Init", mockCtx, buildPluginConfig(validRecipe.Source)).Return(nil) extr.On("Extract", mockCtx, mock.AnythingOfType("plugins.Emit")).Return(nil) ef := registry.NewExtractorFactory() if err := ef.Register("test-extractor", newExtractor(extr)); err != nil { @@ -741,7 +741,7 @@ func TestAgentRunMultiple(t *testing.T) { } proc := mocks.NewProcessor() - proc.On("Init", mockCtx, validRecipe.Processors[0].Config).Return(nil) + proc.On("Init", mockCtx, buildPluginConfig(validRecipe.Processors[0])).Return(nil) proc.On("Process", mockCtx, data[0]).Return(data[0], nil) defer proc.AssertExpectations(t) pf := registry.NewProcessorFactory() @@ -750,7 +750,7 @@ func TestAgentRunMultiple(t *testing.T) { } sink := mocks.NewSink() - sink.On("Init", mockCtx, validRecipe.Sinks[0].Config).Return(nil) + sink.On("Init", mockCtx, buildPluginConfig(validRecipe.Sinks[0])).Return(nil) sink.On("Sink", mockCtx, data).Return(nil) sink.On("Close").Return(nil) defer sink.AssertExpectations(t) @@ -826,14 +826,14 @@ func TestValidate(t *testing.T) { extr := mocks.NewExtractor() err := plugins.InvalidConfigError{} - extr.On("Validate", invalidRecipe.Source.Config).Return(err).Once() + extr.On("Validate", buildPluginConfig(invalidRecipe.Source)).Return(err).Once() ef := registry.NewExtractorFactory() if err := ef.Register("test-extractor", newExtractor(extr)); err != nil { t.Fatal(err) } proc := mocks.NewProcessor() - proc.On("Validate", invalidRecipe.Processors[0].Config).Return(err).Once() + proc.On("Validate", buildPluginConfig(invalidRecipe.Processors[0])).Return(err).Once() defer proc.AssertExpectations(t) pf := registry.NewProcessorFactory() if err := pf.Register("test-processor", newProcessor(proc)); err != nil { @@ -841,7 +841,7 @@ func TestValidate(t *testing.T) { } sink := mocks.NewSink() - sink.On("Validate", invalidRecipe.Sinks[0].Config).Return(err).Once() + sink.On("Validate", buildPluginConfig(invalidRecipe.Sinks[0])).Return(err).Once() defer sink.AssertExpectations(t) sf := registry.NewSinkFactory() if err := sf.Register("test-sink", newSink(sink)); err != nil { @@ -928,3 +928,7 @@ func enrichInvalidConfigError(err error, pluginName string, pluginType plugins.P return err } + +func buildPluginConfig(pr recipe.PluginRecipe) plugins.Config { + return plugins.Config{RawConfig: pr.Config, URNScope: pr.Scope} +} diff --git a/agent/util.go b/agent/util.go new file mode 100644 index 000000000..481f9799f --- /dev/null +++ b/agent/util.go @@ -0,0 +1,13 @@ +package agent + +import ( + "github.com/odpf/meteor/plugins" + "github.com/odpf/meteor/recipe" +) + +func recipeToPluginConfig(pr recipe.PluginRecipe) plugins.Config { + return plugins.Config{ + URNScope: pr.Scope, + RawConfig: pr.Config, + } +} diff --git a/go.mod b/go.mod index 71092c2c6..5b7703e59 100644 --- a/go.mod +++ b/go.mod @@ -19,31 +19,24 @@ require ( github.com/elastic/go-elasticsearch v0.0.0 github.com/elastic/go-elasticsearch/v8 v8.0.0-20210708134649-33f644c8e327 github.com/etsy/statsd v0.9.0 - github.com/flimzy/diff v0.1.7 // indirect - github.com/flimzy/testy v0.1.17 // indirect github.com/go-kivik/couchdb v2.0.0+incompatible github.com/go-kivik/kivik v2.0.0+incompatible - github.com/go-kivik/kiviktest v2.0.0+incompatible // indirect github.com/go-playground/validator/v10 v10.7.0 github.com/go-sql-driver/mysql v1.6.0 github.com/gocql/gocql v0.0.0-20210817081954-bc256bbb90de github.com/google/go-github/v37 v37.0.0 - github.com/gopherjs/gopherjs v0.0.0-20210503212227-fb464eba2686 // indirect 
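Note on the agent changes above: every plugin now receives a `plugins.Config` carrying both the recipe scope and the raw config map, instead of the bare map. A minimal sketch of that mapping, mirroring the `recipeToPluginConfig` helper added in `agent/util.go` and the `buildPluginConfig` test helper (the standalone `main` and the sample values are illustrative only):

```go
package main

import (
	"fmt"

	"github.com/odpf/meteor/plugins"
	"github.com/odpf/meteor/recipe"
)

// toPluginConfig mirrors agent/util.go's recipeToPluginConfig: the recipe's
// scope becomes the URN scope and the untyped config map is passed through.
func toPluginConfig(pr recipe.PluginRecipe) plugins.Config {
	return plugins.Config{
		URNScope:  pr.Scope,
		RawConfig: pr.Config,
	}
}

func main() {
	src := recipe.PluginRecipe{
		Name:  "bigquery",
		Scope: "p-godata-id", // used to build unique asset URNs
		Config: map[string]interface{}{
			"project_id": "sample-project",
		},
	}

	cfg := toPluginConfig(src)
	fmt.Println(cfg.URNScope) // p-godata-id
}
```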
github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 github.com/hashicorp/go-hclog v0.16.1 github.com/hashicorp/go-plugin v1.4.2 - github.com/kr/text v0.2.0 // indirect github.com/lib/pq v1.10.4 github.com/mcuadros/go-defaults v1.2.0 github.com/mitchellh/mapstructure v1.4.2 - github.com/muesli/reflow v0.3.0 // indirect github.com/nsf/jsondiff v0.0.0-20210926074059-1e845ec5d249 github.com/odpf/optimus v0.2.1-rc.1 github.com/odpf/salt v0.0.0-20220123093403-faac19525416 github.com/odpf/shield v0.2.3 github.com/ory/dockertest/v3 v3.8.0 - github.com/pierrec/lz4 v2.6.1+incompatible // indirect github.com/pkg/errors v0.9.1 github.com/prestodb/presto-go-client v0.0.0-20211201125635-ad28cec17d6c github.com/schollz/progressbar/v3 v3.8.5 @@ -54,7 +47,6 @@ require ( github.com/spf13/cobra v1.2.1 github.com/spf13/viper v1.9.0 github.com/stretchr/testify v1.7.0 - gitlab.com/flimzy/testy v0.8.0 // indirect go.mongodb.org/mongo-driver v1.7.0 go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.25.0 golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1 @@ -64,3 +56,14 @@ require ( google.golang.org/protobuf v1.28.0 gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b ) + +require ( + github.com/flimzy/diff v0.1.7 // indirect + github.com/flimzy/testy v0.1.17 // indirect + github.com/go-kivik/kiviktest v2.0.0+incompatible // indirect + github.com/gopherjs/gopherjs v0.0.0-20210503212227-fb464eba2686 // indirect + github.com/kr/text v0.2.0 // indirect + github.com/muesli/reflow v0.3.0 // indirect + github.com/pierrec/lz4 v2.6.1+incompatible // indirect + gitlab.com/flimzy/testy v0.8.0 // indirect +) diff --git a/models/urn.go b/models/urn.go index 2ac8ec964..807ebbe76 100644 --- a/models/urn.go +++ b/models/urn.go @@ -2,14 +2,9 @@ package models import "fmt" -func TableURN(service, host, database, name string) string { - return fmt.Sprintf("%s::%s/%s/%s", service, host, database, name) -} - -func DashboardURN(service, host, id string) string { - return fmt.Sprintf("%s::%s/%s", service, host, id) -} - -func JobURN(service, host, id string) string { - return fmt.Sprintf("%s::%s/%s", service, host, id) +func NewURN(service, scope, kind, id string) string { + return fmt.Sprintf( + "urn:%s:%s:%s:%s", + service, scope, kind, id, + ) } diff --git a/models/urn_test.go b/models/urn_test.go new file mode 100644 index 000000000..0ab438dd6 --- /dev/null +++ b/models/urn_test.go @@ -0,0 +1,35 @@ +package models_test + +import ( + "fmt" + "testing" + + "github.com/odpf/meteor/models" + "github.com/stretchr/testify/assert" +) + +func TestNewURN(t *testing.T) { + testCases := []struct { + service string + scope string + kind string + id string + expected string + }{ + { + "metabase", "main-dashboard", "collection", "123", + "urn:metabase:main-dashboard:collection:123", + }, + { + "bigquery", "p-godata-id", "table", "p-godata-id:mydataset.mytable", + "urn:bigquery:p-godata-id:table:p-godata-id:mydataset.mytable", + }, + } + + for i, tc := range testCases { + t.Run(fmt.Sprintf("should return expected urn (#%d)", i+1), func(t *testing.T) { + actual := models.NewURN(tc.service, tc.scope, tc.kind, tc.id) + assert.Equal(t, tc.expected, actual) + }) + } +} diff --git a/plugins/base_extractor.go b/plugins/base_extractor.go new file mode 100644 index 000000000..4b72305c0 --- /dev/null +++ b/plugins/base_extractor.go @@ -0,0 +1,31 @@ +package plugins + +import "context" + +type BaseExtractor struct { + BasePlugin +} + +func NewBaseExtractor(info Info, 
configRef interface{}) BaseExtractor { + return BaseExtractor{ + BasePlugin: NewBasePlugin(info, configRef), + } +} + +// Validate checks if the given options is valid for the plugin. +func (p *BaseExtractor) Validate(config Config) error { + if config.URNScope == "" { + return ErrEmptyURNScope + } + + return p.BasePlugin.Validate(config) +} + +// Init will be called once before running the plugin. +// This is where you want to initiate any client or test any connection to external service. +func (p *BaseExtractor) Init(ctx context.Context, config Config) error { + p.UrnScope = config.URNScope + p.RawConfig = config.RawConfig + + return p.Validate(config) +} diff --git a/plugins/base_extractor_test.go b/plugins/base_extractor_test.go new file mode 100644 index 000000000..36ec5ce03 --- /dev/null +++ b/plugins/base_extractor_test.go @@ -0,0 +1,61 @@ +package plugins_test + +import ( + "testing" + + "github.com/odpf/meteor/plugins" + "github.com/stretchr/testify/assert" +) + +func TestNewBaseExtractor(t *testing.T) { + t.Run("should assign info and return base plugin", func(t *testing.T) { + info := plugins.Info{ + Description: "test-description", + SampleConfig: "sample-config", + Summary: "test-summary", + Tags: []string{"test", "plugin"}, + } + actual := plugins.NewBaseExtractor(info, nil) + + assert.Equal(t, info, actual.Info()) + }) +} + +func TestBaseExtractorValidate(t *testing.T) { + t.Run("should return ErrEmptyURNScope if Config.URNScope is empty", func(t *testing.T) { + basePlugin := plugins.NewBaseExtractor(plugins.Info{}, nil) + err := basePlugin.Validate(plugins.Config{URNScope: ""}) + + assert.ErrorIs(t, err, plugins.ErrEmptyURNScope) + }) + + t.Run("should return InvalidConfigError if config is invalid", func(t *testing.T) { + invalidConfig := struct { + FieldA string `validate:"required"` + }{} + + basePlugin := plugins.NewBaseExtractor(plugins.Info{}, &invalidConfig) + err := basePlugin.Validate(plugins.Config{ + URNScope: "test-scope", + RawConfig: map[string]interface{}{}, + }) + + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) + }) + + t.Run("should return no error if config is valid", func(t *testing.T) { + validConfig := struct { + FieldA string `validate:"required"` + }{} + + basePlugin := plugins.NewBaseExtractor(plugins.Info{}, &validConfig) + err := basePlugin.Validate(plugins.Config{ + URNScope: "test-scope", + RawConfig: map[string]interface{}{ + "FieldA": "test-value", + }, + }) + + assert.NoError(t, err) + }) +} diff --git a/plugins/base_plugin.go b/plugins/base_plugin.go new file mode 100644 index 000000000..2446aed01 --- /dev/null +++ b/plugins/base_plugin.go @@ -0,0 +1,38 @@ +package plugins + +import ( + "context" +) + +type BasePlugin struct { + UrnScope string + RawConfig map[string]interface{} + info Info + configRef interface{} +} + +func NewBasePlugin(info Info, configRef interface{}) BasePlugin { + return BasePlugin{ + info: info, + configRef: configRef, + } +} + +// Info returns plugin's information. +func (p *BasePlugin) Info() Info { + return p.info +} + +// Validate checks if the given options is valid for the plugin. +func (p *BasePlugin) Validate(config Config) error { + return buildConfig(config.RawConfig, p.configRef) +} + +// Init will be called once before running the plugin. +// This is where you want to initiate any client or test any connection to external service. 
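To make the new base types above concrete: a plugin embeds `plugins.BaseExtractor`, wires it up via `NewBaseExtractor(info, &e.config)`, and delegates to `BaseExtractor.Init` so the URN-scope check and raw-config decoding happen in one place. This is the pattern the bigquery, bigtable and cassandra extractors adopt further down; the package name, config fields and constructor below are illustrative only:

```go
package myextractor

import (
	"context"

	"github.com/odpf/meteor/plugins"
)

// Config is the plugin-specific configuration; BasePlugin populates it from
// Config.RawConfig and runs struct validation on it.
type Config struct {
	ProjectID string `mapstructure:"project_id" validate:"required"`
}

var info = plugins.Info{
	Description:  "example extractor",
	SampleConfig: `project_id: sample-project`,
	Summary:      "illustrative only",
	Tags:         []string{"example"},
}

// Extractor embeds BaseExtractor so Info and Validate come for free, and Init
// enforces a non-empty URN scope before decoding the raw config.
type Extractor struct {
	plugins.BaseExtractor
	config Config
}

func New() *Extractor {
	e := &Extractor{}
	e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config)
	return e
}

func (e *Extractor) Init(ctx context.Context, config plugins.Config) error {
	if err := e.BaseExtractor.Init(ctx, config); err != nil {
		return err
	}
	// On success, e.config.ProjectID and e.UrnScope are populated and validated.
	return nil
}
```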
+func (p *BasePlugin) Init(ctx context.Context, config Config) error { + p.UrnScope = config.URNScope + p.RawConfig = config.RawConfig + + return p.Validate(config) +} diff --git a/plugins/base_plugin_test.go b/plugins/base_plugin_test.go new file mode 100644 index 000000000..134b65b63 --- /dev/null +++ b/plugins/base_plugin_test.go @@ -0,0 +1,102 @@ +package plugins_test + +import ( + "testing" + + "github.com/odpf/meteor/plugins" + "github.com/stretchr/testify/assert" +) + +func TestNewBasePlugin(t *testing.T) { + t.Run("should assign info and return base plugin", func(t *testing.T) { + info := plugins.Info{ + Description: "test-description", + SampleConfig: "sample-config", + Summary: "test-summary", + Tags: []string{"test", "plugin"}, + } + actual := plugins.NewBasePlugin(info, nil) + + assert.Equal(t, info, actual.Info()) + }) +} + +func TestBasePluginInfo(t *testing.T) { + t.Run("should return info", func(t *testing.T) { + info := plugins.Info{ + Description: "test-description", + SampleConfig: "sample-config", + Summary: "test-summary", + Tags: []string{"test", "plugin"}, + } + basePlugin := plugins.NewBasePlugin(info, nil) + actual := basePlugin.Info() + + assert.Equal(t, info, actual) + }) +} + +func TestBasePluginValidate(t *testing.T) { + t.Run("should return InvalidConfigError if config is invalid", func(t *testing.T) { + invalidConfig := struct { + FieldA string `validate:"required"` + }{} + + basePlugin := plugins.NewBasePlugin(plugins.Info{}, &invalidConfig) + err := basePlugin.Validate(plugins.Config{ + URNScope: "test-scope", + RawConfig: map[string]interface{}{}, + }) + + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) + }) + + t.Run("should return no error if config is valid", func(t *testing.T) { + validConfig := struct { + FieldA string `validate:"required"` + }{} + + basePlugin := plugins.NewBasePlugin(plugins.Info{}, &validConfig) + err := basePlugin.Validate(plugins.Config{ + URNScope: "test-scope", + RawConfig: map[string]interface{}{ + "FieldA": "test-value", + }, + }) + + assert.NoError(t, err) + }) +} + +func TestBasePluginInit(t *testing.T) { + t.Run("should return InvalidConfigError if config is invalid", func(t *testing.T) { + invalidConfig := struct { + FieldA string `validate:"required"` + }{} + + basePlugin := plugins.NewBasePlugin(plugins.Info{}, &invalidConfig) + err := basePlugin.Validate(plugins.Config{ + URNScope: "test-scope", + RawConfig: map[string]interface{}{}, + }) + + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) + }) + + t.Run("should return populate config and return no error if config is valid", func(t *testing.T) { + validConfig := struct { + FieldA string `validate:"required"` + }{} + + basePlugin := plugins.NewBasePlugin(plugins.Info{}, &validConfig) + err := basePlugin.Validate(plugins.Config{ + URNScope: "test-scope", + RawConfig: map[string]interface{}{ + "FieldA": "test-value", + }, + }) + + assert.NoError(t, err) + assert.Equal(t, "test-value", validConfig.FieldA) + }) +} diff --git a/plugins/errors.go b/plugins/errors.go index f45338106..93d9ac260 100644 --- a/plugins/errors.go +++ b/plugins/errors.go @@ -1,6 +1,13 @@ package plugins -import "fmt" +import ( + "errors" + "fmt" +) + +var ( + ErrEmptyURNScope = errors.New("urn scope is required to generate unique urn") +) // ConfigError contains fields to check error type ConfigError struct { diff --git a/plugins/external/discover.go b/plugins/external/discover.go deleted file mode 100644 index fd4651981..000000000 --- a/plugins/external/discover.go +++ /dev/null @@ -1,99 
+0,0 @@ -package plugins - -import ( - "os" - - "github.com/hashicorp/go-plugin" - "github.com/odpf/meteor/plugins" - "github.com/odpf/meteor/registry" -) - -var ( - pluginPrefix = "meteor-plugin-" -) - -// This functions discovers plugins and populate processors with them -// returns clean up function to kill plugins processes -// -// discover plugins from -// ./ -// with the following format meteor-plugin-{plugin_name} -// -// in case of duplicate processor name, the latest would be used with no guarantee in order -func DiscoverPlugins(factory *registry.ProcessorFactory) (killPluginsFn func(), err error) { - binaries, err := findBinaries() - if err != nil { - return - } - clients, err := createClients(binaries) - if err != nil { - return - } - killPluginsFn = buildKillPluginsFn(clients) - - err = populateProcessorFactory(clients, factory) - if err != nil { - killPluginsFn() // kill plugins processes to prevent hanging processes - return - } - - return -} - -func findBinaries() (binaries []string, err error) { - path, err := os.Getwd() // current working directory - if err != nil { - return - } - dirEntries, err := os.ReadDir(path) - if err != nil { - return - } - for _, dirEntry := range dirEntries { - if isPlugin(dirEntry.Name()) { - binaries = append(binaries, "./"+dirEntry.Name()) - } - } - - return -} -func createClients(binaries []string) (clients []*plugin.Client, err error) { - for _, binary := range binaries { - clients = append(clients, NewClient(binary)) - } - return -} -func populateProcessorFactory(clients []*plugin.Client, factory *registry.ProcessorFactory) (err error) { - for _, client := range clients { - proc, err := dispense(client) - if err != nil { - return err - } - name, err := proc.Name() - if err != nil { - return err - } - - if err = factory.Register(name, func() plugins.Processor { - return proc - }); err != nil { - return err - } - } - return -} -func isPlugin(filename string) bool { - pluginPrefixLen := len(pluginPrefix) - if len(filename) <= pluginPrefixLen { - return false - } - - return filename[:pluginPrefixLen] == pluginPrefix -} -func buildKillPluginsFn(clients []*plugin.Client) func() { - return func() { - for _, client := range clients { - client.Kill() - } - } -} diff --git a/plugins/external/discover_test.go b/plugins/external/discover_test.go deleted file mode 100644 index 1df695ec0..000000000 --- a/plugins/external/discover_test.go +++ /dev/null @@ -1,54 +0,0 @@ -// we are not using plugins_test package because -// we want to test isPlugin function which is a private function. 
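For context while reading the extractor changes below: `models.NewURN` (replacing the old `TableURN`/`DashboardURN`/`JobURN` helpers above) renders `urn:<service>:<scope>:<kind>:<id>`, and the BigQuery-specific `util.TableURN` added further down feeds the table FQN in as the id. A small worked example using the values from `models/urn_test.go` in this diff:

```go
package main

import (
	"fmt"

	"github.com/odpf/meteor/models"
	"github.com/odpf/meteor/plugins/extractors/bigquery/util"
)

func main() {
	// urn:metabase:main-dashboard:collection:123
	fmt.Println(models.NewURN("metabase", "main-dashboard", "collection", "123"))

	// util.TableURN builds the FQN "p-godata-id:mydataset.mytable" and scopes
	// it by project, giving:
	// urn:bigquery:p-godata-id:table:p-godata-id:mydataset.mytable
	fmt.Println(util.TableURN("p-godata-id", "mydataset", "mytable"))
}
```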
-// TODO: change package name to "plugins_test" -package plugins - -import ( - "testing" - - "github.com/odpf/meteor/registry" - "github.com/stretchr/testify/assert" -) - -func TestDiscoverPlugins(t *testing.T) { - // TODO: add test - factory := registry.NewProcessorFactory() - _, err := DiscoverPlugins(factory) - assert.Nil(t, err) -} - -// once we already setup a test for DiscoverPlugins this test will not be needed -// TODO: remove test if TestDiscoverPlugins is already added -func TestIsPlugin(t *testing.T) { - t.Run("should return true for correct format", func(t *testing.T) { - files := []string{ - "meteor-plugin-test", - "meteor-plugin-myplugin", - "meteor-plugin-my_plugin", - "meteor-plugin-a", - } - - for _, fileName := range files { - res := isPlugin(fileName) - assert.True(t, res) - } - }) - - t.Run("should return false for incorrect format", func(t *testing.T) { - files := []string{ - "test", - "meteor-plgin-test", - "plugin-meteor-myplugin", - "metor-plugin-my_plugin", - "myplugin-meteor-plugin", - "meteor-test-plugin", - "meteor-test-", - "meteor-test", - } - - for _, fileName := range files { - res := isPlugin(fileName) - assert.False(t, res) - } - }) -} diff --git a/plugins/external/plugin.go b/plugins/external/plugin.go deleted file mode 100644 index 3f652cf15..000000000 --- a/plugins/external/plugin.go +++ /dev/null @@ -1,68 +0,0 @@ -package plugins - -import ( - "os/exec" - - "github.com/pkg/errors" - - "github.com/hashicorp/go-hclog" - "github.com/hashicorp/go-plugin" -) - -var ( - handshakeConfig = plugin.HandshakeConfig{ - ProtocolVersion: 2, - MagicCookieKey: "METEOR_PLUGIN", - MagicCookieValue: "F$i^yqI.s]NIoHhR'fVV{=@ix-:gyN", - } - processorPluginKey = "processor" -) - -func ServeProcessor(processor Processor, logger hclog.Logger) { - plugin.Serve(&plugin.ServeConfig{ - HandshakeConfig: handshakeConfig, - Plugins: map[string]plugin.Plugin{ - processorPluginKey: &ProcessorPlugin{ - Impl: processor, - }, - }, - Logger: logger, - }) -} - -func NewClient(binaryPath string) *plugin.Client { - client := plugin.NewClient(&plugin.ClientConfig{ - HandshakeConfig: handshakeConfig, - Plugins: map[string]plugin.Plugin{ - processorPluginKey: &ProcessorPlugin{}, - }, - Cmd: exec.Command(binaryPath), - Logger: hclog.New(&hclog.LoggerOptions{ - Level: hclog.Debug, // Log level Debug is the minimum to log error from plugin - }), - }) - - return client -} - -func dispense(client *plugin.Client) (processor Processor, err error) { - // Connect via RPC - rpcClient, err := client.Client() - if err != nil { - err = errors.Wrap(err, "failed to connect client") - return - } - // Request the plugin - raw, err := rpcClient.Dispense(processorPluginKey) - if err != nil { - err = errors.Wrap(err, "failed to dispense a new instance of the plugin") - return - } - - processor, ok := raw.(Processor) - if !ok { - return processor, errors.New("invalid processor format") - } - - return -} diff --git a/plugins/external/processor.go b/plugins/external/processor.go deleted file mode 100644 index 9b5c61271..000000000 --- a/plugins/external/processor.go +++ /dev/null @@ -1,98 +0,0 @@ -package plugins - -import ( - "encoding/json" - "net/rpc" - - "github.com/hashicorp/go-plugin" - "github.com/odpf/meteor/plugins" -) - -// Processor is wrapper for processor.Processor -// it requires Name() to return the name of the processor -// it is needed for referencing it in a recipe -type Processor interface { - plugins.Processor - Name() (string, error) -} - -type processorArgs struct { - Data interface{} - Config 
map[string]interface{} -} - -type ProcessorRPC struct { - client *rpc.Client -} - -// This function will be run on the host -func (e *ProcessorRPC) Name() (name string, err error) { - err = e.client.Call("Plugin.Name", new(interface{}), &name) - if err != nil { - return - } - - return -} - -// This function will be run on the host -func (e *ProcessorRPC) Process(data interface{}, config map[string]interface{}) (resp []interface{}, err error) { - args, err := json.Marshal(processorArgs{ - Data: data, - Config: config, - }) - if err != nil { - return resp, err - } - - err = e.client.Call("Plugin.Process", args, &resp) - if err != nil { - return resp, err - } - - return resp, nil -} - -type ProcessorRPCServer struct { - // This is the real implementation - Impl Processor -} - -// This function will be run on the remote plugin -func (s *ProcessorRPCServer) Name(args interface{}, name *string) (err error) { - *name, err = s.Impl.Name() - if err != nil { - return - } - - return -} - -// This function will be run on the remote plugin -func (s *ProcessorRPCServer) Process(argsBytes []byte, res *interface{}) (err error) { - var args processorArgs - err = json.Unmarshal(argsBytes, &args) - if err != nil { - return - } - - //TODO: runtime processors are broken - //*res, err = s.Impl.Process(args.Config) - //if err != nil { - // return - //} - return -} - -type ProcessorPlugin struct { - // Impl Injection - Impl Processor -} - -func (p *ProcessorPlugin) Server(*plugin.MuxBroker) (interface{}, error) { - return &ProcessorRPCServer{Impl: p.Impl}, nil -} - -func (ProcessorPlugin) Client(b *plugin.MuxBroker, c *rpc.Client) (interface{}, error) { - return &ProcessorRPC{client: c}, nil -} diff --git a/plugins/extractors/bigquery/auditlog/logdata.go b/plugins/extractors/bigquery/auditlog/logdata.go index 9d9538d89..7b9b87b4e 100644 --- a/plugins/extractors/bigquery/auditlog/logdata.go +++ b/plugins/extractors/bigquery/auditlog/logdata.go @@ -1,13 +1,11 @@ package auditlog import ( - "github.com/odpf/meteor/models" + "github.com/odpf/meteor/plugins/extractors/bigquery/util" "github.com/pkg/errors" loggingpb "google.golang.org/genproto/googleapis/cloud/bigquery/logging/v1" ) -const serviceName = "bigquery" - type LogData struct { *loggingpb.AuditData } @@ -19,7 +17,7 @@ func (ld *LogData) GetReferencedTablesURN() (refTablesURN []string) { return } for _, rt := range stats.ReferencedTables { - tableURN := models.TableURN(serviceName, rt.ProjectId, rt.DatasetId, rt.TableId) + tableURN := util.TableURN(rt.ProjectId, rt.DatasetId, rt.TableId) refTablesURN = append(refTablesURN, tableURN) } return diff --git a/plugins/extractors/bigquery/auditlog/logdata_test.go b/plugins/extractors/bigquery/auditlog/logdata_test.go index 815a60a29..03c9edfba 100644 --- a/plugins/extractors/bigquery/auditlog/logdata_test.go +++ b/plugins/extractors/bigquery/auditlog/logdata_test.go @@ -3,7 +3,7 @@ package auditlog import ( "testing" - "github.com/odpf/meteor/models" + "github.com/odpf/meteor/plugins/extractors/bigquery/util" "github.com/stretchr/testify/assert" loggingpb "google.golang.org/genproto/googleapis/cloud/bigquery/logging/v1" statuspb "google.golang.org/genproto/googleapis/rpc/status" @@ -206,9 +206,9 @@ func TestGetReferencedTablesURN(t *testing.T) { rts := testDataLogData1.GetReferencedTablesURN() expectedRefTablesURN := []string{ - models.TableURN("bigquery", "project1", "dataset1", "table1"), - models.TableURN("bigquery", "project2", "dataset1", "table1"), - models.TableURN("bigquery", "project3", "dataset1", 
"table1"), + util.TableURN("project1", "dataset1", "table1"), + util.TableURN("project2", "dataset1", "table1"), + util.TableURN("project3", "dataset1", "table1"), } assert.EqualValues(t, expectedRefTablesURN, rts) }) diff --git a/plugins/extractors/bigquery/auditlog/testdata.go b/plugins/extractors/bigquery/auditlog/testdata.go index c5a7a989e..55332c3f1 100644 --- a/plugins/extractors/bigquery/auditlog/testdata.go +++ b/plugins/extractors/bigquery/auditlog/testdata.go @@ -1,29 +1,29 @@ package auditlog import ( - "github.com/odpf/meteor/models" + "github.com/odpf/meteor/plugins/extractors/bigquery/util" loggingpb "google.golang.org/genproto/googleapis/cloud/bigquery/logging/v1" ) var testDataRefTables1 = []string{ - models.TableURN("bigquery", "project1", "dataset1", "table1"), - models.TableURN("bigquery", "project2", "dataset1", "table1"), - models.TableURN("bigquery", "project3", "dataset1", "table1"), + util.TableURN("project1", "dataset1", "table1"), + util.TableURN("project2", "dataset1", "table1"), + util.TableURN("project3", "dataset1", "table1"), } var testDataRefTables2 = []string{ - models.TableURN("bigquery", "project1", "dataset1", "table1"), - models.TableURN("bigquery", "project3", "dataset1", "table1"), - models.TableURN("bigquery", "project4", "dataset1", "table1"), + util.TableURN("project1", "dataset1", "table1"), + util.TableURN("project3", "dataset1", "table1"), + util.TableURN("project4", "dataset1", "table1"), } var testDataRefTables3 = []string{ - models.TableURN("bigquery", "project1", "dataset1", "table1"), - models.TableURN("bigquery", "project3", "dataset1", "table1"), + util.TableURN("project1", "dataset1", "table1"), + util.TableURN("project3", "dataset1", "table1"), } var testDataRefTables4 = []string{ - models.TableURN("bigquery", "project1", "dataset1", "table1"), + util.TableURN("project1", "dataset1", "table1"), } var testDataLogData1 = &LogData{ @@ -173,15 +173,15 @@ var testDataLogData4 = &LogData{ } var testDataJoinDetail1234 = map[string]map[string]JoinDetail{ - models.TableURN("bigquery", "project1", "dataset1", "table1"): { - models.TableURN("bigquery", "project2", "dataset1", "table1"): { + util.TableURN("project1", "dataset1", "table1"): { + util.TableURN("project2", "dataset1", "table1"): { Usage: 1, Conditions: map[string]bool{ "ON t1.somefield = t2.anotherfield": true, "ON t1.somefield = t3.yetanotherfield": true, }, }, - models.TableURN("bigquery", "project3", "dataset1", "table1"): { + util.TableURN("project3", "dataset1", "table1"): { Usage: 3, Conditions: map[string]bool{ "ON t1.somefield = t2.anotherfield": true, @@ -190,7 +190,7 @@ var testDataJoinDetail1234 = map[string]map[string]JoinDetail{ }, }, - models.TableURN("bigquery", "project4", "dataset1", "table1"): { + util.TableURN("project4", "dataset1", "table1"): { Usage: 1, Conditions: map[string]bool{ "ON t1.somefield = t2.anotherfield": true, @@ -198,15 +198,15 @@ var testDataJoinDetail1234 = map[string]map[string]JoinDetail{ }, }, }, - models.TableURN("bigquery", "project2", "dataset1", "table1"): { - models.TableURN("bigquery", "project1", "dataset1", "table1"): { + util.TableURN("project2", "dataset1", "table1"): { + util.TableURN("project1", "dataset1", "table1"): { Usage: 1, Conditions: map[string]bool{ "ON t1.somefield = t2.anotherfield": true, "ON t1.somefield = t3.yetanotherfield": true, }, }, - models.TableURN("bigquery", "project3", "dataset1", "table1"): { + util.TableURN("project3", "dataset1", "table1"): { Usage: 1, Conditions: map[string]bool{ "ON t1.somefield = 
t2.anotherfield": true, @@ -214,8 +214,8 @@ var testDataJoinDetail1234 = map[string]map[string]JoinDetail{ }, }, }, - models.TableURN("bigquery", "project3", "dataset1", "table1"): { - models.TableURN("bigquery", "project1", "dataset1", "table1"): { + util.TableURN("project3", "dataset1", "table1"): { + util.TableURN("project1", "dataset1", "table1"): { Usage: 3, Conditions: map[string]bool{ "ON t1.somefield = t2.anotherfield": true, @@ -223,14 +223,14 @@ var testDataJoinDetail1234 = map[string]map[string]JoinDetail{ "USING (somefield,anotherfield)": true, }, }, - models.TableURN("bigquery", "project2", "dataset1", "table1"): { + util.TableURN("project2", "dataset1", "table1"): { Usage: 1, Conditions: map[string]bool{ "ON t1.somefield = t2.anotherfield": true, "ON t1.somefield = t3.yetanotherfield": true, }, }, - models.TableURN("bigquery", "project4", "dataset1", "table1"): { + util.TableURN("project4", "dataset1", "table1"): { Usage: 1, Conditions: map[string]bool{ "ON t1.somefield = t2.anotherfield": true, @@ -238,15 +238,15 @@ var testDataJoinDetail1234 = map[string]map[string]JoinDetail{ }, }, }, - models.TableURN("bigquery", "project4", "dataset1", "table1"): { - models.TableURN("bigquery", "project1", "dataset1", "table1"): { + util.TableURN("project4", "dataset1", "table1"): { + util.TableURN("project1", "dataset1", "table1"): { Usage: 1, Conditions: map[string]bool{ "ON t1.somefield = t2.anotherfield": true, "ON t1.somefield = t3.yetanotherfield": true, }, }, - models.TableURN("bigquery", "project3", "dataset1", "table1"): { + util.TableURN("project3", "dataset1", "table1"): { Usage: 1, Conditions: map[string]bool{ "ON t1.somefield = t2.anotherfield": true, @@ -257,65 +257,65 @@ var testDataJoinDetail1234 = map[string]map[string]JoinDetail{ } var testDataJoinUsage1234 = map[string]map[string]JoinDetail{ - models.TableURN("bigquery", "project1", "dataset1", "table1"): { - models.TableURN("bigquery", "project2", "dataset1", "table1"): { + util.TableURN("project1", "dataset1", "table1"): { + util.TableURN("project2", "dataset1", "table1"): { Usage: 1, }, - models.TableURN("bigquery", "project3", "dataset1", "table1"): { + util.TableURN("project3", "dataset1", "table1"): { Usage: 3, }, - models.TableURN("bigquery", "project4", "dataset1", "table1"): { + util.TableURN("project4", "dataset1", "table1"): { Usage: 1, }, }, - models.TableURN("bigquery", "project2", "dataset1", "table1"): { - models.TableURN("bigquery", "project1", "dataset1", "table1"): { + util.TableURN("project2", "dataset1", "table1"): { + util.TableURN("project1", "dataset1", "table1"): { Usage: 1, }, - models.TableURN("bigquery", "project3", "dataset1", "table1"): { + util.TableURN("project3", "dataset1", "table1"): { Usage: 1, }, }, - models.TableURN("bigquery", "project3", "dataset1", "table1"): { - models.TableURN("bigquery", "project1", "dataset1", "table1"): { + util.TableURN("project3", "dataset1", "table1"): { + util.TableURN("project1", "dataset1", "table1"): { Usage: 3, }, - models.TableURN("bigquery", "project2", "dataset1", "table1"): { + util.TableURN("project2", "dataset1", "table1"): { Usage: 1, }, - models.TableURN("bigquery", "project4", "dataset1", "table1"): { + util.TableURN("project4", "dataset1", "table1"): { Usage: 1, }, }, - models.TableURN("bigquery", "project4", "dataset1", "table1"): { - models.TableURN("bigquery", "project1", "dataset1", "table1"): { + util.TableURN("project4", "dataset1", "table1"): { + util.TableURN("project1", "dataset1", "table1"): { Usage: 1, }, - 
models.TableURN("bigquery", "project3", "dataset1", "table1"): { + util.TableURN("project3", "dataset1", "table1"): { Usage: 1, }, }, } var testDataTableUsage1234 = map[string]int64{ - models.TableURN("bigquery", "project1", "dataset1", "table1"): 4, - models.TableURN("bigquery", "project2", "dataset1", "table1"): 1, - models.TableURN("bigquery", "project3", "dataset1", "table1"): 3, - models.TableURN("bigquery", "project4", "dataset1", "table1"): 1, + util.TableURN("project1", "dataset1", "table1"): 4, + util.TableURN("project2", "dataset1", "table1"): 1, + util.TableURN("project3", "dataset1", "table1"): 3, + util.TableURN("project4", "dataset1", "table1"): 1, } var testDataFilterCondition1234 = map[string]map[string]bool{ - models.TableURN("bigquery", "project1", "dataset1", "table1"): { + util.TableURN("project1", "dataset1", "table1"): { "WHERE column_1 IS TRUE": true, "WHERE t1.field2 = 'valid'": true, "where job_type=\"query\" and statement_type=\"insert\"": true, }, - models.TableURN("bigquery", "project3", "dataset1", "table1"): { + util.TableURN("project3", "dataset1", "table1"): { "WHERE column_1 IS TRUE": true, "WHERE t1.field2 = 'valid'": true, }, - models.TableURN("bigquery", "project4", "dataset1", "table1"): { + util.TableURN("project4", "dataset1", "table1"): { "WHERE t1.field2 = 'valid'": true, }, } diff --git a/plugins/extractors/bigquery/bigquery.go b/plugins/extractors/bigquery/bigquery.go index 52dd34040..0df9b413f 100644 --- a/plugins/extractors/bigquery/bigquery.go +++ b/plugins/extractors/bigquery/bigquery.go @@ -15,6 +15,7 @@ import ( assetsv1beta1 "github.com/odpf/meteor/models/odpf/assets/v1beta1" "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/plugins/extractors/bigquery/auditlog" + "github.com/odpf/meteor/plugins/extractors/bigquery/util" "github.com/odpf/meteor/registry" "github.com/odpf/meteor/utils" "github.com/odpf/salt/log" @@ -59,8 +60,16 @@ service_account_json: |- collect_table_usage: false usage_period_in_day: 7` +var info = plugins.Info{ + Description: "Big Query table metadata and metrics", + SampleConfig: sampleConfig, + Tags: []string{"gcp", "table", "extractor"}, + Summary: summary, +} + // Extractor manages the communication with the bigquery service type Extractor struct { + plugins.BaseExtractor logger log.Logger client *bigquery.Client config Config @@ -69,32 +78,20 @@ type Extractor struct { func New(logger log.Logger) *Extractor { galc := auditlog.New(logger) - return &Extractor{ + + e := &Extractor{ logger: logger, galClient: galc, } -} - -// Info returns the detailed information about the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "Big Query table metadata and metrics", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"gcp", "table", "extractor"}, - } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return e } // Init initializes the extractor -func (e *Extractor) Init(ctx context.Context, configMap map[string]interface{}) (err error) { - err = utils.BuildConfig(configMap, &e.config) - if err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } e.client, err = e.createClient(ctx) @@ -185,7 +182,7 @@ func (e 
*Extractor) buildTable(ctx context.Context, t *bigquery.Table, md *bigqu } tableFQN := t.FullyQualifiedName() - tableURN := models.TableURN("bigquery", t.ProjectID, t.DatasetID, t.TableID) + tableURN := util.TableURN(t.ProjectID, t.DatasetID, t.TableID) tableProfile := e.buildTableProfile(tableURN, tableStats) diff --git a/plugins/extractors/bigquery/bigquery_test.go b/plugins/extractors/bigquery/bigquery_test.go index f11a6808f..6b7a50a72 100644 --- a/plugins/extractors/bigquery/bigquery_test.go +++ b/plugins/extractors/bigquery/bigquery_test.go @@ -19,19 +19,23 @@ func TestInit(t *testing.T) { extr := bigquery.New(utils.Logger) ctx, cancel := context.WithCancel(context.Background()) defer cancel() - err := extr.Init(ctx, map[string]interface{}{ - "wrong-config": "sample-project", - }) + err := extr.Init(ctx, plugins.Config{ + URNScope: "test-bigquery", + RawConfig: map[string]interface{}{ + "wrong-config": "sample-project", + }}) - assert.Equal(t, plugins.InvalidConfigError{}, err) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) t.Run("should not return invalid config error if config is valid", func(t *testing.T) { extr := bigquery.New(utils.Logger) ctx, cancel := context.WithCancel(context.Background()) defer cancel() - err := extr.Init(ctx, map[string]interface{}{ - "project_id": "sample-project", - }) + err := extr.Init(ctx, plugins.Config{ + URNScope: "test-bigquery", + RawConfig: map[string]interface{}{ + "project_id": "sample-project", + }}) assert.NotEqual(t, plugins.InvalidConfigError{}, err) }) diff --git a/plugins/extractors/bigquery/profile_test.go b/plugins/extractors/bigquery/profile_test.go index 10a570899..d0c15e4d6 100644 --- a/plugins/extractors/bigquery/profile_test.go +++ b/plugins/extractors/bigquery/profile_test.go @@ -7,13 +7,13 @@ import ( "testing" "github.com/alecthomas/assert" - "github.com/odpf/meteor/models" assetsv1beta1 "github.com/odpf/meteor/models/odpf/assets/v1beta1" "github.com/odpf/meteor/plugins/extractors/bigquery/auditlog" + "github.com/odpf/meteor/plugins/extractors/bigquery/util" ) func TestBuildTableProfile(t *testing.T) { - tableURN := models.TableURN("bigquery", "project1", "dataset1", "table1") + tableURN := util.TableURN("project1", "dataset1", "table1") t.Run("table profile usage related fields are empty if usage collection is disabled", func(t *testing.T) { var tableStats *auditlog.TableStats @@ -45,19 +45,19 @@ func TestBuildTableProfile(t *testing.T) { t.Run("table profile usage related fields are populated if table stats is not nil and usage collection is enabled", func(t *testing.T) { tableStats := &auditlog.TableStats{ TableUsage: map[string]int64{ - models.TableURN("bigquery", "project1", "dataset1", "table1"): 5, - models.TableURN("bigquery", "project2", "dataset1", "table1"): 3, - models.TableURN("bigquery", "project3", "dataset1", "table1"): 1, + util.TableURN("project1", "dataset1", "table1"): 5, + util.TableURN("project2", "dataset1", "table1"): 3, + util.TableURN("project3", "dataset1", "table1"): 1, }, JoinDetail: map[string]map[string]auditlog.JoinDetail{ - models.TableURN("bigquery", "project1", "dataset1", "table1"): { - models.TableURN("bigquery", "project2", "dataset1", "table1"): auditlog.JoinDetail{ + util.TableURN("project1", "dataset1", "table1"): { + util.TableURN("project2", "dataset1", "table1"): auditlog.JoinDetail{ Usage: 1, }, - models.TableURN("bigquery", "project3", "dataset1", "table1"): auditlog.JoinDetail{ + util.TableURN("project3", "dataset1", "table1"): auditlog.JoinDetail{ Usage: 3, }, - 
models.TableURN("bigquery", "project4", "dataset1", "table1"): auditlog.JoinDetail{ + util.TableURN("project4", "dataset1", "table1"): auditlog.JoinDetail{ Usage: 1, }, }, @@ -74,15 +74,15 @@ func TestBuildTableProfile(t *testing.T) { assert.EqualValues(t, 5, tp.UsageCount) assert.Contains(t, tp.Joins, &assetsv1beta1.Join{ - Urn: models.TableURN("bigquery", "project2", "dataset1", "table1"), + Urn: util.TableURN("project2", "dataset1", "table1"), Count: 1, }) assert.Contains(t, tp.Joins, &assetsv1beta1.Join{ - Urn: models.TableURN("bigquery", "project3", "dataset1", "table1"), + Urn: util.TableURN("project3", "dataset1", "table1"), Count: 3, }) assert.Contains(t, tp.Joins, &assetsv1beta1.Join{ - Urn: models.TableURN("bigquery", "project4", "dataset1", "table1"), + Urn: util.TableURN("project4", "dataset1", "table1"), Count: 1, }) }) diff --git a/plugins/extractors/bigquery/util/table_fqn.go b/plugins/extractors/bigquery/util/table_fqn.go new file mode 100644 index 000000000..b04bcd176 --- /dev/null +++ b/plugins/extractors/bigquery/util/table_fqn.go @@ -0,0 +1,13 @@ +package util + +import ( + "fmt" + + "github.com/odpf/meteor/models" +) + +func TableURN(projectID, datasetID, tableID string) string { + tableFQN := fmt.Sprintf("%s:%s.%s", projectID, datasetID, tableID) + + return models.NewURN("bigquery", projectID, "table", tableFQN) +} diff --git a/plugins/extractors/bigtable/bigtable.go b/plugins/extractors/bigtable/bigtable.go index b5dd88559..c929e4c5d 100644 --- a/plugins/extractors/bigtable/bigtable.go +++ b/plugins/extractors/bigtable/bigtable.go @@ -22,13 +22,22 @@ import ( //go:embed README.md var summary string +const ( + service = "bigtable" +) + // Config holds the configurations for the bigtable extractor type Config struct { ProjectID string `mapstructure:"project_id" validate:"required"` } -var sampleConfig = ` -project_id: google-project-id` +var info = plugins.Info{ + Description: "Compressed, high-performance, proprietary data storage system.", + Summary: summary, + Tags: []string{"gcp", "extractor"}, + SampleConfig: ` + project_id: google-project-id`, +} // InstancesFetcher is an interface for fetching instances type InstancesFetcher interface { @@ -42,40 +51,27 @@ var ( // Extractor used to extract bigtable metadata type Extractor struct { + plugins.BaseExtractor config Config logger log.Logger instanceNames []string } func New(logger log.Logger) *Extractor { - return &Extractor{ + e := &Extractor{ logger: logger, } -} - -// Info returns the brief information about the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "Compressed, high-performance, proprietary data storage system.", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"gcp", "extractor"}, - } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Validate validates the configuration -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return e } -func (e *Extractor) Init(ctx context.Context, configMap map[string]interface{}) (err error) { - var config Config - err = utils.BuildConfig(configMap, &config) - if err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } - client, err := instanceAdminClientCreator(ctx, config) + client, err := instanceAdminClientCreator(ctx, e.config) if err != nil { return } @@ -87,7 
+83,7 @@ func (e *Extractor) Init(ctx context.Context, configMap map[string]interface{}) return } -//Extract checks if the extractor is configured and +// Extract checks if the extractor is configured and // if so, then extracts the metadata and // returns the assets. func (e *Extractor) Extract(ctx context.Context, emit plugins.Emit) (err error) { @@ -128,9 +124,9 @@ func (e *Extractor) getTablesInfo(ctx context.Context, emit plugins.Emit) (err e familyInfoBytes, _ := json.Marshal(tableInfo.FamilyInfos) emit(models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: fmt.Sprintf("%s.%s.%s", e.config.ProjectID, instance, table), + Urn: models.NewURN(service, e.config.ProjectID, "table", fmt.Sprintf("%s.%s", instance, table)), Name: table, - Service: "bigtable", + Service: service, Type: "table", }, Properties: &facetsv1beta1.Properties{ diff --git a/plugins/extractors/bigtable/bigtable_test.go b/plugins/extractors/bigtable/bigtable_test.go index 94eb8a91f..7d635917f 100644 --- a/plugins/extractors/bigtable/bigtable_test.go +++ b/plugins/extractors/bigtable/bigtable_test.go @@ -14,20 +14,28 @@ import ( "github.com/stretchr/testify/assert" ) +const ( + urnScope = "test-bigtable" +) + func TestInit(t *testing.T) { t.Run("should return error if no project_id in config", func(t *testing.T) { - err := bt.New(utils.Logger).Init(context.TODO(), map[string]interface{}{ - "wrong-config": "sample-project", - }) + err := bt.New(utils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "wrong-config": "sample-project", + }}) - assert.Equal(t, plugins.InvalidConfigError{}, err) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) t.Run("should return error if project_id is empty", func(t *testing.T) { - err := bt.New(utils.Logger).Init(context.TODO(), map[string]interface{}{ - "project_id": "", - }) + err := bt.New(utils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "project_id": "", + }}) - assert.Equal(t, plugins.InvalidConfigError{}, err) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) } diff --git a/plugins/extractors/cassandra/cassandra.go b/plugins/extractors/cassandra/cassandra.go index e095b2f9d..c2f85f853 100644 --- a/plugins/extractors/cassandra/cassandra.go +++ b/plugins/extractors/cassandra/cassandra.go @@ -17,7 +17,6 @@ import ( "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/registry" - "github.com/odpf/meteor/utils" "github.com/odpf/salt/log" ) @@ -33,6 +32,10 @@ var defaultKeyspaceList = []string{ "system_traces", } +const ( + service = "cassandra" +) + // Config holds the set of configuration for the cassandra extractor type Config struct { UserID string `mapstructure:"user_id" validate:"required"` @@ -48,8 +51,16 @@ host: localhost port: 9042 ` +var info = plugins.Info{ + Description: "Table metadata from cassandra server.", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"oss", "extractor"}, +} + // Extractor manages the extraction of data from cassandra type Extractor struct { + plugins.BaseExtractor excludedKeyspaces map[string]bool logger log.Logger config Config @@ -59,31 +70,18 @@ type Extractor struct { // New returns a pointer to an initialized Extractor Object func New(logger log.Logger) *Extractor { - return &Extractor{ + e := &Extractor{ logger: logger, } -} - -// Info returns the brief information about the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: 
"Table metadata from cassandra server.", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"oss", "extractor"}, - } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Validate checks if the extractor is configured correctly -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return e } // Init initializes the extractor -func (e *Extractor) Init(ctx context.Context, configMap map[string]interface{}) (err error) { - //build config - if err := utils.BuildConfig(configMap, &e.config); err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } // build excluded database list @@ -105,7 +103,7 @@ func (e *Extractor) Init(ctx context.Context, configMap map[string]interface{}) return } -//Extract checks if the extractor is configured and +// Extract checks if the extractor is configured and // if the connection to the DB is successful // and then starts the extraction process func (e *Extractor) Extract(ctx context.Context, emit plugins.Emit) (err error) { @@ -166,9 +164,10 @@ func (e *Extractor) processTable(keyspace string, tableName string) (err error) // push table to channel e.emit(models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: fmt.Sprintf("%s.%s", keyspace, tableName), - Name: tableName, - Type: "table", + Urn: models.NewURN(service, e.UrnScope, "table", fmt.Sprintf("%s.%s", keyspace, tableName)), + Name: tableName, + Service: service, + Type: "table", }, Schema: &facetsv1beta1.Columns{ Columns: columns, diff --git a/plugins/extractors/cassandra/cassandra_test.go b/plugins/extractors/cassandra/cassandra_test.go index e57536e34..7930e24d7 100644 --- a/plugins/extractors/cassandra/cassandra_test.go +++ b/plugins/extractors/cassandra/cassandra_test.go @@ -32,6 +32,7 @@ const ( port = 9042 host = "127.0.0.1" keyspace = "cassandra_meteor_test" + urnScope = "test-cassandra" ) var session *gocql.Session @@ -105,12 +106,15 @@ func TestEmptyHosts(t *testing.T) { // TestInit tests the configs func TestInit(t *testing.T) { t.Run("should return error for invalid configs", func(t *testing.T) { - err := cassandra.New(utils.Logger).Init(context.TODO(), map[string]interface{}{ - "password": pass, - "host": host, + err := cassandra.New(utils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "password": pass, + "host": host, + }, }) - assert.Equal(t, plugins.InvalidConfigError{}, err) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) } @@ -120,11 +124,14 @@ func TestExtract(t *testing.T) { ctx := context.TODO() extr := cassandra.New(utils.Logger) - err := extr.Init(ctx, map[string]interface{}{ - "user_id": user, - "password": pass, - "host": host, - "port": port, + err := extr.Init(ctx, plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "user_id": user, + "password": pass, + "host": host, + "port": port, + }, }) if err != nil { t.Fatal(err) @@ -185,9 +192,10 @@ func getExpected() []models.Record { return []models.Record{ models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: keyspace + ".applicant", - Name: "applicant", - Type: "table", + Urn: "urn:cassandra:test-cassandra:table:" + keyspace + ".applicant", + Name: "applicant", + Service: "cassandra", + Type: "table", }, Schema: &facetsv1beta1.Columns{ 
Columns: []*facetsv1beta1.Column{ @@ -208,9 +216,10 @@ func getExpected() []models.Record { }), models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: keyspace + ".jobs", - Name: "jobs", - Type: "table", + Urn: "urn:cassandra:test-cassandra:table:" + keyspace + ".jobs", + Name: "jobs", + Service: "cassandra", + Type: "table", }, Schema: &facetsv1beta1.Columns{ Columns: []*facetsv1beta1.Column{ diff --git a/plugins/extractors/cassandra/localConfig/cassandra.yaml b/plugins/extractors/cassandra/localConfig/cassandra.yaml index bd099a06b..1dbb40a97 100644 --- a/plugins/extractors/cassandra/localConfig/cassandra.yaml +++ b/plugins/extractors/cassandra/localConfig/cassandra.yaml @@ -17,7 +17,7 @@ commitlog_segment_size_in_mb: 32 seed_provider: - class_name: org.apache.cassandra.locator.SimpleSeedProvider parameters: - - seeds: "172.17.0.3" + - seeds: "172.17.0.2" concurrent_reads: 32 concurrent_writes: 32 concurrent_counter_writes: 32 @@ -28,14 +28,14 @@ trickle_fsync: false trickle_fsync_interval_in_kb: 10240 storage_port: 7000 ssl_storage_port: 7001 -listen_address: 172.17.0.3 -broadcast_address: 172.17.0.3 +listen_address: 172.17.0.2 +broadcast_address: 172.17.0.2 start_native_transport: true native_transport_port: 9042 start_rpc: false rpc_address: 0.0.0.0 rpc_port: 9160 -broadcast_rpc_address: 172.17.0.3 +broadcast_rpc_address: 172.17.0.2 rpc_keepalive: true rpc_server_type: sync thrift_framed_transport_size_in_mb: 15 diff --git a/plugins/extractors/clickhouse/clickhouse.go b/plugins/extractors/clickhouse/clickhouse.go index 423206b2f..3276eb7ba 100644 --- a/plugins/extractors/clickhouse/clickhouse.go +++ b/plugins/extractors/clickhouse/clickhouse.go @@ -15,7 +15,6 @@ import ( assetsv1beta1 "github.com/odpf/meteor/models/odpf/assets/v1beta1" "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/registry" - "github.com/odpf/meteor/utils" "github.com/odpf/salt/log" ) @@ -30,9 +29,17 @@ type Config struct { var sampleConfig = ` connection_url: "tcp://localhost:3306?username=admin&password=pass123&debug=true"` +var info = plugins.Info{ + Description: "Column-oriented DBMS for online analytical processing.", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"oss", "extractor"}, +} + // Extractor manages the output stream // and logger interface for the extractor type Extractor struct { + plugins.BaseExtractor config Config logger log.Logger db *sql.DB @@ -40,30 +47,18 @@ type Extractor struct { // New returns a pointer to an initialized Extractor Object func New(logger log.Logger) *Extractor { - return &Extractor{ + e := &Extractor{ logger: logger, } -} - -// Info returns the brief information about the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "Column-oriented DBMS for online analytical processing.", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"oss", "extractor"}, - } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return e } // Init initializes the extractor -func (e *Extractor) Init(ctx context.Context, configMap map[string]interface{}) (err error) { - if err = utils.BuildConfig(configMap, &e.config); err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, 
config); err != nil { + return err } if e.db, err = sql.Open("clickhouse", e.config.ConnectionURL); err != nil { @@ -73,7 +68,7 @@ func (e *Extractor) Init(ctx context.Context, configMap map[string]interface{}) return } -//Extract checks if the extractor is configured and +// Extract checks if the extractor is configured and // if the connection to the DB is successful // and then starts the extraction process func (e *Extractor) Extract(ctx context.Context, emit plugins.Emit) (err error) { @@ -106,9 +101,10 @@ func (e *Extractor) extractTables(emit plugins.Emit) (err error) { emit(models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: fmt.Sprintf("%s.%s", dbName, tableName), - Name: tableName, - Type: "table", + Urn: models.NewURN("clickhouse", e.UrnScope, "table", fmt.Sprintf("%s.%s", dbName, tableName)), + Name: tableName, + Service: "clickhouse", + Type: "table", }, Schema: &facetsv1beta1.Columns{ Columns: columns, }, diff --git a/plugins/extractors/clickhouse/clickhouse_test.go b/plugins/extractors/clickhouse/clickhouse_test.go index 84bd139b1..e05d90172 100644 --- a/plugins/extractors/clickhouse/clickhouse_test.go +++ b/plugins/extractors/clickhouse/clickhouse_test.go @@ -33,6 +33,7 @@ const ( pass = "pass" globalhost = "%" port = "9000" + urnScope = "test-clickhouse" ) var ( @@ -88,11 +89,13 @@ func TestMain(m *testing.M) { func TestInit(t *testing.T) { t.Run("should return error for invalid configuration", func(t *testing.T) { - err := newExtractor().Init(context.TODO(), map[string]interface{}{ - "invalid_config": "invalid_config_value", - }) + err := newExtractor().Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "invalid_config": "invalid_config_value", + }}) - assert.Equal(t, plugins.InvalidConfigError{}, err) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) } @@ -100,8 +103,11 @@ func TestExtract(t *testing.T) { t.Run("should return mockdata we generated with clickhouse running on localhost", func(t *testing.T) { ctx := context.TODO() extr := newExtractor() - err := extr.Init(ctx, map[string]interface{}{ - "connection_url": fmt.Sprintf("tcp://%s?username=default&password=%s&debug=true", host, pass), + err := extr.Init(ctx, plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "connection_url": fmt.Sprintf("tcp://%s?username=default&password=%s&debug=true", host, pass), + }, }) if err != nil { t.Fatal(err) @@ -121,9 +127,10 @@ func getExpected() []models.Record { return []models.Record{ models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: "mockdata_meteor_metadata_test.applicant", - Name: "applicant", - Type: "table", + Urn: "urn:clickhouse:test-clickhouse:table:mockdata_meteor_metadata_test.applicant", + Name: "applicant", + Service: "clickhouse", + Type: "table", }, Schema: &facetsv1beta1.Columns{ Columns: []*facetsv1beta1.Column{ @@ -147,9 +154,10 @@ func getExpected() []models.Record { }), models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: "mockdata_meteor_metadata_test.jobs", - Name: "jobs", - Type: "table", + Urn: "urn:clickhouse:test-clickhouse:table:mockdata_meteor_metadata_test.jobs", + Name: "jobs", + Service: "clickhouse", + Type: "table", }, Schema: &facetsv1beta1.Columns{ Columns: []*facetsv1beta1.Column{ diff --git a/plugins/extractors/couchdb/couchdb.go b/plugins/extractors/couchdb/couchdb.go index 49e6248ff..4f1062a90 100644 --- a/plugins/extractors/couchdb/couchdb.go +++ 
b/plugins/extractors/couchdb/couchdb.go @@ -14,7 +14,6 @@ import ( assetsv1beta1 "github.com/odpf/meteor/models/odpf/assets/v1beta1" "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/registry" - "github.com/odpf/meteor/utils" "github.com/odpf/salt/log" ) @@ -34,8 +33,16 @@ type Config struct { var sampleConfig = `connection_url: http://admin:pass123@localhost:3306/` +var info = plugins.Info{ + Description: "Table metadata from CouchDB server,", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"oss", "extractor"}, +} + // Extractor manages the extraction of data from CouchDB type Extractor struct { + plugins.BaseExtractor client *kivik.Client db *kivik.DB excludedDbs map[string]bool @@ -46,31 +53,18 @@ type Extractor struct { // New returns a pointer to an initialized Extractor Object func New(logger log.Logger) *Extractor { - return &Extractor{ + e := &Extractor{ logger: logger, } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Info returns the brief information about the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "Table metadata from CouchDB server,", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"oss", "extractor"}, - } -} - -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return e } // Initialise the Extractor with Configurations -func (e *Extractor) Init(ctx context.Context, configMap map[string]interface{}) (err error) { - err = utils.BuildConfig(configMap, &e.config) - if err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } // build excluded database list @@ -140,9 +134,10 @@ func (e *Extractor) processTable(ctx context.Context, dbName string, docID strin // push table to channel e.emit(models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: fmt.Sprintf("%s.%s", dbName, docID), - Name: docID, - Type: "table", + Urn: models.NewURN("couchdb", e.UrnScope, "table", fmt.Sprintf("%s.%s", dbName, docID)), + Name: docID, + Service: "couchdb", + Type: "table", }, Schema: &facetsv1beta1.Columns{ Columns: columns, diff --git a/plugins/extractors/couchdb/couchdb_test.go b/plugins/extractors/couchdb/couchdb_test.go index 1e311a7eb..a868a95f8 100644 --- a/plugins/extractors/couchdb/couchdb_test.go +++ b/plugins/extractors/couchdb/couchdb_test.go @@ -25,10 +25,11 @@ import ( ) const ( - user = "meteor_test_user" - pass = "couchdb" - port = "5984" - testDB = "mockdata_meteor_metadata_test" + user = "meteor_test_user" + pass = "couchdb" + port = "5984" + testDB = "mockdata_meteor_metadata_test" + urnScope = "test-couchdb" ) var ( @@ -88,11 +89,14 @@ func TestMain(m *testing.M) { func TestInit(t *testing.T) { t.Run("should return error for invalid configs", func(t *testing.T) { - err := couchdb.New(utils.Logger).Init(context.TODO(), map[string]interface{}{ - "invalid_config": "invalid_config_value", + err := couchdb.New(utils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "invalid_config": "invalid_config_value", + }, }) - assert.Equal(t, plugins.InvalidConfigError{}, err) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) } @@ -101,8 +105,11 @@ func TestExtract(t *testing.T) { ctx := context.TODO() extr := 
couchdb.New(utils.Logger) - err := extr.Init(ctx, map[string]interface{}{ - "connection_url": fmt.Sprintf("http://%s:%s@%s/", user, pass, host), + err := extr.Init(ctx, plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "connection_url": fmt.Sprintf("http://%s:%s@%s/", user, pass, host), + }, }) if err != nil { t.Fatal(err) diff --git a/plugins/extractors/csv/csv.go b/plugins/extractors/csv/csv.go index 930f6e238..513bd3bc2 100644 --- a/plugins/extractors/csv/csv.go +++ b/plugins/extractors/csv/csv.go @@ -15,7 +15,6 @@ import ( facetsv1beta1 "github.com/odpf/meteor/models/odpf/assets/facets/v1beta1" assetsv1beta1 "github.com/odpf/meteor/models/odpf/assets/v1beta1" "github.com/odpf/meteor/registry" - "github.com/odpf/meteor/utils" "github.com/pkg/errors" "encoding/csv" @@ -35,8 +34,16 @@ type Config struct { var sampleConfig = ` path: ./path-to-a-file-or-a-directory` +var info = plugins.Info{ + Description: "Comma separated file", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"file", "extractor"}, +} + // Extractor manages the extraction of data from the extractor type Extractor struct { + plugins.BaseExtractor config Config logger log.Logger filePaths []string @@ -44,31 +51,17 @@ type Extractor struct { // New returns a pointer to an initialized Extractor Object func New(logger log.Logger) *Extractor { - return &Extractor{ + e := &Extractor{ logger: logger, } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Info returns the brief information about the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "Comma separated file", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"file", "extractor"}, - } + return e } -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) -} - -func (e *Extractor) Init(ctx context.Context, configMap map[string]interface{}) (err error) { - // build config - err = utils.BuildConfig(configMap, &e.config) - if err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } // build file paths to read from @@ -80,7 +73,7 @@ func (e *Extractor) Init(ctx context.Context, configMap map[string]interface{}) return } -//Extract checks if the extractor is configured and +// Extract checks if the extractor is configured and // returns the extracted data func (e *Extractor) Extract(ctx context.Context, emit plugins.Emit) (err error) { for _, filePath := range e.filePaths { @@ -116,7 +109,7 @@ func (e *Extractor) buildTable(filePath string) (table *assetsv1beta1.Table, err fileName := stat.Name() table = &assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: fileName, + Urn: models.NewURN("csv", e.UrnScope, "file", fileName), Name: fileName, Service: "csv", Type: "table", diff --git a/plugins/extractors/csv/csv_test.go b/plugins/extractors/csv/csv_test.go index 8707825ef..572c92bc6 100644 --- a/plugins/extractors/csv/csv_test.go +++ b/plugins/extractors/csv/csv_test.go @@ -24,8 +24,11 @@ func TestInit(t *testing.T) { config := map[string]interface{}{} err := csv.New(utils.Logger).Init( context.TODO(), - config) - assert.Equal(t, plugins.InvalidConfigError{}, err) + plugins.Config{ + URNScope: "test-csv", + RawConfig: config, + }) + assert.ErrorAs(t, err, 
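The test migrations follow the same shape across all plugins. Below is a condensed, self-contained sketch using the csv extractor; the test name is illustrative and the import paths follow the existing test files, while the `plugins.Config` fields, the `assert.ErrorAs` check, and the expected URN come from the updated csv tests in this diff:

```go
package csv_test

import (
	"context"
	"testing"

	"github.com/odpf/meteor/plugins"
	"github.com/odpf/meteor/plugins/extractors/csv"
	"github.com/odpf/meteor/test/utils"
	"github.com/stretchr/testify/assert"
)

func TestInitSketch(t *testing.T) {
	// The raw recipe config and the URN scope now travel together in plugins.Config.
	err := csv.New(utils.Logger).Init(context.TODO(), plugins.Config{
		URNScope:  "test-csv",
		RawConfig: map[string]interface{}{},
	})
	// Invalid configs are matched with errors.As semantics rather than strict
	// equality against an empty plugins.InvalidConfigError{} value.
	assert.ErrorAs(t, err, &plugins.InvalidConfigError{})

	// With a valid path, the scope ends up in the emitted URNs,
	// e.g. urn:csv:test-csv:file:test.csv for ./testdata/test.csv.
	err = csv.New(utils.Logger).Init(context.TODO(), plugins.Config{
		URNScope: "test-csv",
		RawConfig: map[string]interface{}{
			"path": "./testdata/test.csv",
		},
	})
	assert.NoError(t, err)
}
```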
&plugins.InvalidConfigError{}) }) } @@ -33,8 +36,11 @@ func TestExtract(t *testing.T) { t.Run("should extract data if path is a file", func(t *testing.T) { ctx := context.TODO() extr := csv.New(utils.Logger) - err := extr.Init(ctx, map[string]interface{}{ - "path": "./testdata/test.csv", + err := extr.Init(ctx, plugins.Config{ + URNScope: "test-csv", + RawConfig: map[string]interface{}{ + "path": "./testdata/test.csv", + }, }) if err != nil { t.Fatal(err) @@ -47,7 +53,7 @@ func TestExtract(t *testing.T) { expected := []models.Record{ models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: "test.csv", + Urn: "urn:csv:test-csv:file:test.csv", Name: "test.csv", Service: "csv", Type: "table", @@ -68,8 +74,11 @@ func TestExtract(t *testing.T) { t.Run("should extract data from all files if path is a dir", func(t *testing.T) { ctx := context.TODO() extr := csv.New(utils.Logger) - err := extr.Init(ctx, map[string]interface{}{ - "path": "./testdata", + err := extr.Init(ctx, plugins.Config{ + URNScope: "test-csv", + RawConfig: map[string]interface{}{ + "path": "./testdata", + }, }) if err != nil { t.Fatal(err) @@ -82,7 +91,7 @@ func TestExtract(t *testing.T) { expected := []models.Record{ models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: "test-2.csv", + Urn: "urn:csv:test-csv:file:test-2.csv", Name: "test-2.csv", Service: "csv", Type: "table", @@ -97,7 +106,7 @@ func TestExtract(t *testing.T) { }), models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: "test.csv", + Urn: "urn:csv:test-csv:file:test.csv", Name: "test.csv", Service: "csv", Type: "table", diff --git a/plugins/extractors/elastic/elastic.go b/plugins/extractors/elastic/elastic.go index 01eb39e8e..3a31cea38 100644 --- a/plugins/extractors/elastic/elastic.go +++ b/plugins/extractors/elastic/elastic.go @@ -4,7 +4,6 @@ import ( "context" _ "embed" "encoding/json" - "fmt" "reflect" "github.com/pkg/errors" @@ -16,7 +15,6 @@ import ( assetsv1beta1 "github.com/odpf/meteor/models/odpf/assets/v1beta1" "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/registry" - "github.com/odpf/meteor/utils" "github.com/odpf/salt/log" ) @@ -34,8 +32,16 @@ var sampleConfig = ` password: "changeme" host: elastic_server` +var info = plugins.Info{ + Description: "Search engine based on the Lucene library.", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"oss", "extractor"}, +} + // Extractor manages the extraction of data from elastic type Extractor struct { + plugins.BaseExtractor config Config logger log.Logger client *elasticsearch.Client @@ -43,32 +49,18 @@ type Extractor struct { // New returns a pointer to an initialized Extractor Object func New(logger log.Logger) *Extractor { - return &Extractor{ + e := &Extractor{ logger: logger, } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Info returns the brief information about the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "Search engine based on the Lucene library.", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"oss", "extractor"}, - } -} - -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return e } // Init initializes the extractor -func (e *Extractor) Init(ctx context.Context, configMap map[string]interface{}) (err error) { - //build config - err = utils.BuildConfig(configMap, 
&e.config) - if err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } //build elasticsearch client @@ -132,9 +124,10 @@ func (e *Extractor) Extract(ctx context.Context, emit plugins.Emit) (err error) emit(models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: fmt.Sprintf("%s.%s", "elasticsearch", indexName), - Name: indexName, - Type: "table", + Urn: models.NewURN("elasticsearch", e.UrnScope, "index", indexName), + Name: indexName, + Service: "elasticsearch", + Type: "table", }, Schema: &facetsv1beta1.Columns{ Columns: columns, diff --git a/plugins/extractors/elastic/elastic_test.go b/plugins/extractors/elastic/elastic_test.go index 5b8f5a90a..d2c84daae 100644 --- a/plugins/extractors/elastic/elastic_test.go +++ b/plugins/extractors/elastic/elastic_test.go @@ -30,9 +30,10 @@ import ( ) const ( - host = "http://localhost:9200" - pass = "secret_pass" - user = "elastic_meteor" + host = "http://localhost:9200" + pass = "secret_pass" + user = "elastic_meteor" + urnScope = "test-elasticsearch" ) var ( @@ -105,20 +106,26 @@ func TestMain(m *testing.M) { func TestInit(t *testing.T) { t.Run("should return error if no host in config", func(t *testing.T) { - err := newExtractor().Init(ctx, map[string]interface{}{ - "password": "pass", + err := newExtractor().Init(ctx, plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "password": "pass", + }, }) - assert.Equal(t, plugins.InvalidConfigError{}, err) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) } func TestExtract(t *testing.T) { t.Run("should return mockdata we generated with service running on localhost", func(t *testing.T) { extr := newExtractor() - err := extr.Init(ctx, map[string]interface{}{ - "host": host, - "user": user, - "password": pass, + err := extr.Init(ctx, plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "host": host, + "user": user, + "password": pass, + }, }) if err != nil { t.Fatal(err) @@ -189,9 +196,10 @@ func getExpectedVal() []models.Record { return []models.Record{ models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: "elasticsearch.index1", - Name: "index1", - Type: "table", + Urn: "urn:elasticsearch:test-elasticsearch:index:index1", + Name: "index1", + Service: "elasticsearch", + Type: "table", }, Schema: &facetsv1beta1.Columns{ Columns: []*facetsv1beta1.Column{ @@ -207,9 +215,10 @@ func getExpectedVal() []models.Record { }), models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: "elasticsearch.index2", - Name: "index2", - Type: "table", + Urn: "urn:elasticsearch:test-elasticsearch:index:index2", + Name: "index2", + Service: "elasticsearch", + Type: "table", }, Schema: &facetsv1beta1.Columns{ Columns: []*facetsv1beta1.Column{ diff --git a/plugins/extractors/gcs/gcs.go b/plugins/extractors/gcs/gcs.go index bd39016ac..c3988e759 100644 --- a/plugins/extractors/gcs/gcs.go +++ b/plugins/extractors/gcs/gcs.go @@ -16,7 +16,6 @@ import ( "cloud.google.com/go/storage" "github.com/odpf/meteor/plugins" - "github.com/odpf/meteor/utils" "github.com/odpf/salt/log" "google.golang.org/api/iterator" "google.golang.org/api/option" @@ -25,8 +24,6 @@ import ( //go:embed README.md var summary string -const metadataSource = "googlecloudstorage" - // Config holds the set of configuration for the extractor type Config struct { ProjectID string 
`mapstructure:"project_id" validate:"required"` @@ -50,9 +47,17 @@ service_account_json: |- "client_x509_cert_url": "xxxxxxx" }` +var info = plugins.Info{ + Description: "Online file storage web service for storing and accessing data.", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"gcp", "extractor"}, +} + // Extractor manages the extraction of data // from the google cloud storage type Extractor struct { + plugins.BaseExtractor client *storage.Client logger log.Logger config Config @@ -60,32 +65,18 @@ type Extractor struct { // New returns a pointer to an initialized Extractor Object func New(logger log.Logger) *Extractor { - return &Extractor{ + e := &Extractor{ logger: logger, } -} - -// Info returns the brief information about the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "Online file storage web service for storing and accessing data.", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"gcp", "extractor"}, - } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return e } // Init initializes the extractor -func (e *Extractor) Init(ctx context.Context, configMap map[string]interface{}) (err error) { - // build config - err = utils.BuildConfig(configMap, &e.config) - if err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } // create client @@ -140,9 +131,9 @@ func (e *Extractor) extractBlobs(ctx context.Context, bucketName string, project func (e *Extractor) buildBucket(b *storage.BucketAttrs, projectID string, blobs []*assetsv1beta1.Blob) (bucket *assetsv1beta1.Bucket) { bucket = &assetsv1beta1.Bucket{ Resource: &commonv1beta1.Resource{ - Urn: fmt.Sprintf("%s/%s", projectID, b.Name), + Urn: models.NewURN("gcs", projectID, "bucket", b.Name), Name: b.Name, - Service: metadataSource, + Service: "gcs", Type: "bucket", }, Location: b.Location, @@ -163,7 +154,7 @@ func (e *Extractor) buildBucket(b *storage.BucketAttrs, projectID string, blobs func (e *Extractor) buildBlob(blob *storage.ObjectAttrs, projectID string) *assetsv1beta1.Blob { return &assetsv1beta1.Blob{ - Urn: fmt.Sprintf("%s/%s/%s", projectID, blob.Bucket, blob.Name), + Urn: models.NewURN("gcs", projectID, "object", fmt.Sprintf("%s/%s", blob.Bucket, blob.Name)), Name: blob.Name, Size: blob.Size, DeleteTime: timestamppb.New(blob.Deleted), diff --git a/plugins/extractors/gcs/gcs_test.go b/plugins/extractors/gcs/gcs_test.go index c3d6e5931..c11986fa2 100644 --- a/plugins/extractors/gcs/gcs_test.go +++ b/plugins/extractors/gcs/gcs_test.go @@ -16,10 +16,13 @@ import ( func TestInit(t *testing.T) { t.Run("should return error if no project_id in config", func(t *testing.T) { - err := gcs.New(utils.Logger).Init(context.TODO(), map[string]interface{}{ - "wrong-config": "sample-project", + err := gcs.New(utils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: "test", + RawConfig: map[string]interface{}{ + "wrong-config": "sample-project", + }, }) - assert.Equal(t, plugins.InvalidConfigError{}, err) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) } diff --git a/plugins/extractors/github/github.go b/plugins/extractors/github/github.go index 35b13981c..91e7befa7 100644 --- 
a/plugins/extractors/github/github.go +++ b/plugins/extractors/github/github.go @@ -12,7 +12,6 @@ import ( assetsv1beta1 "github.com/odpf/meteor/models/odpf/assets/v1beta1" "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/registry" - "github.com/odpf/meteor/utils" "github.com/odpf/salt/log" "golang.org/x/oauth2" ) @@ -30,33 +29,35 @@ var sampleConfig = ` org: odpf token: github_token` +var info = plugins.Info{ + Description: "User list from Github organisation.", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"platform", "extractor"}, +} + // Extractor manages the extraction of data from the extractor type Extractor struct { + plugins.BaseExtractor logger log.Logger config Config client *github.Client } -// Info returns the brief information about the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "User list from Github organisation.", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"platform", "extractor"}, +// New returns a pointer to an initialized Extractor Object +func New(logger log.Logger) *Extractor { + e := &Extractor{ + logger: logger, } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return e } // Init initializes the extractor -func (e *Extractor) Init(ctx context.Context, configMap map[string]interface{}) (err error) { - err = utils.BuildConfig(configMap, &e.config) - if err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } ts := oauth2.StaticTokenSource( @@ -84,8 +85,11 @@ func (e *Extractor) Extract(ctx context.Context, emit plugins.Emit) (err error) } emit(models.NewRecord(&assetsv1beta1.User{ Resource: &commonv1beta1.Resource{ - Urn: usr.GetURL(), - Type: "user", + Urn: models.NewURN("github", e.UrnScope, "user", usr.GetNodeID()), + Service: "github", + Name: usr.GetEmail(), + Type: "user", + Url: usr.GetURL(), }, Email: usr.GetEmail(), Username: usr.GetLogin(), @@ -100,9 +104,7 @@ func (e *Extractor) Extract(ctx context.Context, emit plugins.Emit) (err error) // init registers the extractor to catalog func init() { if err := registry.Extractors.Register("github", func() plugins.Extractor { - return &Extractor{ - logger: plugins.GetLog(), - } + return New(plugins.GetLog()) }); err != nil { panic(err) } diff --git a/plugins/extractors/grafana/grafana.go b/plugins/extractors/grafana/grafana.go index b2259a06f..ff8af238b 100644 --- a/plugins/extractors/grafana/grafana.go +++ b/plugins/extractors/grafana/grafana.go @@ -13,7 +13,6 @@ import ( assetsv1beta1 "github.com/odpf/meteor/models/odpf/assets/v1beta1" "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/registry" - "github.com/odpf/meteor/utils" "github.com/odpf/salt/log" ) @@ -30,8 +29,16 @@ var sampleConfig = ` base_url: grafana_server api_key: your_api_key` +var info = plugins.Info{ + Description: "Dashboard list from Grafana server.", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"oss", "extractor"}, +} + // Extractor manages the communication with the Grafana Server type Extractor struct { + plugins.BaseExtractor client *Client config Config logger log.Logger @@ -39,32 +46,18 @@ type Extractor struct { // New returns a pointer to an initialized 
Extractor Object func New(logger log.Logger) *Extractor { - return &Extractor{ + e := &Extractor{ logger: logger, } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Info returns the brief information about the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "Dashboard list from Grafana server.", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"oss", "extractor"}, - } -} - -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return e } // Init initializes the extractor -func (e *Extractor) Init(ctx context.Context, configMap map[string]interface{}) (err error) { - // build config - err = utils.BuildConfig(configMap, &e.config) - if err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } // build client @@ -102,7 +95,7 @@ func (e *Extractor) grafanaDashboardToMeteorDashboard(dashboard DashboardDetail) } return &assetsv1beta1.Dashboard{ Resource: &commonv1beta1.Resource{ - Urn: fmt.Sprintf("grafana.%s", dashboard.Dashboard.UID), + Urn: models.NewURN("grafana", e.UrnScope, "dashboard", dashboard.Dashboard.UID), Name: dashboard.Meta.Slug, Type: "dashboard", Service: "grafana", @@ -120,7 +113,7 @@ func (e *Extractor) grafanaPanelToMeteorChart(panel Panel, dashboardUID string, rawQuery = panel.Targets[0].RawSQL } return assetsv1beta1.Chart{ - Urn: fmt.Sprintf("%s.%d", dashboardUID, panel.ID), + Urn: models.NewURN("grafana", e.UrnScope, "panel", fmt.Sprintf("%s.%d", dashboardUID, panel.ID)), Name: panel.Title, Type: panel.Type, Source: "grafana", diff --git a/plugins/extractors/grafana/grafana_test.go b/plugins/extractors/grafana/grafana_test.go index 5ba154194..63208c055 100644 --- a/plugins/extractors/grafana/grafana_test.go +++ b/plugins/extractors/grafana/grafana_test.go @@ -23,6 +23,7 @@ import ( ) var testServer *httptest.Server +var urnScope string = "test-grafana" func TestMain(m *testing.M) { testServer = NewTestServer() @@ -36,21 +37,25 @@ func TestMain(m *testing.M) { func TestInit(t *testing.T) { t.Run("should return error if for empty base_url in config", func(t *testing.T) { - err := grafana.New(utils.Logger).Init(context.TODO(), map[string]interface{}{ - "base_url": "", - "api_key": "qwerty123", - }) + err := grafana.New(utils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "base_url": "", + "api_key": "qwerty123", + }}) - assert.Equal(t, plugins.InvalidConfigError{}, err) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) t.Run("should return error if for empty api_key in config", func(t *testing.T) { - err := grafana.New(utils.Logger).Init(context.TODO(), map[string]interface{}{ - "base_url": testServer.URL, - "api_key": "", - }) + err := grafana.New(utils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "base_url": testServer.URL, + "api_key": "", + }}) - assert.Equal(t, plugins.InvalidConfigError{}, err) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) } @@ -60,7 +65,7 @@ func TestExtract(t *testing.T) { expectedData := []models.Record{ models.NewRecord(&assetsv1beta1.Dashboard{ Resource: &commonv1beta1.Resource{ - Urn: "grafana.HzK8qNW7z", + Urn: 
"urn:grafana:test-grafana:dashboard:HzK8qNW7z", Name: "new-dashboard-copy", Service: "grafana", Url: fmt.Sprintf("%s/d/HzK8qNW7z/new-dashboard-copy", testServer.URL), @@ -69,7 +74,7 @@ func TestExtract(t *testing.T) { }, Charts: []*assetsv1beta1.Chart{ { - Urn: "HzK8qNW7z.2", + Urn: "urn:grafana:test-grafana:panel:HzK8qNW7z.2", Name: "Panel Title", Type: "timeseries", Source: "grafana", @@ -84,7 +89,7 @@ func TestExtract(t *testing.T) { }), models.NewRecord(&assetsv1beta1.Dashboard{ Resource: &commonv1beta1.Resource{ - Urn: "grafana.5WsKOvW7z", + Urn: "urn:grafana:test-grafana:dashboard:5WsKOvW7z", Name: "test-dashboard-updated", Service: "grafana", Url: fmt.Sprintf("%s/d/5WsKOvW7z/test-dashboard-updated", testServer.URL), @@ -93,7 +98,7 @@ func TestExtract(t *testing.T) { }, Charts: []*assetsv1beta1.Chart{ { - Urn: "5WsKOvW7z.4", + Urn: "urn:grafana:test-grafana:panel:5WsKOvW7z.4", Name: "Panel Random", Type: "table", Source: "grafana", @@ -105,7 +110,7 @@ func TestExtract(t *testing.T) { DashboardSource: "grafana", }, { - Urn: "5WsKOvW7z.2", + Urn: "urn:grafana:test-grafana:panel:5WsKOvW7z.2", Name: "Panel Title", Type: "timeseries", Source: "grafana", @@ -122,9 +127,12 @@ func TestExtract(t *testing.T) { ctx := context.TODO() extractor := grafana.New(utils.Logger) - err := extractor.Init(ctx, map[string]interface{}{ - "base_url": testServer.URL, - "api_key": "qwerty123", + err := extractor.Init(ctx, plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "base_url": testServer.URL, + "api_key": "qwerty123", + }, }) if err != nil { t.Fatal(err) diff --git a/plugins/extractors/kafka/README.md b/plugins/extractors/kafka/README.md index c72e0178c..9c133e143 100644 --- a/plugins/extractors/kafka/README.md +++ b/plugins/extractors/kafka/README.md @@ -7,7 +7,6 @@ source: name: kafka config: broker: "localhost:9092" - label: "my-kafka-cluster" ``` ## Inputs @@ -15,7 +14,6 @@ source: | Key | Value | Example | Description | | | :-- | :---- | :------ | :---------- | :- | | `broker` | `string` | `localhost:9092` | Kafka broker's host | *required* | -| `label` | `string` | `samplePrefix` | Label will be used as a part in Urn components | *required* | ## Outputs diff --git a/plugins/extractors/kafka/kafka.go b/plugins/extractors/kafka/kafka.go index 7a2548972..917ae2d30 100644 --- a/plugins/extractors/kafka/kafka.go +++ b/plugins/extractors/kafka/kafka.go @@ -3,7 +3,6 @@ package kafka import ( "context" _ "embed" // used to print the embedded assets - "fmt" "github.com/pkg/errors" @@ -14,7 +13,6 @@ import ( "github.com/odpf/meteor/registry" kafka "github.com/segmentio/kafka-go" - "github.com/odpf/meteor/utils" "github.com/odpf/salt/log" ) @@ -30,16 +28,22 @@ var defaultTopics = map[string]byte{ // Config holds the set of configuration for the kafka extractor type Config struct { Broker string `mapstructure:"broker" validate:"required"` - Label string `mapstructure:"label" validate:"required"` } var sampleConfig = ` -broker: "localhost:9092" -label: "my-kafka"` +broker: "localhost:9092"` + +var info = plugins.Info{ + Description: "Topic list from Apache Kafka.", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"oss", "extractor"}, +} // Extractor manages the extraction of data // from a kafka broker type Extractor struct { + plugins.BaseExtractor // internal states conn *kafka.Conn logger log.Logger @@ -48,31 +52,18 @@ type Extractor struct { // New returns a pointer to an initialized Extractor Object func New(logger log.Logger) *Extractor { - return &Extractor{ 
+ e := &Extractor{ logger: logger, } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Info returns the brief information about the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "Topic list from Apache Kafka.", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"oss", "extractor"}, - } -} - -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return e } // Init initializes the extractor -func (e *Extractor) Init(ctx context.Context, configMap map[string]interface{}) (err error) { - err = utils.BuildConfig(configMap, &e.config) - if err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } // create connection @@ -124,7 +115,7 @@ func (e *Extractor) Extract(ctx context.Context, emit plugins.Emit) (err error) func (e *Extractor) buildTopic(topic string, numOfPartitions int) *assetsv1beta1.Topic { return &assetsv1beta1.Topic{ Resource: &commonv1beta1.Resource{ - Urn: fmt.Sprintf("kafka::%s/%s", e.config.Label, topic), + Urn: models.NewURN("kafka", e.UrnScope, "topic", topic), Name: topic, Service: "kafka", Type: "topic", diff --git a/plugins/extractors/kafka/kafka_test.go b/plugins/extractors/kafka/kafka_test.go index 008d23ca3..151d2718c 100644 --- a/plugins/extractors/kafka/kafka_test.go +++ b/plugins/extractors/kafka/kafka_test.go @@ -29,6 +29,7 @@ import ( var ( brokerHost = "localhost:9093" + urnScope = "test-kafka" ) func TestMain(m *testing.M) { @@ -98,11 +99,13 @@ func TestMain(m *testing.M) { func TestInit(t *testing.T) { t.Run("should return error for invalid config", func(t *testing.T) { - err := newExtractor().Init(context.TODO(), map[string]interface{}{ - "wrong-config": "wrong-value", - }) + err := newExtractor().Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "wrong-config": "wrong-value", + }}) - assert.Equal(t, plugins.InvalidConfigError{}, err) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) } @@ -110,10 +113,11 @@ func TestExtract(t *testing.T) { t.Run("should emit list of topic metadata", func(t *testing.T) { ctx := context.TODO() extr := newExtractor() - err := extr.Init(ctx, map[string]interface{}{ - "broker": brokerHost, - "label": "my-kafka-cluster", - }) + err := extr.Init(ctx, plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "broker": brokerHost, + }}) if err != nil { t.Fatal(err) } @@ -126,7 +130,7 @@ func TestExtract(t *testing.T) { expected := []models.Record{ models.NewRecord(&assetsv1beta1.Topic{ Resource: &commonv1beta1.Resource{ - Urn: "kafka::my-kafka-cluster/meteor-test-topic-1", + Urn: "urn:kafka:test-kafka:topic:meteor-test-topic-1", Name: "meteor-test-topic-1", Service: "kafka", Type: "topic", @@ -137,7 +141,7 @@ func TestExtract(t *testing.T) { }), models.NewRecord(&assetsv1beta1.Topic{ Resource: &commonv1beta1.Resource{ - Urn: "kafka::my-kafka-cluster/meteor-test-topic-2", + Urn: "urn:kafka:test-kafka:topic:meteor-test-topic-2", Name: "meteor-test-topic-2", Service: "kafka", Type: "topic", @@ -148,7 +152,7 @@ func TestExtract(t *testing.T) { }), models.NewRecord(&assetsv1beta1.Topic{ Resource: &commonv1beta1.Resource{ - Urn: "kafka::my-kafka-cluster/meteor-test-topic-3", + Urn: 
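Since every extractor in this PR is migrated the same way, here is a minimal sketch of the constructor and Init pattern for a hypothetical `example` plugin. Everything outside the `plugins` and `log` packages is illustrative; `BaseExtractor` is assumed to supply `Info()`, `Validate()`, config decoding, and the `UrnScope` field, matching how it is used throughout this diff:

```go
package example

import (
	"context"

	"github.com/odpf/meteor/plugins"
	"github.com/odpf/salt/log"
)

// Config holds whatever the plugin needs from the recipe.
type Config struct {
	Host string `mapstructure:"host" validate:"required"`
}

var info = plugins.Info{
	Description:  "Hypothetical example extractor.",
	SampleConfig: `host: localhost`,
	Summary:      "example summary",
	Tags:         []string{"example", "extractor"},
}

// Extractor embeds BaseExtractor, which replaces the per-plugin Info and Validate boilerplate.
type Extractor struct {
	plugins.BaseExtractor
	config Config
	logger log.Logger
}

func New(logger log.Logger) *Extractor {
	e := &Extractor{logger: logger}
	e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config)

	return e
}

func (e *Extractor) Init(ctx context.Context, config plugins.Config) error {
	// Decodes config.RawConfig into e.config and keeps config.URNScope available
	// as e.UrnScope for URN building.
	if err := e.BaseExtractor.Init(ctx, config); err != nil {
		return err
	}

	// Plugin-specific setup (clients, connections) would follow here.
	return nil
}

// Extract would emit records whose URNs are built with models.NewURN(service, e.UrnScope, kind, id).
func (e *Extractor) Extract(ctx context.Context, emit plugins.Emit) error {
	return nil
}
```

The net effect is that emitted URNs are namespaced by the configured URN scope rather than by ad hoc per-plugin settings such as kafka's removed `label` option.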
"urn:kafka:test-kafka:topic:meteor-test-topic-3", Name: "meteor-test-topic-3", Service: "kafka", Type: "topic", diff --git a/plugins/extractors/mariadb/mariadb.go b/plugins/extractors/mariadb/mariadb.go index 3abfe229e..34cee8c60 100644 --- a/plugins/extractors/mariadb/mariadb.go +++ b/plugins/extractors/mariadb/mariadb.go @@ -10,7 +10,6 @@ import ( "github.com/odpf/meteor/models" "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/registry" - "github.com/odpf/meteor/utils" "github.com/odpf/salt/log" "github.com/pkg/errors" @@ -38,8 +37,16 @@ type Config struct { var sampleConfig = `connection_url: "admin:pass123@tcp(localhost:3306)/"` +var info = plugins.Info{ + Description: "Table metadata from Mariadb server.", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"oss", "extractor"}, +} + // Extractor manages the extraction of data from Mariadb type Extractor struct { + plugins.BaseExtractor excludedDbs map[string]bool logger log.Logger config Config @@ -49,31 +56,18 @@ type Extractor struct { // New returns a pointer to an initialized Extractor Object func New(logger log.Logger) *Extractor { - return &Extractor{ + e := &Extractor{ logger: logger, } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Info returns the brief information about the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "Table metadata from Mariadb server.", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"oss", "extractor"}, - } -} - -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return e } // Init initializes the extractor -func (e *Extractor) Init(_ context.Context, configMap map[string]interface{}) (err error) { - // Build and validate config received from recipe - if err = utils.BuildConfig(configMap, &e.config); err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } // build excluded database list @@ -141,9 +135,10 @@ func (e *Extractor) processTable(database string, tableName string) (err error) // push table to channel e.emit(models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: fmt.Sprintf("%s.%s", database, tableName), - Name: tableName, - Type: "table", + Urn: models.NewURN("mariadb", e.UrnScope, "table", fmt.Sprintf("%s.%s", database, tableName)), + Name: tableName, + Service: "mariadb", + Type: "table", }, Schema: &facetsv1beta1.Columns{ Columns: columns, diff --git a/plugins/extractors/mariadb/mariadb_test.go b/plugins/extractors/mariadb/mariadb_test.go index 836c4e922..697fceff1 100644 --- a/plugins/extractors/mariadb/mariadb_test.go +++ b/plugins/extractors/mariadb/mariadb_test.go @@ -7,6 +7,10 @@ import ( "context" "database/sql" "fmt" + "log" + "os" + "testing" + _ "github.com/go-sql-driver/mysql" assetsv1beta1 "github.com/odpf/meteor/models/odpf/assets/v1beta1" "github.com/odpf/meteor/plugins" @@ -16,16 +20,14 @@ import ( "github.com/ory/dockertest/v3" "github.com/ory/dockertest/v3/docker" "github.com/stretchr/testify/assert" - "log" - "os" - "testing" ) const ( - testDB = "test_db" - user = "test_user" - pass = "pass" - port = "3306" + testDB = "test_db" + user = "test_user" + pass = "pass" + port = "3306" + urnScope = "test-mariadb" ) var ( @@ -79,10 +81,12 @@ func TestMain(m 
*testing.M) { // TestInit tests the configs func TestInit(t *testing.T) { t.Run("should return error for invalid config", func(t *testing.T) { - err := mariadb.New(utils.Logger).Init(context.TODO(), map[string]interface{}{ - "invalid_config": "invalid_config_value", - }) - assert.Equal(t, plugins.InvalidConfigError{}, err) + err := mariadb.New(utils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "invalid_config": "invalid_config_value", + }}) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) } @@ -92,9 +96,11 @@ func TestExtract(t *testing.T) { ctx := context.TODO() newExtractor := mariadb.New(utils.Logger) - err := newExtractor.Init(ctx, map[string]interface{}{ - "connection_url": fmt.Sprintf("%s:%s@tcp(%s)/", user, pass, host), - }) + err := newExtractor.Init(ctx, plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "connection_url": fmt.Sprintf("%s:%s@tcp(%s)/", user, pass, host), + }}) if err != nil { t.Fatal(err) @@ -108,9 +114,12 @@ func TestExtract(t *testing.T) { for _, record := range emitter.Get() { table := record.Data().(*assetsv1beta1.Table) urns = append(urns, table.Resource.Urn) - } - assert.Equal(t, []string{"test_db.applicant", "test_db.jobs"}, urns) + + assert.Equal(t, []string{ + "urn:mariadb:test-mariadb:table:test_db.applicant", + "urn:mariadb:test-mariadb:table:test_db.jobs", + }, urns) }) } diff --git a/plugins/extractors/metabase/metabase.go b/plugins/extractors/metabase/metabase.go index fed9456b5..d839071b8 100644 --- a/plugins/extractors/metabase/metabase.go +++ b/plugins/extractors/metabase/metabase.go @@ -29,6 +29,13 @@ instance_label: my-metabase user_id: meteor_tester password: meteor_pass_1234` +var info = plugins.Info{ + Description: "Dashboard list from Metabase server.", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"oss", "extractor"}, +} + // Config holds the set of configuration for the metabase extractor type Config struct { Host string `mapstructure:"host" validate:"required"` @@ -41,6 +48,7 @@ type Config struct { // Extractor manages the extraction of data // from the metabase server type Extractor struct { + plugins.BaseExtractor config Config logger log.Logger client Client @@ -48,32 +56,18 @@ type Extractor struct { // New returns a pointer to an initialized Extractor Object func New(client Client, logger log.Logger) *Extractor { - return &Extractor{ + e := &Extractor{ client: client, logger: logger, } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Info returns the brief information of the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "Dashboard list from Metabase server.", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"oss", "extractor"}, - } + return e } -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) -} - -func (e *Extractor) Init(ctx context.Context, configMap map[string]interface{}) (err error) { - // build and validate config - err = utils.BuildConfig(configMap, &e.config) - if err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } err = e.client.Authenticate(e.config.Host, e.config.Username, e.config.Password, e.config.SessionID) @@ -110,7 +104,7 @@ func 
(e *Extractor) buildDashboard(d Dashboard) (data *assetsv1beta1.Dashboard, return } - dashboardUrn := models.DashboardURN("metabase", e.config.InstanceLabel, fmt.Sprintf("dashboard/%d", dashboard.ID)) + dashboardUrn := models.NewURN("metabase", e.UrnScope, "collection", fmt.Sprintf("%d", dashboard.ID)) charts := e.buildCharts(dashboardUrn, dashboard) dashboardUpstreams := e.buildDashboardUpstreams(charts) @@ -162,7 +156,7 @@ func (e *Extractor) buildChart(card Card, dashboardUrn string) (chart *assetsv1b } return &assetsv1beta1.Chart{ - Urn: fmt.Sprintf("metabase::%s/card/%d", e.config.InstanceLabel, card.ID), + Urn: models.NewURN("metabase", e.UrnScope, "card", fmt.Sprintf("%d", card.ID)), DashboardUrn: dashboardUrn, Source: "metabase", Name: card.Name, @@ -316,15 +310,19 @@ func (e *Extractor) buildURN(service, cluster, dbName, tableName string) string cluster = tableComps[0] } case "bigquery": + project := cluster + dataset := dbName if compLength > 2 { - cluster = tableComps[0] - dbName = tableComps[1] + project = tableComps[0] + dataset = tableComps[1] } else if compLength > 1 { - dbName = tableComps[0] + dataset = tableComps[0] } + + return plugins.BigQueryURN(project, dataset, tableName) } - return models.TableURN(service, cluster, dbName, tableName) + return models.NewURN(service, cluster, "table", fmt.Sprintf("%s.%s", dbName, tableName)) } // Register the extractor to catalog diff --git a/plugins/extractors/metabase/metabase_test.go b/plugins/extractors/metabase/metabase_test.go index 6447c3590..8f6c720bd 100644 --- a/plugins/extractors/metabase/metabase_test.go +++ b/plugins/extractors/metabase/metabase_test.go @@ -21,7 +21,8 @@ import ( ) const ( - host = "https://my-metabase.com" + host = "https://my-metabase.com" + urnScope = "test-metabase" ) func TestInit(t *testing.T) { @@ -31,9 +32,12 @@ func TestInit(t *testing.T) { "host": "sample-host", "instance_label": "my-metabase", } - err := metabase.New(client, testutils.Logger).Init(context.TODO(), config) + err := metabase.New(client, testutils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: config, + }) - assert.Equal(t, plugins.InvalidConfigError{}, err) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) t.Run("should authenticate with client if config is valid", func(t *testing.T) { config := map[string]interface{}{ @@ -46,7 +50,10 @@ func TestInit(t *testing.T) { client := new(mockClient) client.On("Authenticate", "sample-host", "user", "sample-password", "").Return(nil) - err := metabase.New(client, testutils.Logger).Init(context.TODO(), config) + err := metabase.New(client, testutils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: config, + }) assert.NoError(t, err) }) t.Run("should allow session_id to replace username and password", func(t *testing.T) { @@ -59,7 +66,10 @@ func TestInit(t *testing.T) { client := new(mockClient) client.On("Authenticate", "sample-host", "", "", "sample-session").Return(nil) - err := metabase.New(client, testutils.Logger).Init(context.TODO(), config) + err := metabase.New(client, testutils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: config, + }) assert.NoError(t, err) }) } @@ -81,12 +91,14 @@ func TestExtract(t *testing.T) { emitter := mocks.NewEmitter() extr := metabase.New(client, plugins.GetLog()) - err := extr.Init(context.TODO(), map[string]interface{}{ - "host": host, - "username": "test-user", - "password": "test-pass", - "instance_label": "my-metabase", - }) + err := 
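For the dashboard lineage, the reworked `buildURN` logic in metabase boils down to the branching below. The function name is illustrative, the dotted-table-name handling is omitted, and the output shape of `plugins.BigQueryURN` (`urn:bigquery:<project>:table:<project>:<dataset>.<table>`) is inferred from the expected.json changes that follow:

```go
package metabase

import (
	"fmt"

	"github.com/odpf/meteor/models"
	"github.com/odpf/meteor/plugins"
)

// upstreamTableURN sketches how upstream tables are mapped to URNs:
// BigQuery upstreams go through plugins.BigQueryURN, everything else keeps
// the generic service/cluster/table form.
func upstreamTableURN(service, cluster, dbName, tableName string) string {
	if service == "bigquery" {
		return plugins.BigQueryURN(cluster, dbName, tableName)
	}

	return models.NewURN(service, cluster, "table", fmt.Sprintf("%s.%s", dbName, tableName))
}
```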
extr.Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "host": host, + "username": "test-user", + "password": "test-pass", + "instance_label": "my-metabase", + }}) if err != nil { t.Fatal(err) } diff --git a/plugins/extractors/metabase/testdata/expected.json b/plugins/extractors/metabase/testdata/expected.json index 3b9c03eab..c16f1495c 100644 --- a/plugins/extractors/metabase/testdata/expected.json +++ b/plugins/extractors/metabase/testdata/expected.json @@ -1,6 +1,6 @@ [{ "resource": { - "urn": "metabase::my-metabase/dashboard/1", + "urn": "urn:metabase:test-metabase:collection:1", "name": "Main", "service": "metabase", "type": "dashboard", @@ -16,31 +16,31 @@ "lineage": { "upstreams": [ { - "urn": "h2::zip:/app/metabase.jar!/sample-dataset.db/ORDERS", + "urn": "urn:h2:zip:/app/metabase.jar!:table:sample-dataset.db.ORDERS", "type": "table", "service": "h2" }, { - "urn": "postgres::postgres:5432/postgres/user", + "urn": "urn:postgres:postgres:5432:table:postgres.user", "type": "table", "service": "postgres" }, { "service": "bigquery", "type": "table", - "urn": "bigquery::sample-project/dataset_a/invoice" + "urn": "urn:bigquery:sample-project:table:sample-project:dataset_a.invoice" }, { "service": "bigquery", "type": "table", - "urn": "bigquery::project_a/dataset_b/user" + "urn": "urn:bigquery:project_a:table:project_a:dataset_b.user" } ] }, "charts": [ { - "urn": "metabase::my-metabase/card/1", - "dashboard_urn": "metabase::my-metabase/dashboard/1", + "urn": "urn:metabase:test-metabase:card:1", + "dashboard_urn": "urn:metabase:test-metabase:collection:1", "source": "metabase", "name": "Orders, Filtered by Quantity", "description": "HELPFUL CHART DESC", @@ -59,7 +59,7 @@ "lineage": { "upstreams": [ { - "urn": "h2::zip:/app/metabase.jar!/sample-dataset.db/ORDERS", + "urn": "urn:h2:zip:/app/metabase.jar!:table:sample-dataset.db.ORDERS", "type": "table", "service": "h2" } @@ -67,8 +67,8 @@ } }, { - "urn": "metabase::my-metabase/card/2", - "dashboard_urn": "metabase::my-metabase/dashboard/1", + "urn": "urn:metabase:test-metabase:card:2", + "dashboard_urn": "urn:metabase:test-metabase:collection:1", "source": "metabase", "name": "Exceptional Users", "description": "This shows only exceptional users.", @@ -87,7 +87,7 @@ "lineage": { "upstreams": [ { - "urn": "postgres::postgres:5432/postgres/user", + "urn": "urn:postgres:postgres:5432:table:postgres.user", "type": "table", "service": "postgres" } @@ -95,8 +95,8 @@ } }, { - "urn": "metabase::my-metabase/card/3", - "dashboard_urn": "metabase::my-metabase/dashboard/1", + "urn": "urn:metabase:test-metabase:card:3", + "dashboard_urn": "urn:metabase:test-metabase:collection:1", "source": "metabase", "name": "Users, Average of Total Followers and Cumulative sum of Total Likes, Filtered by Total Followers", "description": "Users, Average of Total Followers", @@ -115,7 +115,7 @@ "lineage": { "upstreams": [ { - "urn": "postgres::postgres:5432/postgres/user", + "urn": "urn:postgres:postgres:5432:table:postgres.user", "type": "table", "service": "postgres" } @@ -123,18 +123,18 @@ } }, { - "dashboard_urn": "metabase::my-metabase/dashboard/1", + "dashboard_urn": "urn:metabase:test-metabase:collection:1", "lineage": { "upstreams": [ { "service": "bigquery", "type": "table", - "urn": "bigquery::sample-project/dataset_a/invoice" + "urn": "urn:bigquery:sample-project:table:sample-project:dataset_a.invoice" }, { "service": "bigquery", "type": "table", - "urn": "bigquery::project_a/dataset_b/user" + "urn": 
"urn:bigquery:project_a:table:project_a:dataset_b.user" } ] }, @@ -152,7 +152,7 @@ } }, "source": "metabase", - "urn": "metabase::my-metabase/card/4" + "urn": "urn:metabase:test-metabase:card:4" } ], "timestamps": { diff --git a/plugins/extractors/mongodb/mongodb.go b/plugins/extractors/mongodb/mongodb.go index fac47099c..354afabc2 100644 --- a/plugins/extractors/mongodb/mongodb.go +++ b/plugins/extractors/mongodb/mongodb.go @@ -13,7 +13,6 @@ import ( assetsv1beta1 "github.com/odpf/meteor/models/odpf/assets/v1beta1" "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/registry" - "github.com/odpf/meteor/utils" "github.com/odpf/salt/log" "go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/mongo" @@ -38,8 +37,16 @@ type Config struct { var sampleConfig = ` connection_url: "mongodb://admin:pass123@localhost:3306"` +var info = plugins.Info{ + Description: "Collection metadata from MongoDB Server", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"oss", "extractor"}, +} + // Extractor manages the communication with the mongo server type Extractor struct { + plugins.BaseExtractor // internal states client *mongo.Client excluded map[string]bool @@ -49,30 +56,17 @@ type Extractor struct { // New returns a pointer to an initialized Extractor Object func New(logger log.Logger) *Extractor { - return &Extractor{ + e := &Extractor{ logger: logger, } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Info returns the brief information about the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "Collection metadata from MongoDB Server", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"oss", "extractor"}, - } + return e } -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) -} - -func (e *Extractor) Init(ctx context.Context, configMap map[string]interface{}) (err error) { - err = utils.BuildConfig(configMap, &e.config) - if err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } // build excluded list @@ -142,9 +136,10 @@ func (e *Extractor) buildTable(ctx context.Context, db *mongo.Database, collecti table = &assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: fmt.Sprintf("%s.%s", db.Name(), collectionName), - Name: collectionName, - Type: "table", + Urn: models.NewURN("mongodb", e.UrnScope, "collection", fmt.Sprintf("%s.%s", db.Name(), collectionName)), + Name: collectionName, + Service: "mongodb", + Type: "table", }, Profile: &assetsv1beta1.TableProfile{ TotalRows: totalRows, diff --git a/plugins/extractors/mongodb/mongodb_test.go b/plugins/extractors/mongodb/mongodb_test.go index f06883470..d8694897d 100644 --- a/plugins/extractors/mongodb/mongodb_test.go +++ b/plugins/extractors/mongodb/mongodb_test.go @@ -27,10 +27,11 @@ import ( ) const ( - testDB = "MeteorMongoExtractorTest" - user = "user" - pass = "abcd" - port = "27017" + testDB = "MeteorMongoExtractorTest" + user = "user" + pass = "abcd" + port = "27017" + urnScope = "test-mongodb" ) var ( @@ -94,11 +95,13 @@ func TestMain(m *testing.M) { func TestInit(t *testing.T) { t.Run("should return error for invalid", func(t *testing.T) { - err := mongodb.New(utils.Logger).Init(context.TODO(), map[string]interface{}{ - "invalid_config": 
"invalid_config_value", - }) + err := mongodb.New(utils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "invalid_config": "invalid_config_value", + }}) - assert.Equal(t, plugins.InvalidConfigError{}, err) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) } @@ -107,9 +110,11 @@ func TestExtract(t *testing.T) { ctx := context.TODO() extr := mongodb.New(utils.Logger) - err := extr.Init(ctx, map[string]interface{}{ - "connection_url": fmt.Sprintf("mongodb://%s:%s@%s", user, pass, host), - }) + err := extr.Init(ctx, plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "connection_url": fmt.Sprintf("mongodb://%s:%s@%s", user, pass, host), + }}) if err != nil { t.Fatal(err) } @@ -163,9 +168,10 @@ func getExpected() []models.Record { return []models.Record{ models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: testDB + ".connections", - Name: "connections", - Type: "table", + Urn: "urn:mongodb:test-mongodb:collection:" + testDB + ".connections", + Name: "connections", + Service: "mongodb", + Type: "table", }, Profile: &assetsv1beta1.TableProfile{ TotalRows: 3, @@ -173,9 +179,10 @@ func getExpected() []models.Record { }), models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: testDB + ".posts", - Name: "posts", - Type: "table", + Urn: "urn:mongodb:test-mongodb:collection:" + testDB + ".posts", + Name: "posts", + Service: "mongodb", + Type: "table", }, Profile: &assetsv1beta1.TableProfile{ TotalRows: 2, @@ -183,9 +190,10 @@ func getExpected() []models.Record { }), models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: testDB + ".stats", - Name: "stats", - Type: "table", + Urn: "urn:mongodb:test-mongodb:collection:" + testDB + ".stats", + Name: "stats", + Service: "mongodb", + Type: "table", }, Profile: &assetsv1beta1.TableProfile{ TotalRows: 1, diff --git a/plugins/extractors/mssql/README.md b/plugins/extractors/mssql/README.md index 27dc1d7de..42aff3948 100644 --- a/plugins/extractors/mssql/README.md +++ b/plugins/extractors/mssql/README.md @@ -7,7 +7,6 @@ source: name: mssql config: connection_url: sqlserver://admin:pass123@localhost:3306/ - identifier: my-mssql ``` ## Inputs @@ -15,7 +14,6 @@ source: | Key | Value | Example | Description | | | :-- | :---- | :------ | :---------- | :- | | `identifier` | `string` | `my-mssql` | Instance alias, the value will be used as part of the urn component | *required* | -| `connection_url` | `string` | `sqlserver://admin:pass123@localhost:3306/` | URL to access the mssql server | *required* | ## Outputs diff --git a/plugins/extractors/mssql/mssql.go b/plugins/extractors/mssql/mssql.go index 14564614f..3432d7558 100644 --- a/plugins/extractors/mssql/mssql.go +++ b/plugins/extractors/mssql/mssql.go @@ -14,7 +14,6 @@ import ( "github.com/odpf/meteor/models" "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/registry" - "github.com/odpf/meteor/utils" "github.com/odpf/meteor/plugins/sqlutil" @@ -36,15 +35,21 @@ var defaultDBList = []string{ // Config holds the connection URL for the extractor type Config struct { ConnectionURL string `mapstructure:"connection_url" validate:"required"` - Identifier string `mapstructure:"identifier" validate:"required"` } var sampleConfig = ` -connection_url: "sqlserver://admin:pass123@localhost:3306/" -identifier: my-mssql` +connection_url: "sqlserver://admin:pass123@localhost:3306/"` + +var info = plugins.Info{ + Description: "Table metdata from MSSQL 
server", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"microsoft", "extractor"}, +} // Extractor manages the extraction of data from the database type Extractor struct { + plugins.BaseExtractor excludedDbs map[string]bool logger log.Logger db *sql.DB @@ -54,31 +59,18 @@ type Extractor struct { // New returns a pointer to an initialized Extractor Object func New(logger log.Logger) *Extractor { - return &Extractor{ + e := &Extractor{ logger: logger, } -} - -// Info returns the brief information about the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "Table metdata from MSSQL server", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"microsoft", "extractor"}, - } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return e } // Init initializes the extractor -func (e *Extractor) Init(ctx context.Context, configMap map[string]interface{}) (err error) { - err = utils.BuildConfig(configMap, &e.config) - if err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } // build excluded database list @@ -134,9 +126,10 @@ func (e *Extractor) processTable(database string, tableName string) (err error) // push table to channel e.emit(models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: models.TableURN("mssql", e.config.Identifier, database, tableName), - Name: tableName, - Type: "table", + Urn: models.NewURN("mssql", e.UrnScope, "table", fmt.Sprintf("%s.%s", database, tableName)), + Name: tableName, + Service: "mssql", + Type: "table", }, Schema: &facetsv1beta1.Columns{ Columns: columns, diff --git a/plugins/extractors/mssql/mssql_test.go b/plugins/extractors/mssql/mssql_test.go index f6067119a..23841c66a 100644 --- a/plugins/extractors/mssql/mssql_test.go +++ b/plugins/extractors/mssql/mssql_test.go @@ -27,10 +27,11 @@ import ( ) const ( - testDB = "mockdata_meteor_metadata_test" - user = "sa" - pass = "P@ssword1234" - port = "1433" + testDB = "mockdata_meteor_metadata_test" + user = "sa" + pass = "P@ssword1234" + port = "1433" + urnScope = "test-mssql" ) var host = "localhost:" + port @@ -81,11 +82,13 @@ func TestMain(m *testing.M) { func TestInit(t *testing.T) { t.Run("should error for invalid configurations", func(t *testing.T) { - err := mssql.New(utils.Logger).Init(context.TODO(), map[string]interface{}{ - "invalid_config": "invalid_config_value", - }) + err := mssql.New(utils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "invalid_config": "invalid_config_value", + }}) - assert.Equal(t, plugins.InvalidConfigError{}, err) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) } @@ -94,10 +97,11 @@ func TestExtract(t *testing.T) { ctx := context.TODO() extr := mssql.New(utils.Logger) - err := extr.Init(ctx, map[string]interface{}{ - "connection_url": fmt.Sprintf("sqlserver://%s:%s@%s/", user, pass, host), - "identifier": "my-mssql", - }) + err := extr.Init(ctx, plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "connection_url": fmt.Sprintf("sqlserver://%s:%s@%s/", user, pass, host), + }}) if err != nil { t.Fatal(err) } @@ -147,9 +151,10 @@ func 
getExpected() []models.Record { return []models.Record{ models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: "mssql::my-mssql/mockdata_meteor_metadata_test/applicant", - Name: "applicant", - Type: "table", + Urn: "urn:mssql:test-mssql:table:mockdata_meteor_metadata_test.applicant", + Name: "applicant", + Service: "mssql", + Type: "table", }, Schema: &facetsv1beta1.Columns{ Columns: []*facetsv1beta1.Column{ @@ -176,9 +181,10 @@ func getExpected() []models.Record { }), models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: "mssql::my-mssql/mockdata_meteor_metadata_test/jobs", - Name: "jobs", - Type: "table", + Urn: "urn:mssql:test-mssql:table:mockdata_meteor_metadata_test.jobs", + Name: "jobs", + Service: "mssql", + Type: "table", }, Schema: &facetsv1beta1.Columns{ Columns: []*facetsv1beta1.Column{ diff --git a/plugins/extractors/mysql/mysql.go b/plugins/extractors/mysql/mysql.go index af3a229c6..06ab9fe6c 100644 --- a/plugins/extractors/mysql/mysql.go +++ b/plugins/extractors/mysql/mysql.go @@ -18,7 +18,6 @@ import ( "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/plugins/sqlutil" "github.com/odpf/meteor/registry" - "github.com/odpf/meteor/utils" "github.com/odpf/salt/log" ) @@ -35,15 +34,21 @@ var defaultDBList = []string{ // Config holds the connection URL for the extractor type Config struct { ConnectionURL string `mapstructure:"connection_url" validate:"required"` - Identifier string `mapstructure:"identifier" validate:"required"` } var sampleConfig = ` -connection_url: "admin:pass123@tcp(localhost:3306)/" -identifier: "my-mysql"` +connection_url: "admin:pass123@tcp(localhost:3306)/"` + +var info = plugins.Info{ + Description: "Table metadata from MySQL server.", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"oss", "extractor"}, +} // Extractor manages the extraction of data from MySQL type Extractor struct { + plugins.BaseExtractor excludedDbs map[string]bool logger log.Logger config Config @@ -53,30 +58,18 @@ type Extractor struct { // New returns a pointer to an initialized Extractor Object func New(logger log.Logger) *Extractor { - return &Extractor{ + e := &Extractor{ logger: logger, } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Info returns the brief information about the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "Table metadata from MySQL server.", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"oss", "extractor"}, - } -} - -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return e } // Init initializes the extractor -func (e *Extractor) Init(ctx context.Context, configMap map[string]interface{}) (err error) { - if err = utils.BuildConfig(configMap, &e.config); err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } // build excluded database list @@ -146,9 +139,10 @@ func (e *Extractor) processTable(database string, tableName string) (err error) // push table to channel e.emit(models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: models.TableURN("mysql", e.config.Identifier, database, tableName), - Name: tableName, - Type: "table", + Urn: models.NewURN("mysql", e.UrnScope, "table", 
fmt.Sprintf("%s.%s", database, tableName)), + Name: tableName, + Service: "mysql", + Type: "table", }, Schema: &facetsv1beta1.Columns{ Columns: columns, diff --git a/plugins/extractors/mysql/mysql_test.go b/plugins/extractors/mysql/mysql_test.go index c7c5f5e5f..1c0e568a9 100644 --- a/plugins/extractors/mysql/mysql_test.go +++ b/plugins/extractors/mysql/mysql_test.go @@ -30,9 +30,10 @@ import ( var db *sql.DB const ( - user = "meteor_test_user" - pass = "pass" - port = "3310" + user = "meteor_test_user" + pass = "pass" + port = "3310" + urnScope = "test-mysql" ) var host = "localhost:" + port @@ -81,11 +82,13 @@ func TestMain(m *testing.M) { func TestInit(t *testing.T) { t.Run("should return error for invalid configs", func(t *testing.T) { - err := mysql.New(utils.Logger).Init(context.TODO(), map[string]interface{}{ - "invalid_config": "invalid_config_value", - }) + err := mysql.New(utils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "invalid_config": "invalid_config_value", + }}) - assert.Equal(t, plugins.InvalidConfigError{}, err) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) } @@ -94,10 +97,11 @@ func TestExtract(t *testing.T) { ctx := context.TODO() extr := mysql.New(utils.Logger) - err := extr.Init(ctx, map[string]interface{}{ - "connection_url": fmt.Sprintf("%s:%s@tcp(%s)/", user, pass, host), - "identifier": "my-mysql", - }) + err := extr.Init(ctx, plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "connection_url": fmt.Sprintf("%s:%s@tcp(%s)/", user, pass, host), + }}) if err != nil { t.Fatal(err) } @@ -153,9 +157,10 @@ func getExpected() []models.Record { return []models.Record{ models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: "mysql::my-mysql/mockdata_meteor_metadata_test/applicant", - Name: "applicant", - Type: "table", + Urn: "urn:mysql:test-mysql:table:mockdata_meteor_metadata_test.applicant", + Name: "applicant", + Service: "mysql", + Type: "table", }, Schema: &facetsv1beta1.Columns{ Columns: []*facetsv1beta1.Column{ @@ -185,9 +190,10 @@ func getExpected() []models.Record { }), models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: "mysql::my-mysql/mockdata_meteor_metadata_test/jobs", - Name: "jobs", - Type: "table", + Urn: "urn:mysql:test-mysql:table:mockdata_meteor_metadata_test.jobs", + Name: "jobs", + Service: "mysql", + Type: "table", }, Schema: &facetsv1beta1.Columns{ Columns: []*facetsv1beta1.Column{ diff --git a/plugins/extractors/optimus/optimus.go b/plugins/extractors/optimus/optimus.go index efee10912..e7ffd5475 100644 --- a/plugins/extractors/optimus/optimus.go +++ b/plugins/extractors/optimus/optimus.go @@ -30,39 +30,35 @@ type Config struct { var sampleConfig = ` host: optimus.com:80` +var info = plugins.Info{ + Description: "Optimus' jobs metadata", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"optimus", "bigquery", "job", "extractor"}, +} + // Extractor manages the communication with the bigquery service type Extractor struct { + plugins.BaseExtractor logger log.Logger config Config client Client } func New(logger log.Logger, client Client) *Extractor { - return &Extractor{ + e := &Extractor{ logger: logger, client: client, } -} - -// Info returns the detailed information about the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "Optimus' jobs metadata", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"optimus", "bigquery", 
"job", "extractor"}, - } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return e } // Init initializes the extractor -func (e *Extractor) Init(ctx context.Context, configMap map[string]interface{}) (err error) { - if err := utils.BuildConfig(configMap, &e.config); err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } if err := e.client.Connect(ctx, e.config.Host, e.config.MaxSizeInMB); err != nil { @@ -139,7 +135,7 @@ func (e *Extractor) buildJob(ctx context.Context, jobSpec *pb.JobSpecification, } jobID := fmt.Sprintf("%s.%s.%s", project, namespace, jobSpec.Name) - urn := models.JobURN(service, e.config.Host, jobID) + urn := models.NewURN(service, e.UrnScope, "job", jobID) job = &assetsv1beta1.Job{ Resource: &commonv1beta1.Resource{ Urn: urn, @@ -259,7 +255,7 @@ func (e *Extractor) mapURN(optimusURN string) (tableURN string, err error) { datasetID := datasetTableID[0] // "datasetB" tableID := datasetTableID[1] // "tableC" - return models.TableURN("bigquery", projectID, datasetID, tableID), nil + return plugins.BigQueryURN(projectID, datasetID, tableID), nil } // Register the extractor to catalog diff --git a/plugins/extractors/optimus/optimus_test.go b/plugins/extractors/optimus/optimus_test.go index 0f915a21d..bcdf5f184 100644 --- a/plugins/extractors/optimus/optimus_test.go +++ b/plugins/extractors/optimus/optimus_test.go @@ -23,14 +23,18 @@ var ( validConfig = map[string]interface{}{ "host": "optimus:80", } + urnScope = "test-optimus" ) func TestInit(t *testing.T) { t.Run("should return error if config is invalid", func(t *testing.T) { extr := optimus.New(testutils.Logger, new(mockClient)) - err := extr.Init(context.TODO(), map[string]interface{}{}) + err := extr.Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{}, + }) - assert.Equal(t, plugins.InvalidConfigError{}, err) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) t.Run("should hit optimus /ping to check connection if config is valid", func(t *testing.T) { @@ -42,7 +46,10 @@ func TestInit(t *testing.T) { defer client.AssertExpectations(t) extr := optimus.New(testutils.Logger, client) - err = extr.Init(ctx, validConfig) + err = extr.Init(ctx, plugins.Config{ + URNScope: urnScope, + RawConfig: validConfig, + }) assert.NoError(t, err) }) } @@ -58,7 +65,7 @@ func TestExtract(t *testing.T) { defer client.AssertExpectations(t) extr := optimus.New(testutils.Logger, client) - err = extr.Init(ctx, validConfig) + err = extr.Init(ctx, plugins.Config{URNScope: urnScope, RawConfig: validConfig}) require.NoError(t, err) emitter := mocks.NewEmitter() diff --git a/plugins/extractors/optimus/testdata/expected.json b/plugins/extractors/optimus/testdata/expected.json index 98c5e9287..40b4aec81 100644 --- a/plugins/extractors/optimus/testdata/expected.json +++ b/plugins/extractors/optimus/testdata/expected.json @@ -1,6 +1,6 @@ [{ "resource": { - "urn": "optimus::optimus:80/project-A.namespace-A.job-A", + "urn": "urn:optimus:test-optimus:job:project-A.namespace-A.job-A", "type": "job", "name": "job-A", "service": "optimus", @@ -37,12 +37,12 @@ }, "lineage": { "upstreams": [{ - "urn": "bigquery::src-project/src-dataset/src-table", + "urn": 
"urn:bigquery:src-project:table:src-project:src-dataset.src-table", "type": "table", "service": "bigquery" }], "downstreams": [{ - "urn": "bigquery::dst-project/dst-dataset/dst-table", + "urn": "urn:bigquery:dst-project:table:dst-project:dst-dataset.dst-table", "type": "table", "service": "bigquery" }] @@ -50,7 +50,7 @@ }, { "resource": { - "urn": "optimus::optimus:80/project-A.namespace-A.job-B", + "urn": "urn:optimus:test-optimus:job:project-A.namespace-A.job-B", "name": "job-B", "service": "optimus", "type": "job", @@ -87,18 +87,18 @@ }, "lineage": { "upstreams": [{ - "urn": "bigquery::src-b1-project/src-b1-dataset/src-b1-table", + "urn": "urn:bigquery:src-b1-project:table:src-b1-project:src-b1-dataset.src-b1-table", "type": "table", "service": "bigquery" }, { - "urn": "bigquery::src-b2-project/src-b2-dataset/src-b2-table", + "urn": "urn:bigquery:src-b2-project:table:src-b2-project:src-b2-dataset.src-b2-table", "type": "table", "service": "bigquery" } ], "downstreams": [{ - "urn": "bigquery::dst-b-project/dst-b-dataset/dst-b-table", + "urn": "urn:bigquery:dst-b-project:table:dst-b-project:dst-b-dataset.dst-b-table", "type": "table", "service": "bigquery" }] diff --git a/plugins/extractors/oracle/oracle.go b/plugins/extractors/oracle/oracle.go index bebf12d7c..01fc654c9 100644 --- a/plugins/extractors/oracle/oracle.go +++ b/plugins/extractors/oracle/oracle.go @@ -16,7 +16,6 @@ import ( "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/registry" - "github.com/odpf/meteor/utils" "github.com/odpf/salt/log" _ "github.com/sijms/go-ora/v2" ) @@ -31,8 +30,16 @@ type Config struct { var sampleConfig = ` connection_url: oracle://username:passwd@localhost:1521/xe` +var info = plugins.Info{ + Description: "Table metadata oracle SQL Database.", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"oss", "extractor"}, +} + // Extractor manages the extraction of data from the extractor type Extractor struct { + plugins.BaseExtractor logger log.Logger config Config db *sql.DB @@ -40,31 +47,18 @@ type Extractor struct { // New returns a pointer to an initialized Extractor Object func New(logger log.Logger) *Extractor { - return &Extractor{ + e := &Extractor{ logger: logger, } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Info returns the brief information about the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "Table metadata Oracle SQL Database.", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"oss", "extractor"}, - } -} - -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return e } // Init initializes the extractor -func (e *Extractor) Init(ctx context.Context, config map[string]interface{}) (err error) { - // Build and validate config received from recipe - if err := utils.BuildConfig(config, &e.config); err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } // Create database connection @@ -154,9 +148,9 @@ func (e *Extractor) getTableMetadata(db *sql.DB, dbName string, tableName string result = &assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: fmt.Sprintf("%s.%s", dbName, tableName), + Urn: models.NewURN("oracle", e.UrnScope, "table", fmt.Sprintf("%s.%s", dbName, tableName)), Name: tableName, 
- Service: "Oracle", + Service: "oracle", Type: "table", }, Schema: &facetsv1beta1.Columns{ @@ -220,9 +214,7 @@ func connection(cfg Config) (db *sql.DB, err error) { // Register the extractor to catalog func init() { if err := registry.Extractors.Register("oracle", func() plugins.Extractor { - return &Extractor{ - logger: plugins.GetLog(), - } + return New(plugins.GetLog()) }); err != nil { panic(err) } diff --git a/plugins/extractors/oracle/oracle_test.go b/plugins/extractors/oracle/oracle_test.go index c8c82a33e..f3a6d0786 100644 --- a/plugins/extractors/oracle/oracle_test.go +++ b/plugins/extractors/oracle/oracle_test.go @@ -35,6 +35,7 @@ const ( port = "1521" defaultDB = "xe" sysUser = "system" + urnScope = "test-oracle" ) var host = "localhost:" + port @@ -77,12 +78,14 @@ func TestMain(m *testing.M) { func TestInit(t *testing.T) { t.Run("should return error for invalid config", func(t *testing.T) { - err := oracle.New(utils.Logger).Init(context.TODO(), map[string]interface{}{ - "password": "pass", - "host": host, - }) - - assert.Equal(t, plugins.InvalidConfigError{}, err) + err := oracle.New(utils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "password": "pass", + "host": host, + }}) + + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) } @@ -91,9 +94,11 @@ func TestExtract(t *testing.T) { ctx := context.TODO() extr := oracle.New(utils.Logger) - err := extr.Init(ctx, map[string]interface{}{ - "connection_url": fmt.Sprintf("oracle://%s:%s@%s/%s", user, password, host, defaultDB), - }) + err := extr.Init(ctx, plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "connection_url": fmt.Sprintf("oracle://%s:%s@%s/%s", user, password, host, defaultDB), + }}) if err != nil { t.Fatal(err) } @@ -157,9 +162,9 @@ func getExpected() []models.Record { return []models.Record{ models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: "XE.EMPLOYEE", + Urn: "urn:oracle:test-oracle:table:XE.EMPLOYEE", Name: "EMPLOYEE", - Service: "Oracle", + Service: "oracle", Type: "table", }, Profile: &assetsv1beta1.TableProfile{ @@ -188,9 +193,9 @@ func getExpected() []models.Record { }), models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: "XE.DEPARTMENT", + Urn: "urn:oracle:test-oracle:table:XE.DEPARTMENT", Name: "DEPARTMENT", - Service: "Oracle", + Service: "oracle", Type: "table", }, Profile: &assetsv1beta1.TableProfile{ diff --git a/plugins/extractors/postgres/README.md b/plugins/extractors/postgres/README.md index 2f3128b5f..6bce16818 100644 --- a/plugins/extractors/postgres/README.md +++ b/plugins/extractors/postgres/README.md @@ -14,7 +14,6 @@ source: | Key | Value | Example | Description | | | :-- | :---- | :------ | :---------- | :- | -| `identifier` | `string` | `my-postgres` | Instance alias, the value will be used as part of the urn component | *required* | | `connection_url` | `string` | `postgres://admin:pass123@localhost:3306/testDB?sslmode=disable` | URL to access the postgres server | *required* | | `exclude` | `string` | `primaryDB,secondaryDB` | This is a comma separated db list | *optional* | diff --git a/plugins/extractors/postgres/postgres.go b/plugins/extractors/postgres/postgres.go index 9eb8b2324..17a50c83e 100644 --- a/plugins/extractors/postgres/postgres.go +++ b/plugins/extractors/postgres/postgres.go @@ -34,16 +34,22 @@ var defaultDBList = []string{"information_schema", "root", "postgres"} type Config struct { ConnectionURL string 
`mapstructure:"connection_url" validate:"required"` Exclude string `mapstructure:"exclude"` - Identifier string `mapstructure:"identifier" validate:"required"` } var sampleConfig = ` connection_url: "postgres://admin:pass123@localhost:3306/postgres?sslmode=disable" -exclude: testDB,secondaryDB -identifier: my-postgres` +exclude: testDB,secondaryDB` + +var info = plugins.Info{ + Description: "Table metadata and metrics from Postgres SQL sever.", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"oss", "extractor"}, +} // Extractor manages the extraction of data from the extractor type Extractor struct { + plugins.BaseExtractor excludedDbs map[string]bool logger log.Logger config Config @@ -58,31 +64,18 @@ type Extractor struct { // New returns a pointer to an initialized Extractor Object func New(logger log.Logger) *Extractor { - return &Extractor{ + e := &Extractor{ logger: logger, } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Info returns the brief information about the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "Table metadata and metrics from Postgres SQL sever.", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"oss", "extractor"}, - } -} - -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return e } // Init initializes the extractor -func (e *Extractor) Init(ctx context.Context, config map[string]interface{}) (err error) { - // Build and validate config received from recipe - if err := utils.BuildConfig(config, &e.config); err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } // build excluded database list @@ -173,7 +166,7 @@ func (e *Extractor) getTableMetadata(db *sql.DB, dbName string, tableName string result = &assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: models.TableURN("postgres", e.config.Identifier, dbName, tableName), + Urn: models.NewURN("postgres", e.UrnScope, "table", fmt.Sprintf("%s.%s", dbName, tableName)), Name: tableName, Service: "postgres", Type: "table", @@ -285,9 +278,7 @@ func (e *Extractor) isExcludedDB(database string) bool { // Register the extractor to catalog func init() { if err := registry.Extractors.Register("postgres", func() plugins.Extractor { - return &Extractor{ - logger: plugins.GetLog(), - } + return New(plugins.GetLog()) }); err != nil { panic(err) } diff --git a/plugins/extractors/postgres/postgres_test.go b/plugins/extractors/postgres/postgres_test.go index 864f2ce6a..aed3a1bff 100644 --- a/plugins/extractors/postgres/postgres_test.go +++ b/plugins/extractors/postgres/postgres_test.go @@ -37,6 +37,7 @@ const ( port = "5438" root = "root" defaultDB = "postgres" + urnScope = "test-postgres" ) var host = "localhost:" + port @@ -79,11 +80,13 @@ func TestMain(m *testing.M) { func TestInit(t *testing.T) { t.Run("should return error for invalid config", func(t *testing.T) { - err := postgres.New(utils.Logger).Init(context.TODO(), map[string]interface{}{ - "invalid_config": "invalid_config_value", - }) + err := postgres.New(utils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "invalid_config": "invalid_config_value", + }}) - assert.Equal(t, plugins.InvalidConfigError{}, err) + 
assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) } @@ -92,10 +95,11 @@ func TestExtract(t *testing.T) { ctx := context.TODO() extr := postgres.New(utils.Logger) - err := extr.Init(ctx, map[string]interface{}{ - "connection_url": fmt.Sprintf("postgres://%s:%s@%s/postgres?sslmode=disable", user, pass, host), - "identifier": "my-postgres", - }) + err := extr.Init(ctx, plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "connection_url": fmt.Sprintf("postgres://%s:%s@%s/postgres?sslmode=disable", user, pass, host), + }}) if err != nil { t.Fatal(err) } @@ -149,7 +153,7 @@ func getExpected() []models.Record { return []models.Record{ models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: "postgres::my-postgres/test_db/article", + Urn: "urn:postgres:test-postgres:table:test_db.article", Name: "article", Service: "postgres", Type: "table", @@ -183,7 +187,7 @@ func getExpected() []models.Record { }), models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: "postgres::my-postgres/test_db/post", + Urn: "urn:postgres:test-postgres:table:test_db.post", Name: "post", Service: "postgres", Type: "table", diff --git a/plugins/extractors/presto/presto.go b/plugins/extractors/presto/presto.go index 0bc273003..69deed47b 100644 --- a/plugins/extractors/presto/presto.go +++ b/plugins/extractors/presto/presto.go @@ -17,7 +17,6 @@ import ( "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/registry" - "github.com/odpf/meteor/utils" "github.com/odpf/salt/log" _ "github.com/prestodb/presto-go-client/presto" // presto driver ) @@ -35,8 +34,16 @@ var sampleConfig = ` connection_url: "http://user:pass@localhost:8080" exclude_catalog: "memory,system,tpcds,tpch"` +var info = plugins.Info{ + Description: "Table metadata from Presto server.", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"oss", "extractor"}, +} + // Extractor manages the extraction of data type Extractor struct { + plugins.BaseExtractor logger log.Logger config Config client *sql.DB @@ -50,31 +57,18 @@ type Extractor struct { // New returns a pointer to an initialized Extractor Object func New(logger log.Logger) *Extractor { - return &Extractor{ + e := &Extractor{ logger: logger, } -} - -// Info returns the brief information about the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "Table metadata from Presto server.", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"oss", "extractor"}, - } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return e } // Init initializes the extractor -func (e *Extractor) Init(_ context.Context, configMap map[string]interface{}) (err error) { - // Build and validate config received from recipe - if err = utils.BuildConfig(configMap, &e.config); err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } // build excluded catalog list @@ -171,7 +165,7 @@ func (e *Extractor) processTable(db *sql.DB, catalog string, database string, ta // push table to channel result = &assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: fmt.Sprintf("%s.%s.%s", catalog, database, tableName), + Urn: 
models.NewURN("presto", e.UrnScope, "table", fmt.Sprintf("%s.%s.%s", catalog, database, tableName)), Name: tableName, Service: "presto", Type: "table", diff --git a/plugins/extractors/presto/presto_test.go b/plugins/extractors/presto/presto_test.go index 11f5f775d..c1e0e6b0d 100644 --- a/plugins/extractors/presto/presto_test.go +++ b/plugins/extractors/presto/presto_test.go @@ -7,6 +7,10 @@ import ( "context" "database/sql" "fmt" + "log" + "os" + "testing" + assetsv1beta1 "github.com/odpf/meteor/models/odpf/assets/v1beta1" "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/plugins/extractors/presto" @@ -16,14 +20,12 @@ import ( "github.com/ory/dockertest/v3/docker" _ "github.com/prestodb/presto-go-client/presto" "github.com/stretchr/testify/assert" - "log" - "os" - "testing" ) const ( - user = "presto" - port = "8888" + user = "presto" + port = "8888" + urnScope = "test-presto" ) var ( @@ -71,10 +73,12 @@ func TestMain(m *testing.M) { // TestInit tests the configs func TestInit(t *testing.T) { t.Run("should return error for invalid config", func(t *testing.T) { - err := presto.New(utils.Logger).Init(context.TODO(), map[string]interface{}{ - "invalid_config": "invalid_config_value", - }) - assert.Equal(t, plugins.InvalidConfigError{}, err) + err := presto.New(utils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "invalid_config": "invalid_config_value", + }}) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) } @@ -84,10 +88,12 @@ func TestExtract(t *testing.T) { ctx := context.TODO() newExtractor := presto.New(utils.Logger) - if err := newExtractor.Init(ctx, map[string]interface{}{ - "connection_url": fmt.Sprintf("http://%s@%s", user, host), - "exclude_catalog": "memory,jmx,tpcds,tpch", // only system catalog is not excluded - }); err != nil { + if err := newExtractor.Init(ctx, plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "connection_url": fmt.Sprintf("http://%s@%s", user, host), + "exclude_catalog": "memory,jmx,tpcds,tpch", // only system catalog is not excluded + }}); err != nil { t.Fatal(err) } diff --git a/plugins/extractors/redash/redash.go b/plugins/extractors/redash/redash.go index d99808c04..c56276880 100644 --- a/plugins/extractors/redash/redash.go +++ b/plugins/extractors/redash/redash.go @@ -6,11 +6,12 @@ import ( _ "embed" // used to print the embedded assets "encoding/json" "fmt" - facetsv1beta1 "github.com/odpf/meteor/models/odpf/assets/facets/v1beta1" "io/ioutil" "net/http" "time" + facetsv1beta1 "github.com/odpf/meteor/models/odpf/assets/facets/v1beta1" + "github.com/odpf/meteor/models" commonv1beta1 "github.com/odpf/meteor/models/odpf/assets/common/v1beta1" assetsv1beta1 "github.com/odpf/meteor/models/odpf/assets/v1beta1" @@ -36,8 +37,16 @@ base_url: https://redash.example.com api_key: t33I8i8OFnVt3t9Bjj2RXr8nCBz0xyzVZ318Zwbj ` +var info = plugins.Info{ + Description: "Dashboard list from Redash server.", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"oss", "extractor"}, +} + // Extractor manages the extraction of data from the redash server type Extractor struct { + plugins.BaseExtractor config Config logger log.Logger client *http.Client @@ -45,31 +54,18 @@ type Extractor struct { // New returns a pointer to an initialized Extractor Object func New(logger log.Logger) *Extractor { - return &Extractor{ + e := &Extractor{ logger: logger, } -} - -// Info returns the brief information of the extractor -func (e *Extractor) Info() plugins.Info { - return 
plugins.Info{ - Description: "Dashboard list from Redash server.", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"oss", "extractor"}, - } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return e } // Init initializes the extractor -func (e *Extractor) Init(_ context.Context, configMap map[string]interface{}) (err error) { - // build and validate config - if err = utils.BuildConfig(configMap, &e.config); err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } e.client = &http.Client{ Timeout: 4 * time.Second, @@ -98,7 +94,7 @@ func (e *Extractor) Extract(_ context.Context, emit plugins.Emit) (err error) { // buildDashboard builds a dashboard from redash server func (e *Extractor) buildDashboard(dashboard Results) (data *assetsv1beta1.Dashboard, err error) { - dashboardUrn := models.DashboardURN("redash", e.config.BaseURL, fmt.Sprintf("dashboard/%d", dashboard.Id)) + dashboardUrn := models.NewURN("redash", e.UrnScope, "dashboard", fmt.Sprintf("%d", dashboard.Id)) data = &assetsv1beta1.Dashboard{ Resource: &commonv1beta1.Resource{ diff --git a/plugins/extractors/redash/redash_test.go b/plugins/extractors/redash/redash_test.go index ee6ec8fbe..d83794b89 100644 --- a/plugins/extractors/redash/redash_test.go +++ b/plugins/extractors/redash/redash_test.go @@ -5,7 +5,11 @@ package redash_test import ( "context" - "fmt" + "net/http" + "net/http/httptest" + "os" + "testing" + "github.com/odpf/meteor/models" commonv1beta1 "github.com/odpf/meteor/models/odpf/assets/common/v1beta1" facetsv1beta1 "github.com/odpf/meteor/models/odpf/assets/facets/v1beta1" @@ -16,13 +20,10 @@ import ( "github.com/odpf/meteor/test/utils" util "github.com/odpf/meteor/utils" "github.com/stretchr/testify/assert" - "net/http" - "net/http/httptest" - "os" - "testing" ) var testServer *httptest.Server +var urnScope = "test-redash" func TestMain(m *testing.M) { testServer = NewTestServer() @@ -37,20 +38,24 @@ func TestMain(m *testing.M) { // TestInit tests the configs func TestInit(t *testing.T) { t.Run("should return error if for empty base_url in config", func(t *testing.T) { - err := redash.New(utils.Logger).Init(context.TODO(), map[string]interface{}{ - "base_url": "", - "api_key": "checkAPI", - }) - assert.Equal(t, plugins.InvalidConfigError{}, err) + err := redash.New(utils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "base_url": "", + "api_key": "checkAPI", + }}) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) t.Run("should return error if for empty api_key in config", func(t *testing.T) { - err := redash.New(utils.Logger).Init(context.TODO(), map[string]interface{}{ - "base_url": testServer.URL, - "api_key": "", - }) + err := redash.New(utils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "base_url": testServer.URL, + "api_key": "", + }}) - assert.Equal(t, plugins.InvalidConfigError{}, err) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) } @@ -60,7 +65,7 @@ func TestExtract(t *testing.T) { expectedData := []models.Record{ models.NewRecord(&assetsv1beta1.Dashboard{ Resource: &commonv1beta1.Resource{ - Urn: 
fmt.Sprintf("redash::%s/dashboard/421", testServer.URL), + Urn: "urn:redash:test-redash:dashboard:421", Name: "firstDashboard", Service: "redash", Type: "dashboard", @@ -76,7 +81,7 @@ func TestExtract(t *testing.T) { }), models.NewRecord(&assetsv1beta1.Dashboard{ Resource: &commonv1beta1.Resource{ - Urn: fmt.Sprintf("redash::%s/dashboard/634", testServer.URL), + Urn: "urn:redash:test-redash:dashboard:634", Name: "secondDashboard", Service: "redash", Type: "dashboard", @@ -94,10 +99,12 @@ func TestExtract(t *testing.T) { ctx := context.TODO() extractor := redash.New(utils.Logger) - err := extractor.Init(ctx, map[string]interface{}{ - "base_url": testServer.URL, - "api_key": "checkAPI", - }) + err := extractor.Init(ctx, plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "base_url": testServer.URL, + "api_key": "checkAPI", + }}) if err != nil { t.Fatal(err) } diff --git a/plugins/extractors/redshift/redshift.go b/plugins/extractors/redshift/redshift.go index a741a9822..bb25b0f2b 100644 --- a/plugins/extractors/redshift/redshift.go +++ b/plugins/extractors/redshift/redshift.go @@ -3,6 +3,8 @@ package redshift import ( "context" _ "embed" // used to print the embedded assets + "fmt" + "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/session" "github.com/aws/aws-sdk-go/service/redshiftdataapiservice" @@ -13,7 +15,6 @@ import ( assetsv1beta1 "github.com/odpf/meteor/models/odpf/assets/v1beta1" "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/registry" - "github.com/odpf/meteor/utils" "github.com/odpf/salt/log" ) @@ -37,6 +38,13 @@ aws_region: us-east-1 exclude: secondaryDB ` +var info = plugins.Info{ + Description: "Table metadata from Redshift server.", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"oss", "extractor"}, +} + // Option provides extension abstraction to Extractor constructor type Option func(*Extractor) @@ -50,6 +58,7 @@ func WithClient(redshiftClient redshiftdataapiserviceiface.RedshiftDataAPIServic // Extractor manages the extraction of data // from the redshift server type Extractor struct { + plugins.BaseExtractor config Config logger log.Logger client redshiftdataapiserviceiface.RedshiftDataAPIServiceAPI @@ -60,6 +69,7 @@ func New(logger log.Logger, opts ...Option) *Extractor { e := &Extractor{ logger: logger, } + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) for _, opt := range opts { opt(e) } @@ -67,26 +77,10 @@ func New(logger log.Logger, opts ...Option) *Extractor { return e } -// Info returns the brief information about the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "Table metadata from Redshift server.", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"oss", "extractor"}, - } -} - -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) -} - // Init initializes the extractor -func (e *Extractor) Init(_ context.Context, config map[string]interface{}) (err error) { - // Build and validate config received from recipe - if err = utils.BuildConfig(config, &e.config); err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } if e.client != nil { @@ -187,7 +181,7 @@ func (e *Extractor) getTableMetadata(dbName string, tableName string) (result *a result = 
&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: models.TableURN("redshift", e.config.AWSRegion, dbName, tableName), + Urn: models.NewURN("redshift", e.config.ClusterID, "table", fmt.Sprintf("%s.%s.%s", e.config.ClusterID, dbName, tableName)), Name: tableName, Type: "table", Service: "redshift", diff --git a/plugins/extractors/shield/shield.go b/plugins/extractors/shield/shield.go index d44982616..105f77a04 100644 --- a/plugins/extractors/shield/shield.go +++ b/plugins/extractors/shield/shield.go @@ -4,12 +4,12 @@ import ( "context" _ "embed" // used to print the embedded assets "fmt" + "github.com/odpf/meteor/models" commonv1beta1 "github.com/odpf/meteor/models/odpf/assets/common/v1beta1" assetsv1beta1 "github.com/odpf/meteor/models/odpf/assets/v1beta1" "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/registry" - "github.com/odpf/meteor/utils" "github.com/odpf/salt/log" sh "github.com/odpf/shield/proto/v1beta1" ) @@ -25,39 +25,35 @@ type Config struct { var sampleConfig = ` host: shield.com:80` +var info = plugins.Info{ + Description: "Shield' users metadata", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"shield", "extractor"}, +} + // Extractor manages the communication with the shield service type Extractor struct { + plugins.BaseExtractor logger log.Logger config Config client Client } func New(logger log.Logger, client Client) *Extractor { - return &Extractor{ + e := &Extractor{ logger: logger, client: client, } -} - -// Info returns the detailed information about the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "Shield' users metadata", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"shield", "extractor"}, - } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return e } // Init initializes the extractor -func (e *Extractor) Init(ctx context.Context, configMap map[string]interface{}) (err error) { - if err := utils.BuildConfig(configMap, &e.config); err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } if err := e.client.Connect(ctx, e.config.Host); err != nil { @@ -91,7 +87,7 @@ func (e *Extractor) Extract(ctx context.Context, emit plugins.Emit) error { emit(models.NewRecord(&assetsv1beta1.User{ Resource: &commonv1beta1.Resource{ - Urn: fmt.Sprintf("%s::%s/%s", service, e.config.Host, user.GetId()), + Urn: models.NewURN(service, e.UrnScope, "user", user.GetId()), Name: user.GetName(), Service: service, Type: "user", diff --git a/plugins/extractors/shield/shield_test.go b/plugins/extractors/shield/shield_test.go index 29f7de933..f733fe486 100644 --- a/plugins/extractors/shield/shield_test.go +++ b/plugins/extractors/shield/shield_test.go @@ -5,9 +5,10 @@ package shield_test import ( "context" + "testing" + "github.com/odpf/meteor/plugins/extractors/shield" "google.golang.org/protobuf/types/known/timestamppb" - "testing" "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/test/mocks" @@ -23,14 +24,15 @@ var ( validConfig = map[string]interface{}{ "host": "shield:80", } + urnScope = "test-shield" ) func TestInit(t *testing.T) { t.Run("should return error if config is invalid", func(t *testing.T) { extr := 
shield.New(testutils.Logger, new(mockClient)) - err := extr.Init(context.TODO(), map[string]interface{}{}) + err := extr.Init(context.TODO(), plugins.Config{URNScope: urnScope, RawConfig: map[string]interface{}{}}) - assert.Equal(t, plugins.InvalidConfigError{}, err) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) t.Run("should hit shield /admin/ping to check connection if config is valid", func(t *testing.T) { @@ -42,7 +44,7 @@ func TestInit(t *testing.T) { defer client.AssertExpectations(t) extr := shield.New(testutils.Logger, client) - err = extr.Init(ctx, validConfig) + err = extr.Init(ctx, plugins.Config{URNScope: urnScope, RawConfig: validConfig}) assert.NoError(t, err) }) } @@ -58,7 +60,7 @@ func TestExtract(t *testing.T) { defer client.AssertExpectations(t) extr := shield.New(testutils.Logger, client) - err = extr.Init(ctx, validConfig) + err = extr.Init(ctx, plugins.Config{URNScope: urnScope, RawConfig: validConfig}) require.NoError(t, err) emitter := mocks.NewEmitter() diff --git a/plugins/extractors/shield/testdata/expected.json b/plugins/extractors/shield/testdata/expected.json index 82ec79670..c06d44bf8 100644 --- a/plugins/extractors/shield/testdata/expected.json +++ b/plugins/extractors/shield/testdata/expected.json @@ -1,7 +1,7 @@ [ { "resource": { - "urn": "shield::shield:80/user-A", + "urn": "urn:shield:test-shield:user:user-A", "type": "user", "name": "fullname-A", "service": "shield", @@ -30,7 +30,7 @@ }, { "resource": { - "urn": "shield::shield:80/user-B", + "urn": "urn:shield:test-shield:user:user-B", "name": "fullname-B", "service": "shield", "type": "user", diff --git a/plugins/extractors/snowflake/snowflake.go b/plugins/extractors/snowflake/snowflake.go index 57a80b1f5..2d7e8f382 100644 --- a/plugins/extractors/snowflake/snowflake.go +++ b/plugins/extractors/snowflake/snowflake.go @@ -10,7 +10,6 @@ import ( "github.com/odpf/meteor/models" "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/registry" - "github.com/odpf/meteor/utils" "github.com/odpf/salt/log" "github.com/snowflakedb/gosnowflake" _ "github.com/snowflakedb/gosnowflake" // used to register the snowflake driver @@ -29,9 +28,16 @@ type Config struct { } var sampleConfig = `connection_url: "user:password@my_organization-my_account/mydb"` +var info = plugins.Info{ + Description: "Table metadata from Snowflake server.", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"oss", "extractor"}, +} // Extractor manages the extraction of data from snowflake type Extractor struct { + plugins.BaseExtractor logger log.Logger config Config httpTransport http.RoundTripper @@ -54,6 +60,7 @@ func New(logger log.Logger, opts ...Option) *Extractor { e := &Extractor{ logger: logger, } + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) for _, opt := range opts { opt(e) @@ -62,26 +69,10 @@ func New(logger log.Logger, opts ...Option) *Extractor { return e } -// Info returns the brief information about the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "Table metadata from Snowflake server.", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"oss", "extractor"}, - } -} - -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) -} - // Init initializes the extractor -func (e *Extractor) Init(_ context.Context, configMap map[string]interface{}) (err error) { - // Build and validate config received 
from recipe - if err = utils.BuildConfig(configMap, &e.config); err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } if e.httpTransport == nil { @@ -170,9 +161,9 @@ func (e *Extractor) processTable(database string, tableName string) (err error) // push table to channel e.emit(models.NewRecord(&assetsv1beta1.Table{ Resource: &commonv1beta1.Resource{ - Urn: fmt.Sprintf("%s.%s", database, tableName), + Urn: models.NewURN("snowflake", e.UrnScope, "table", fmt.Sprintf("%s.%s", database, tableName)), Name: tableName, - Service: "Snowflake", + Service: "snowflake", Type: "table", }, Schema: &facetsv1beta1.Columns{ diff --git a/plugins/extractors/snowflake/snowflake_test.go b/plugins/extractors/snowflake/snowflake_test.go index 3e80b9be8..74c3d8faa 100644 --- a/plugins/extractors/snowflake/snowflake_test.go +++ b/plugins/extractors/snowflake/snowflake_test.go @@ -21,13 +21,19 @@ import ( "github.com/stretchr/testify/assert" ) +const ( + urnScope = "test-snowflake" +) + // TestInit tests the configs func TestInit(t *testing.T) { t.Run("should return error for invalid config", func(t *testing.T) { - err := snowflake.New(utils.Logger).Init(context.TODO(), map[string]interface{}{ - "invalid_config": "invalid_config_value", - }) - assert.Equal(t, plugins.InvalidConfigError{}, err) + err := snowflake.New(utils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "invalid_config": "invalid_config_value", + }}) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) } @@ -59,9 +65,11 @@ func TestExtract(t *testing.T) { utils.Logger, snowflake.WithHTTPTransport(r)) - if err := newExtractor.Init(ctx, map[string]interface{}{ - "connection_url": "testing:Snowtest0512@lrwfgiz-hi47152/SNOWFLAKE_SAMPLE_DATA", - }); err != nil { + if err := newExtractor.Init(ctx, plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "connection_url": "testing:Snowtest0512@lrwfgiz-hi47152/SNOWFLAKE_SAMPLE_DATA", + }}); err != nil { t.Fatal(err) } diff --git a/plugins/extractors/superset/superset.go b/plugins/extractors/superset/superset.go index c0bb76a18..75bf6444b 100644 --- a/plugins/extractors/superset/superset.go +++ b/plugins/extractors/superset/superset.go @@ -8,7 +8,6 @@ import ( "fmt" "io/ioutil" "net/http" - "strconv" "time" "github.com/odpf/meteor/models" @@ -16,7 +15,6 @@ import ( assetsv1beta1 "github.com/odpf/meteor/models/odpf/assets/v1beta1" "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/registry" - "github.com/odpf/meteor/utils" "github.com/odpf/salt/log" "github.com/pkg/errors" ) @@ -38,9 +36,17 @@ password: meteor_pass_1234 host: http://localhost:3000 provider: db` +var info = plugins.Info{ + Description: "Dashboard list from Superset server.", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"oss", "extractor"}, +} + // Extractor manages the extraction of data // from the superset server type Extractor struct { + plugins.BaseExtractor config Config accessToken string csrfToken string @@ -50,31 +56,18 @@ type Extractor struct { // New returns a pointer to an initialized Extractor Object func New(logger log.Logger) *Extractor { - return &Extractor{ + e := &Extractor{ logger: logger, } -} + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) -// Info returns the brief information of the extractor -func (e *Extractor) Info() plugins.Info { - return 
plugins.Info{ - Description: "Dashboard list from Superset server.", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"oss", "extractor"}, - } -} - -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return e } // Init initializes the extractor -func (e *Extractor) Init(_ context.Context, configMap map[string]interface{}) (err error) { - // build and validate config - if err = utils.BuildConfig(configMap, &e.config); err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } e.client = &http.Client{ Timeout: 4 * time.Second, @@ -98,7 +91,7 @@ func (e *Extractor) Extract(_ context.Context, emit plugins.Emit) (err error) { return errors.Wrap(err, "failed to get dashboard list") } for _, dashboard := range dashboards { - data, err := e.buildDashboard(dashboard.ID) + data, err := e.buildDashboard(dashboard) if err != nil { return errors.Wrap(err, "failed to build dashbaord") } @@ -108,16 +101,17 @@ func (e *Extractor) Extract(_ context.Context, emit plugins.Emit) (err error) { } // buildDashboard builds a dashboard from superset server -func (e *Extractor) buildDashboard(id int) (data *assetsv1beta1.Dashboard, err error) { - var dashboard Dashboard - chart, err := e.getChartsList(id) +func (e *Extractor) buildDashboard(dashboard Dashboard) (data *assetsv1beta1.Dashboard, err error) { + dashboardURN := models.NewURN("superset", e.UrnScope, "dashboard", fmt.Sprintf("%d", dashboard.ID)) + + chart, err := e.getChartsList(dashboardURN, dashboard.ID) if err != nil { err = errors.Wrap(err, "failed to get chart list") return } data = &assetsv1beta1.Dashboard{ Resource: &commonv1beta1.Resource{ - Urn: fmt.Sprintf("superset.%s", dashboard.DashboardTitle), + Urn: dashboardURN, Name: dashboard.DashboardTitle, Service: "superset", Url: dashboard.URL, @@ -142,7 +136,7 @@ func (e *Extractor) getDashboardsList() (dashboards []Dashboard, err error) { } // getChartsList gets a list of charts from superset server -func (e *Extractor) getChartsList(id int) (charts []*assetsv1beta1.Chart, err error) { +func (e *Extractor) getChartsList(dashboardURN string, id int) (charts []*assetsv1beta1.Chart, err error) { type responseChart struct { Result []Chart `json:"result"` } @@ -155,12 +149,13 @@ func (e *Extractor) getChartsList(id int) (charts []*assetsv1beta1.Chart, err er var tempCharts []*assetsv1beta1.Chart for _, res := range data.Result { var tempChart assetsv1beta1.Chart + tempChart.Urn = models.NewURN("superset", e.UrnScope, "chart", fmt.Sprintf("%d", res.SliceId)) tempChart.Name = res.SliceName tempChart.Source = "superset" tempChart.Description = res.Description tempChart.Url = res.SliceUrl tempChart.DataSource = res.Datasource - tempChart.DashboardUrn = "dashboard:" + strconv.Itoa(id) + tempChart.DashboardUrn = dashboardURN tempCharts = append(tempCharts, &tempChart) } return tempCharts, nil diff --git a/plugins/extractors/superset/superset_test.go b/plugins/extractors/superset/superset_test.go index 455c8146a..3cb3d5853 100644 --- a/plugins/extractors/superset/superset_test.go +++ b/plugins/extractors/superset/superset_test.go @@ -34,6 +34,7 @@ const ( provider = "db" dashboardTitle = "random dashboard" mockChart = "random chart" + urnScope = "test-superset" ) var ( @@ -144,11 +145,13 @@ func TestMain(m 
*testing.M) { // TestInit tests the configs func TestInit(t *testing.T) { t.Run("should return error for invalid config", func(t *testing.T) { - err := superset.New(utils.Logger).Init(context.TODO(), map[string]interface{}{ - "user_id": "user", - "host": host, - }) - assert.Equal(t, plugins.InvalidConfigError{}, err) + err := superset.New(utils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "user_id": "user", + "host": host, + }}) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) } @@ -157,12 +160,14 @@ func TestExtract(t *testing.T) { t.Run("should return dashboard model", func(t *testing.T) { ctx := context.TODO() extr := superset.New(utils.Logger) - err := extr.Init(ctx, map[string]interface{}{ - "username": user, - "password": pass, - "host": host, - "provider": provider, - }) + err := extr.Init(ctx, plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "username": user, + "password": pass, + "host": host, + "provider": provider, + }}) if err != nil { t.Fatal(err) } diff --git a/plugins/extractors/tableau/builder_test.go b/plugins/extractors/tableau/builder_test.go index 3dc8f9f4a..956f1f480 100644 --- a/plugins/extractors/tableau/builder_test.go +++ b/plugins/extractors/tableau/builder_test.go @@ -32,7 +32,7 @@ func TestBuildLineageResource(t *testing.T) { res, err := e.buildLineageResources(table) expectedResource := &commonv1beta1.Resource{ - Urn: "bigquery::database_server/access_data/table1", + Urn: "urn:bigquery:database_server:table:database_server:access_data.table1", Type: "table", Service: table.Database["connectionType"].(string), } @@ -59,7 +59,7 @@ func TestBuildLineageResource(t *testing.T) { res, err := e.buildLineageResources(table) expectedResource := &commonv1beta1.Resource{ - Urn: "postgres::localhost:5432/database_server/table1", + Urn: "urn:postgres:localhost:5432:table:database_server.table1", Type: "table", Service: table.Database["connectionType"].(string), } @@ -85,7 +85,7 @@ func TestBuildLineageResource(t *testing.T) { res, err := e.buildLineageResources(table) expectedResource := &commonv1beta1.Resource{ - Urn: "gcs::gcs/database_cloud_file/table_name", + Urn: "urn:gcs:gcs:bucket:database_cloud_file/table_name", Type: "bucket", Service: table.Database["connectionType"].(string), } @@ -111,7 +111,7 @@ func TestBuildLineageResource(t *testing.T) { res, err := e.buildLineageResources(table) expectedResource := &commonv1beta1.Resource{ - Urn: "file::/this/is/file/database_file/table_name", + Urn: "urn:file:/this/is/file:bucket:database_file.table_name", Type: "bucket", Service: table.Database["connectionType"].(string), } @@ -137,7 +137,7 @@ func TestBuildLineageResource(t *testing.T) { res, err := e.buildLineageResources(table) expectedResource := &commonv1beta1.Resource{ - Urn: "web_data_connector::http://link_to_connector/database_wdc/table_name", + Urn: "urn:web_data_connector:http://link_to_connector:table:database_wdc.table_name", Type: "table", Service: table.Database["connectionType"].(string), } @@ -197,12 +197,12 @@ func TestBuildLineage(t *testing.T) { expectedLineage := &facetsv1beta1.Lineage{ Upstreams: []*commonv1beta1.Resource{ { - Urn: "postgres::localhost:5432/database_1/table_name_1", + Urn: "urn:postgres:localhost:5432:table:database_1.table_name_1", Type: "table", Service: upstreamTables[0].Database["connectionType"].(string), }, { - Urn: "gcs::gcs/database_2/table_name_2", + Urn: "urn:gcs:gcs:bucket:database_2/table_name_2", Type: "bucket", Service: 
upstreamTables[1].Database["connectionType"].(string), }, diff --git a/plugins/extractors/tableau/models.go b/plugins/extractors/tableau/models.go index 4b220ed49..328b96dae 100644 --- a/plugins/extractors/tableau/models.go +++ b/plugins/extractors/tableau/models.go @@ -8,6 +8,7 @@ import ( "github.com/odpf/meteor/models" commonv1beta1 "github.com/odpf/meteor/models/odpf/assets/common/v1beta1" + "github.com/odpf/meteor/plugins" "github.com/pkg/errors" ) @@ -117,15 +118,14 @@ func (dbs *DatabaseServer) CreateResource(tableInfo Table) (resource *commonv1be fullNameSplitted, err := parseBQTableFullName(tableInfo.FullName) if err != nil { // assume fullNameSplitted[0] is the project ID - urn = models.TableURN(source, fullNameSplitted[0], tableInfo.Schema, tableInfo.Name) + urn = plugins.BigQueryURN(fullNameSplitted[0], tableInfo.Schema, tableInfo.Name) break } - urn = models.TableURN(source, fullNameSplitted[0], fullNameSplitted[1], fullNameSplitted[2]) + urn = plugins.BigQueryURN(fullNameSplitted[0], fullNameSplitted[1], fullNameSplitted[2]) default: // postgres::postgres:5432/postgres/user host := fmt.Sprintf("%s:%d", dbs.HostName, dbs.Port) - urn = models.TableURN(source, host, dbs.Name, tableInfo.Name) - + urn = models.NewURN(source, host, "table", fmt.Sprintf("%s.%s", dbs.Name, tableInfo.Name)) } resource = &commonv1beta1.Resource{ Urn: urn, @@ -148,7 +148,7 @@ type CloudFile struct { func (cf *CloudFile) CreateResource(tableInfo Table) (resource *commonv1beta1.Resource) { source := mapConnectionTypeToSource(cf.ConnectionType) - urn := fmt.Sprintf("%s::%s/%s/%s", source, cf.Provider, cf.Name, tableInfo.Name) + urn := models.NewURN(source, cf.Provider, "bucket", fmt.Sprintf("%s/%s", cf.Name, tableInfo.Name)) resource = &commonv1beta1.Resource{ Urn: urn, Type: "bucket", // TODO need to check what would be the appropriate type for this @@ -167,7 +167,7 @@ type File struct { func (f *File) CreateResource(tableInfo Table) (resource *commonv1beta1.Resource) { source := mapConnectionTypeToSource(f.ConnectionType) - urn := fmt.Sprintf("%s::%s/%s/%s", source, f.FilePath, f.Name, tableInfo.Name) + urn := models.NewURN(source, f.FilePath, "bucket", fmt.Sprintf("%s.%s", f.Name, tableInfo.Name)) resource = &commonv1beta1.Resource{ Urn: urn, Type: "bucket", // TODO need to check what would be the appropriate type for this @@ -186,7 +186,7 @@ type WebDataConnector struct { func (wdc *WebDataConnector) CreateResource(tableInfo Table) (resource *commonv1beta1.Resource) { source := mapConnectionTypeToSource(wdc.ConnectionType) - urn := fmt.Sprintf("%s::%s/%s/%s", source, wdc.ConnectorURL, wdc.Name, tableInfo.Name) + urn := models.NewURN(source, wdc.ConnectorURL, "table", fmt.Sprintf("%s.%s", wdc.Name, tableInfo.Name)) resource = &commonv1beta1.Resource{ Urn: urn, Type: "table", // TODO need to check what would be the appropriate type for this diff --git a/plugins/extractors/tableau/tableau.go b/plugins/extractors/tableau/tableau.go index d59fffca5..bd58d39c0 100644 --- a/plugins/extractors/tableau/tableau.go +++ b/plugins/extractors/tableau/tableau.go @@ -3,7 +3,6 @@ package tableau import ( "context" _ "embed" - "fmt" "net/http" "github.com/odpf/meteor/models" @@ -30,21 +29,28 @@ password: xxxxxxxxxx sitename: testdev550928 ` +var info = plugins.Info{ + Description: "Dashboard list from Tableau server", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"oss", "extractor"}, +} + // Config that holds a set of configuration for tableau extractor type Config struct { - Host string 
`mapstructure:"host" validate:"required"` - Version string `mapstructure:"version" validate:"required"` // float as string - Identifier string `mapstructure:"identifier" validate:"required"` - Username string `mapstructure:"username"` - Password string `mapstructure:"password" validate:"required_with=Username"` - AuthToken string `mapstructure:"auth_token" validate:"required_without=Username"` - SiteID string `mapstructure:"site_id" validate:"required_without=Username"` - Sitename string `mapstructure:"sitename"` + Host string `mapstructure:"host" validate:"required"` + Version string `mapstructure:"version" validate:"required"` // float as string + Username string `mapstructure:"username"` + Password string `mapstructure:"password" validate:"required_with=Username"` + AuthToken string `mapstructure:"auth_token" validate:"required_without=Username"` + SiteID string `mapstructure:"site_id" validate:"required_without=Username"` + Sitename string `mapstructure:"sitename"` } // Extractor manages the extraction of data // from tableau server type Extractor struct { + plugins.BaseExtractor config Config logger log.Logger httpClient *http.Client @@ -66,6 +72,7 @@ func New(logger log.Logger, opts ...Option) *Extractor { e := &Extractor{ logger: logger, } + e.BaseExtractor = plugins.NewBaseExtractor(info, &e.config) for _, opt := range opts { opt(e) @@ -75,26 +82,9 @@ func New(logger log.Logger, opts ...Option) *Extractor { return e } -// Info returns the brief information of the extractor -func (e *Extractor) Info() plugins.Info { - return plugins.Info{ - Description: "Dashboard list from Tableau server", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"oss", "extractor"}, - } -} - -// Validate validates the configuration of the extractor -func (e *Extractor) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) -} - -func (e *Extractor) Init(ctx context.Context, configMap map[string]interface{}) (err error) { - // build and validate config - err = utils.BuildConfig(configMap, &e.config) - if err != nil { - return plugins.InvalidConfigError{} +func (e *Extractor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = e.BaseExtractor.Init(ctx, config); err != nil { + return err } err = e.client.Init(ctx, e.config) @@ -133,7 +123,7 @@ func (e *Extractor) Extract(ctx context.Context, emit plugins.Emit) (err error) func (e *Extractor) buildDashboard(wb *Workbook) (data *assetsv1beta1.Dashboard, err error) { lineages := e.buildLineage(wb.UpstreamTables) - dashboardURN := models.DashboardURN("tableau", e.config.Identifier, fmt.Sprintf("workbook/%s", wb.ID)) + dashboardURN := models.NewURN("tableau", e.UrnScope, "workbook", wb.ID) data = &assetsv1beta1.Dashboard{ Resource: &commonv1beta1.Resource{ Urn: dashboardURN, @@ -174,7 +164,7 @@ func (e *Extractor) buildDashboard(wb *Workbook) (data *assetsv1beta1.Dashboard, func (e *Extractor) buildCharts(dashboardURN string, wb *Workbook, lineages *facetsv1beta1.Lineage) (charts []*assetsv1beta1.Chart) { for _, sh := range wb.Sheets { - chartURN := models.DashboardURN("tableau", e.config.Identifier, fmt.Sprintf("sheet/%s", sh.ID)) + chartURN := models.NewURN("tableau", e.UrnScope, "sheet", sh.ID) charts = append(charts, &assetsv1beta1.Chart{ Urn: chartURN, Name: sh.Name, diff --git a/plugins/extractors/tableau/tableau_test.go b/plugins/extractors/tableau/tableau_test.go index d4ea140a7..ed02926c3 100644 --- a/plugins/extractors/tableau/tableau_test.go +++ 
b/plugins/extractors/tableau/tableau_test.go @@ -26,46 +26,55 @@ var ( sitename = "testdev550928" username = "meteor_user" password = "xxxxxxxxxx" + urnScope = "test-tableau" ) func TestInit(t *testing.T) { t.Run("should return error for invalid config", func(t *testing.T) { - err := tableau.New(testutils.Logger).Init(context.TODO(), map[string]interface{}{ - "host": "invalid_host", - }) + err := tableau.New(testutils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "host": "invalid_host", + }}) - assert.Equal(t, plugins.InvalidConfigError{}, err) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) t.Run("should return error for password missing with username", func(t *testing.T) { - err := tableau.New(testutils.Logger).Init(context.TODO(), map[string]interface{}{ - "host": host, - "version": version, - "identifier": "my-tableau", - "sitename": sitename, - "username": username, - }) - - assert.Equal(t, plugins.InvalidConfigError{}, err) + err := tableau.New(testutils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "host": host, + "version": version, + "identifier": "my-tableau", + "sitename": sitename, + "username": username, + }}) + + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) t.Run("should return error for site_id and auth_token missing", func(t *testing.T) { - err := tableau.New(testutils.Logger).Init(context.TODO(), map[string]interface{}{ - "host": host, - "version": version, - "identifier": "my-tableau", - "sitename": sitename, - }) - - assert.Equal(t, plugins.InvalidConfigError{}, err) + err := tableau.New(testutils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "host": host, + "version": version, + "identifier": "my-tableau", + "sitename": sitename, + }}) + + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) t.Run("should return no error for config with site_id and auth_token without username", func(t *testing.T) { - err := tableau.New(testutils.Logger).Init(context.TODO(), map[string]interface{}{ - "host": host, - "version": version, - "identifier": "my-tableau", - "sitename": sitename, - "site_id": "xxxxxxxxx", - "auth_token": "xxxxxxxxx", - }) + err := tableau.New(testutils.Logger).Init(context.TODO(), plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "host": host, + "version": version, + "identifier": "my-tableau", + "sitename": sitename, + "site_id": "xxxxxxxxx", + "auth_token": "xxxxxxxxx", + }}) assert.NoError(t, err) }) } @@ -83,14 +92,16 @@ func TestExtract(t *testing.T) { tableau.WithHTTPClient(&http.Client{ Transport: r, })) - err = extr.Init(ctx, map[string]interface{}{ - "host": host, - "version": version, - "identifier": "my-tableau", - "sitename": sitename, - "username": username, - "password": password, - }) + err = extr.Init(ctx, plugins.Config{ + URNScope: urnScope, + RawConfig: map[string]interface{}{ + "host": host, + "version": version, + "identifier": "my-tableau", + "sitename": sitename, + "username": username, + "password": password, + }}) if err != nil { t.Fatal(err) } diff --git a/plugins/extractors/tableau/testdata/dashboards_proto.json b/plugins/extractors/tableau/testdata/dashboards_proto.json index 11b137ae7..03b18c422 100644 --- a/plugins/extractors/tableau/testdata/dashboards_proto.json +++ b/plugins/extractors/tableau/testdata/dashboards_proto.json @@ -1,17 +1,17 @@ [ { "resource": { - "urn": 
"tableau::my-tableau/workbook/d74564cf-931c-2df2-fd8f-8b974fbc0b14", + "urn": "urn:tableau:test-tableau:workbook:d74564cf-931c-2df2-fd8f-8b974fbc0b14", "name": "Regional", "service": "tableau", "type": "dashboard" }, "charts": [ { - "urn": "tableau::my-tableau/sheet/2f97fce2-e291-e229-842d-b7af508aebfc", + "urn": "urn:tableau:test-tableau:sheet:2f97fce2-e291-e229-842d-b7af508aebfc", "name": "Obesity Map", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/d74564cf-931c-2df2-fd8f-8b974fbc0b14", + "dashboard_urn": "urn:tableau:test-tableau:workbook:d74564cf-931c-2df2-fd8f-8b974fbc0b14", "properties": { "attributes": { "id": "2f97fce2-e291-e229-842d-b7af508aebfc", @@ -28,10 +28,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/4bef53d7-8b22-87f4-0077-c9c7bd090c08", + "urn": "urn:tableau:test-tableau:sheet:4bef53d7-8b22-87f4-0077-c9c7bd090c08", "name": "S\u0026P Forward Returns", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/d74564cf-931c-2df2-fd8f-8b974fbc0b14", + "dashboard_urn": "urn:tableau:test-tableau:workbook:d74564cf-931c-2df2-fd8f-8b974fbc0b14", "properties": { "attributes": { "id": "4bef53d7-8b22-87f4-0077-c9c7bd090c08", @@ -48,10 +48,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/68cf24ed-f83c-b6f5-0f0c-a884895d9016", + "urn": "urn:tableau:test-tableau:sheet:68cf24ed-f83c-b6f5-0f0c-a884895d9016", "name": "Heat Map", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/d74564cf-931c-2df2-fd8f-8b974fbc0b14", + "dashboard_urn": "urn:tableau:test-tableau:workbook:d74564cf-931c-2df2-fd8f-8b974fbc0b14", "properties": { "attributes": { "id": "68cf24ed-f83c-b6f5-0f0c-a884895d9016", @@ -68,10 +68,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/8616fd42-285c-4179-dea6-9f36e9f666e1", + "urn": "urn:tableau:test-tableau:sheet:8616fd42-285c-4179-dea6-9f36e9f666e1", "name": "Scatter", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/d74564cf-931c-2df2-fd8f-8b974fbc0b14", + "dashboard_urn": "urn:tableau:test-tableau:workbook:d74564cf-931c-2df2-fd8f-8b974fbc0b14", "properties": { "attributes": { "id": "8616fd42-285c-4179-dea6-9f36e9f666e1", @@ -88,10 +88,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/924ac46b-b979-72d5-27c8-e96251c4a982", + "urn": "urn:tableau:test-tableau:sheet:924ac46b-b979-72d5-27c8-e96251c4a982", "name": "College", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/d74564cf-931c-2df2-fd8f-8b974fbc0b14", + "dashboard_urn": "urn:tableau:test-tableau:workbook:d74564cf-931c-2df2-fd8f-8b974fbc0b14", "properties": { "attributes": { "id": "924ac46b-b979-72d5-27c8-e96251c4a982", @@ -108,10 +108,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/99c11133-00b6-dc1f-8e89-88e224f96210", + "urn": "urn:tableau:test-tableau:sheet:99c11133-00b6-dc1f-8e89-88e224f96210", "name": "Flight Delays", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/d74564cf-931c-2df2-fd8f-8b974fbc0b14", + "dashboard_urn": "urn:tableau:test-tableau:workbook:d74564cf-931c-2df2-fd8f-8b974fbc0b14", "properties": { "attributes": { "id": "99c11133-00b6-dc1f-8e89-88e224f96210", @@ -128,10 +128,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/a0f461b6-6236-8c90-aaeb-be8218441ada", + "urn": "urn:tableau:test-tableau:sheet:a0f461b6-6236-8c90-aaeb-be8218441ada", "name": "Stocks", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/d74564cf-931c-2df2-fd8f-8b974fbc0b14", + "dashboard_urn": "urn:tableau:test-tableau:workbook:d74564cf-931c-2df2-fd8f-8b974fbc0b14", "properties": { "attributes": 
{ "id": "a0f461b6-6236-8c90-aaeb-be8218441ada", @@ -148,10 +148,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/b8b4b90b-783d-663d-2988-72781d4609cb", + "urn": "urn:tableau:test-tableau:sheet:b8b4b90b-783d-663d-2988-72781d4609cb", "name": "S\u0026P Returns Vs Conditions", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/d74564cf-931c-2df2-fd8f-8b974fbc0b14", + "dashboard_urn": "urn:tableau:test-tableau:workbook:d74564cf-931c-2df2-fd8f-8b974fbc0b14", "properties": { "attributes": { "id": "b8b4b90b-783d-663d-2988-72781d4609cb", @@ -168,10 +168,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/c997983b-144f-ec1b-6a0f-1dfdeef63a97", + "urn": "urn:tableau:test-tableau:sheet:c997983b-144f-ec1b-6a0f-1dfdeef63a97", "name": "Obesity Scatter Plot", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/d74564cf-931c-2df2-fd8f-8b974fbc0b14", + "dashboard_urn": "urn:tableau:test-tableau:workbook:d74564cf-931c-2df2-fd8f-8b974fbc0b14", "properties": { "attributes": { "id": "c997983b-144f-ec1b-6a0f-1dfdeef63a97", @@ -188,10 +188,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/d19fe52f-76db-a243-3381-56c40536dd18", + "urn": "urn:tableau:test-tableau:sheet:d19fe52f-76db-a243-3381-56c40536dd18", "name": "S\u0026P Returns by Decade", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/d74564cf-931c-2df2-fd8f-8b974fbc0b14", + "dashboard_urn": "urn:tableau:test-tableau:workbook:d74564cf-931c-2df2-fd8f-8b974fbc0b14", "properties": { "attributes": { "id": "d19fe52f-76db-a243-3381-56c40536dd18", @@ -239,32 +239,32 @@ "lineage": { "upstreams": [ { - "urn": "hyper::6025bab5-2ce4-491b-90fd-891f1f33316c/6025bab5-2ce4-491b-90fd-891f1f33316c/Extract", + "urn": "urn:hyper:6025bab5-2ce4-491b-90fd-891f1f33316c:bucket:6025bab5-2ce4-491b-90fd-891f1f33316c.Extract", "service": "hyper", "type": "bucket" }, { - "urn": "hyper::554717ae-4225-4652-97cc-cb6021a3de22/554717ae-4225-4652-97cc-cb6021a3de22/Extract", + "urn": "urn:hyper:554717ae-4225-4652-97cc-cb6021a3de22:bucket:554717ae-4225-4652-97cc-cb6021a3de22.Extract", "service": "hyper", "type": "bucket" }, { - "urn": "hyper::f6538ebb-b95f-48da-af7b-12059ae4093f/f6538ebb-b95f-48da-af7b-12059ae4093f/Extract", + "urn": "urn:hyper:f6538ebb-b95f-48da-af7b-12059ae4093f:bucket:f6538ebb-b95f-48da-af7b-12059ae4093f.Extract", "service": "hyper", "type": "bucket" }, { - "urn": "hyper::e7fc51cb-2a4a-4910-a429-2708e9c0c1b6/e7fc51cb-2a4a-4910-a429-2708e9c0c1b6/Extract", + "urn": "urn:hyper:e7fc51cb-2a4a-4910-a429-2708e9c0c1b6:bucket:e7fc51cb-2a4a-4910-a429-2708e9c0c1b6.Extract", "service": "hyper", "type": "bucket" }, { - "urn": "hyper::8058bef9-5422-4e13-991b-fd233e5bace9/8058bef9-5422-4e13-991b-fd233e5bace9/Extract", + "urn": "urn:hyper:8058bef9-5422-4e13-991b-fd233e5bace9:bucket:8058bef9-5422-4e13-991b-fd233e5bace9.Extract", "service": "hyper", "type": "bucket" }, { - "urn": "hyper::50344e6b-add1-4dd1-b73f-42b772cfa2c0/50344e6b-add1-4dd1-b73f-42b772cfa2c0/Extract", + "urn": "urn:hyper:50344e6b-add1-4dd1-b73f-42b772cfa2c0:bucket:50344e6b-add1-4dd1-b73f-42b772cfa2c0.Extract", "service": "hyper", "type": "bucket" } @@ -273,7 +273,7 @@ }, { "resource": { - "urn": "tableau::my-tableau/workbook/e1f0aab7-e3ff-8727-8461-4d4294e220f0", + "urn": "urn:tableau:test-tableau:workbook:e1f0aab7-e3ff-8727-8461-4d4294e220f0", "name": "Superstore", "service": "tableau", "type": "dashboard", @@ -281,10 +281,10 @@ }, "charts": [ { - "urn": "tableau::my-tableau/sheet/0421101f-cd87-a6b9-e502-eca8f8e96a3a", + "urn": 
"urn:tableau:test-tableau:sheet:0421101f-cd87-a6b9-e502-eca8f8e96a3a", "name": "CustomerOverview", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/e1f0aab7-e3ff-8727-8461-4d4294e220f0", + "dashboard_urn": "urn:tableau:test-tableau:workbook:e1f0aab7-e3ff-8727-8461-4d4294e220f0", "properties": { "attributes": { "id": "0421101f-cd87-a6b9-e502-eca8f8e96a3a", @@ -301,10 +301,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/109afef4-0607-b6d2-6219-7a7009a62a4c", + "urn": "urn:tableau:test-tableau:sheet:109afef4-0607-b6d2-6219-7a7009a62a4c", "name": "QuotaAttainment", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/e1f0aab7-e3ff-8727-8461-4d4294e220f0", + "dashboard_urn": "urn:tableau:test-tableau:workbook:e1f0aab7-e3ff-8727-8461-4d4294e220f0", "properties": { "attributes": { "id": "109afef4-0607-b6d2-6219-7a7009a62a4c", @@ -321,10 +321,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/18a57261-20d9-ad1f-50f7-8ce832517257", + "urn": "urn:tableau:test-tableau:sheet:18a57261-20d9-ad1f-50f7-8ce832517257", "name": "What If Forecast", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/e1f0aab7-e3ff-8727-8461-4d4294e220f0", + "dashboard_urn": "urn:tableau:test-tableau:workbook:e1f0aab7-e3ff-8727-8461-4d4294e220f0", "properties": { "attributes": { "id": "18a57261-20d9-ad1f-50f7-8ce832517257", @@ -341,10 +341,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/1cf1a77e-1aa7-82e4-d56f-4df6ef5804f0", + "urn": "urn:tableau:test-tableau:sheet:1cf1a77e-1aa7-82e4-d56f-4df6ef5804f0", "name": "Product Detail Sheet", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/e1f0aab7-e3ff-8727-8461-4d4294e220f0", + "dashboard_urn": "urn:tableau:test-tableau:workbook:e1f0aab7-e3ff-8727-8461-4d4294e220f0", "properties": { "attributes": { "id": "1cf1a77e-1aa7-82e4-d56f-4df6ef5804f0", @@ -361,10 +361,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/2481cc92-cf3b-4547-45c3-62bf55c84193", + "urn": "urn:tableau:test-tableau:sheet:2481cc92-cf3b-4547-45c3-62bf55c84193", "name": "CustomerScatter", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/e1f0aab7-e3ff-8727-8461-4d4294e220f0", + "dashboard_urn": "urn:tableau:test-tableau:workbook:e1f0aab7-e3ff-8727-8461-4d4294e220f0", "properties": { "attributes": { "id": "2481cc92-cf3b-4547-45c3-62bf55c84193", @@ -381,10 +381,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/363b1423-b446-28d1-d18a-f28676f5154b", + "urn": "urn:tableau:test-tableau:sheet:363b1423-b446-28d1-d18a-f28676f5154b", "name": "Total Sales", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/e1f0aab7-e3ff-8727-8461-4d4294e220f0", + "dashboard_urn": "urn:tableau:test-tableau:workbook:e1f0aab7-e3ff-8727-8461-4d4294e220f0", "properties": { "attributes": { "id": "363b1423-b446-28d1-d18a-f28676f5154b", @@ -401,10 +401,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/3d04aec6-e611-f775-de41-ab642f800a8a", + "urn": "urn:tableau:test-tableau:sheet:3d04aec6-e611-f775-de41-ab642f800a8a", "name": "Sale Map", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/e1f0aab7-e3ff-8727-8461-4d4294e220f0", + "dashboard_urn": "urn:tableau:test-tableau:workbook:e1f0aab7-e3ff-8727-8461-4d4294e220f0", "properties": { "attributes": { "id": "3d04aec6-e611-f775-de41-ab642f800a8a", @@ -421,10 +421,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/4d72181d-88d6-3fae-9824-67789cc0cbca", + "urn": "urn:tableau:test-tableau:sheet:4d72181d-88d6-3fae-9824-67789cc0cbca", "name": "CommissionProjection", "source": 
"tableau", - "dashboard_urn": "tableau::my-tableau/workbook/e1f0aab7-e3ff-8727-8461-4d4294e220f0", + "dashboard_urn": "urn:tableau:test-tableau:workbook:e1f0aab7-e3ff-8727-8461-4d4294e220f0", "properties": { "attributes": { "id": "4d72181d-88d6-3fae-9824-67789cc0cbca", @@ -441,10 +441,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/52a3e355-d6c6-d4e3-f5b3-9f5ff7f432b3", + "urn": "urn:tableau:test-tableau:sheet:52a3e355-d6c6-d4e3-f5b3-9f5ff7f432b3", "name": "ShipSummary", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/e1f0aab7-e3ff-8727-8461-4d4294e220f0", + "dashboard_urn": "urn:tableau:test-tableau:workbook:e1f0aab7-e3ff-8727-8461-4d4294e220f0", "properties": { "attributes": { "id": "52a3e355-d6c6-d4e3-f5b3-9f5ff7f432b3", @@ -461,10 +461,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/64984186-af48-7b33-3c8b-e34950d0f8ac", + "urn": "urn:tableau:test-tableau:sheet:64984186-af48-7b33-3c8b-e34950d0f8ac", "name": "Sales by Product", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/e1f0aab7-e3ff-8727-8461-4d4294e220f0", + "dashboard_urn": "urn:tableau:test-tableau:workbook:e1f0aab7-e3ff-8727-8461-4d4294e220f0", "properties": { "attributes": { "id": "64984186-af48-7b33-3c8b-e34950d0f8ac", @@ -481,10 +481,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/74096ebd-7253-292a-b0a7-d8f4c9a617ca", + "urn": "urn:tableau:test-tableau:sheet:74096ebd-7253-292a-b0a7-d8f4c9a617ca", "name": "Performance", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/e1f0aab7-e3ff-8727-8461-4d4294e220f0", + "dashboard_urn": "urn:tableau:test-tableau:workbook:e1f0aab7-e3ff-8727-8461-4d4294e220f0", "properties": { "attributes": { "id": "74096ebd-7253-292a-b0a7-d8f4c9a617ca", @@ -501,10 +501,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/805d5e5e-1129-792c-0581-7f315c43829a", + "urn": "urn:tableau:test-tableau:sheet:805d5e5e-1129-792c-0581-7f315c43829a", "name": "ProductDetails", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/e1f0aab7-e3ff-8727-8461-4d4294e220f0", + "dashboard_urn": "urn:tableau:test-tableau:workbook:e1f0aab7-e3ff-8727-8461-4d4294e220f0", "properties": { "attributes": { "id": "805d5e5e-1129-792c-0581-7f315c43829a", @@ -521,10 +521,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/8c7702e7-3bce-1865-38b2-887ec3d1026a", + "urn": "urn:tableau:test-tableau:sheet:8c7702e7-3bce-1865-38b2-887ec3d1026a", "name": "Tooltip: Profit Ratio by City", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/e1f0aab7-e3ff-8727-8461-4d4294e220f0", + "dashboard_urn": "urn:tableau:test-tableau:workbook:e1f0aab7-e3ff-8727-8461-4d4294e220f0", "properties": { "attributes": { "id": "8c7702e7-3bce-1865-38b2-887ec3d1026a", @@ -541,10 +541,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/90f2fd1c-0ff2-1184-f37d-ebb6a2ac0ac8", + "urn": "urn:tableau:test-tableau:sheet:90f2fd1c-0ff2-1184-f37d-ebb6a2ac0ac8", "name": "Forecast", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/e1f0aab7-e3ff-8727-8461-4d4294e220f0", + "dashboard_urn": "urn:tableau:test-tableau:workbook:e1f0aab7-e3ff-8727-8461-4d4294e220f0", "properties": { "attributes": { "id": "90f2fd1c-0ff2-1184-f37d-ebb6a2ac0ac8", @@ -561,10 +561,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/960d1086-d0af-e4fb-1678-e70fb7b373c5", + "urn": "urn:tableau:test-tableau:sheet:960d1086-d0af-e4fb-1678-e70fb7b373c5", "name": "ShippingTrend", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/e1f0aab7-e3ff-8727-8461-4d4294e220f0", + "dashboard_urn": 
"urn:tableau:test-tableau:workbook:e1f0aab7-e3ff-8727-8461-4d4294e220f0", "properties": { "attributes": { "id": "960d1086-d0af-e4fb-1678-e70fb7b373c5", @@ -581,10 +581,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/a0073bd9-753c-be7c-d14c-4fdc70383e85", + "urn": "urn:tableau:test-tableau:sheet:a0073bd9-753c-be7c-d14c-4fdc70383e85", "name": "OTE", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/e1f0aab7-e3ff-8727-8461-4d4294e220f0", + "dashboard_urn": "urn:tableau:test-tableau:workbook:e1f0aab7-e3ff-8727-8461-4d4294e220f0", "properties": { "attributes": { "id": "a0073bd9-753c-be7c-d14c-4fdc70383e85", @@ -601,10 +601,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/ae2f7cbc-c788-1f4f-12d5-810f9c8be43a", + "urn": "urn:tableau:test-tableau:sheet:ae2f7cbc-c788-1f4f-12d5-810f9c8be43a", "name": "CustomerRank", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/e1f0aab7-e3ff-8727-8461-4d4294e220f0", + "dashboard_urn": "urn:tableau:test-tableau:workbook:e1f0aab7-e3ff-8727-8461-4d4294e220f0", "properties": { "attributes": { "id": "ae2f7cbc-c788-1f4f-12d5-810f9c8be43a", @@ -621,10 +621,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/c00babfb-1189-fe5c-b1bd-e8313f807b5c", + "urn": "urn:tableau:test-tableau:sheet:c00babfb-1189-fe5c-b1bd-e8313f807b5c", "name": "Sales", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/e1f0aab7-e3ff-8727-8461-4d4294e220f0", + "dashboard_urn": "urn:tableau:test-tableau:workbook:e1f0aab7-e3ff-8727-8461-4d4294e220f0", "properties": { "attributes": { "id": "c00babfb-1189-fe5c-b1bd-e8313f807b5c", @@ -641,10 +641,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/cae8a658-437f-af6e-5725-4c8a355bc2c3", + "urn": "urn:tableau:test-tableau:sheet:cae8a658-437f-af6e-5725-4c8a355bc2c3", "name": "DaystoShip", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/e1f0aab7-e3ff-8727-8461-4d4294e220f0", + "dashboard_urn": "urn:tableau:test-tableau:workbook:e1f0aab7-e3ff-8727-8461-4d4294e220f0", "properties": { "attributes": { "id": "cae8a658-437f-af6e-5725-4c8a355bc2c3", @@ -661,10 +661,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/e2b10503-01ce-aa3c-1dc0-a4200eedd98b", + "urn": "urn:tableau:test-tableau:sheet:e2b10503-01ce-aa3c-1dc0-a4200eedd98b", "name": "Sales by Segment", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/e1f0aab7-e3ff-8727-8461-4d4294e220f0", + "dashboard_urn": "urn:tableau:test-tableau:workbook:e1f0aab7-e3ff-8727-8461-4d4294e220f0", "properties": { "attributes": { "id": "e2b10503-01ce-aa3c-1dc0-a4200eedd98b", @@ -681,10 +681,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/f5981591-d5c1-8842-5edf-845d121c0afa", + "urn": "urn:tableau:test-tableau:sheet:f5981591-d5c1-8842-5edf-845d121c0afa", "name": "ProductView", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/e1f0aab7-e3ff-8727-8461-4d4294e220f0", + "dashboard_urn": "urn:tableau:test-tableau:workbook:e1f0aab7-e3ff-8727-8461-4d4294e220f0", "properties": { "attributes": { "id": "f5981591-d5c1-8842-5edf-845d121c0afa", @@ -732,17 +732,17 @@ "lineage": { "upstreams": [ { - "urn": "textscan::Sales Commission.csv/Sales Commission.csv/Sales Commission.csv", + "urn": "urn:textscan:Sales Commission.csv:bucket:Sales Commission.csv.Sales Commission.csv", "service": "textscan", "type": "bucket" }, { - "urn": "excel-direct::Sample - Superstore.xls/Sample - Superstore.xls/Orders", + "urn": "urn:excel-direct:Sample - Superstore.xls:bucket:Sample - Superstore.xls.Orders", "service": "excel-direct", "type": 
"bucket" }, { - "urn": "excel-direct::Sales Target (US).xlsx/Sales Target (US).xlsx/Sheet1", + "urn": "urn:excel-direct:Sales Target (US).xlsx:bucket:Sales Target (US).xlsx.Sheet1", "service": "excel-direct", "type": "bucket" } @@ -751,17 +751,17 @@ }, { "resource": { - "urn": "tableau::my-tableau/workbook/d7db01c0-b311-6f47-31bf-ad1c42e67629", + "urn": "urn:tableau:test-tableau:workbook:d7db01c0-b311-6f47-31bf-ad1c42e67629", "name": "InMail Engagement", "service": "tableau", "type": "dashboard" }, "charts": [ { - "urn": "tableau::my-tableau/sheet/3250190d-e1f8-3e1e-e43e-3485ea87990e", + "urn": "urn:tableau:test-tableau:sheet:3250190d-e1f8-3e1e-e43e-3485ea87990e", "name": "Best Day to Send InMails", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/d7db01c0-b311-6f47-31bf-ad1c42e67629", + "dashboard_urn": "urn:tableau:test-tableau:workbook:d7db01c0-b311-6f47-31bf-ad1c42e67629", "properties": { "attributes": { "id": "3250190d-e1f8-3e1e-e43e-3485ea87990e", @@ -778,10 +778,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/3b839abb-791b-8399-82a2-59915f9209be", + "urn": "urn:tableau:test-tableau:sheet:3b839abb-791b-8399-82a2-59915f9209be", "name": "Number of InMail Sent", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/d7db01c0-b311-6f47-31bf-ad1c42e67629", + "dashboard_urn": "urn:tableau:test-tableau:workbook:d7db01c0-b311-6f47-31bf-ad1c42e67629", "properties": { "attributes": { "id": "3b839abb-791b-8399-82a2-59915f9209be", @@ -798,10 +798,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/3ec1ba6c-c8dd-7641-45e4-a2b2154d0052", + "urn": "urn:tableau:test-tableau:sheet:3ec1ba6c-c8dd-7641-45e4-a2b2154d0052", "name": "Tooltip: volume InMail sent vs accepted by weekday", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/d7db01c0-b311-6f47-31bf-ad1c42e67629", + "dashboard_urn": "urn:tableau:test-tableau:workbook:d7db01c0-b311-6f47-31bf-ad1c42e67629", "properties": { "attributes": { "id": "3ec1ba6c-c8dd-7641-45e4-a2b2154d0052", @@ -818,10 +818,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/447bed17-7fb7-23c9-6207-78ed4c8bf769", + "urn": "urn:tableau:test-tableau:sheet:447bed17-7fb7-23c9-6207-78ed4c8bf769", "name": "InMail Response Rate", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/d7db01c0-b311-6f47-31bf-ad1c42e67629", + "dashboard_urn": "urn:tableau:test-tableau:workbook:d7db01c0-b311-6f47-31bf-ad1c42e67629", "properties": { "attributes": { "id": "447bed17-7fb7-23c9-6207-78ed4c8bf769", @@ -838,10 +838,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/514b6c67-f4a2-70e9-5fbb-c7ba97477532", + "urn": "urn:tableau:test-tableau:sheet:514b6c67-f4a2-70e9-5fbb-c7ba97477532", "name": "Tooltip: volume InMail sent vs accepted by time of day and weekday", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/d7db01c0-b311-6f47-31bf-ad1c42e67629", + "dashboard_urn": "urn:tableau:test-tableau:workbook:d7db01c0-b311-6f47-31bf-ad1c42e67629", "properties": { "attributes": { "id": "514b6c67-f4a2-70e9-5fbb-c7ba97477532", @@ -858,10 +858,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/881c5f1c-05c0-cccb-c778-0e9315d4d28f", + "urn": "urn:tableau:test-tableau:sheet:881c5f1c-05c0-cccb-c778-0e9315d4d28f", "name": "InMail response rate by time of day", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/d7db01c0-b311-6f47-31bf-ad1c42e67629", + "dashboard_urn": "urn:tableau:test-tableau:workbook:d7db01c0-b311-6f47-31bf-ad1c42e67629", "properties": { "attributes": { "id": 
"881c5f1c-05c0-cccb-c778-0e9315d4d28f", @@ -878,10 +878,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/98e3c68b-3d72-4522-99e3-82e62ae6943f", + "urn": "urn:tableau:test-tableau:sheet:98e3c68b-3d72-4522-99e3-82e62ae6943f", "name": "All Team Memeber Engagement", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/d7db01c0-b311-6f47-31bf-ad1c42e67629", + "dashboard_urn": "urn:tableau:test-tableau:workbook:d7db01c0-b311-6f47-31bf-ad1c42e67629", "properties": { "attributes": { "id": "98e3c68b-3d72-4522-99e3-82e62ae6943f", @@ -898,10 +898,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/9b98ebfa-f333-c487-4e39-8edf047499fa", + "urn": "urn:tableau:test-tableau:sheet:9b98ebfa-f333-c487-4e39-8edf047499fa", "name": "Weekday by time of day response rate heatmap", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/d7db01c0-b311-6f47-31bf-ad1c42e67629", + "dashboard_urn": "urn:tableau:test-tableau:workbook:d7db01c0-b311-6f47-31bf-ad1c42e67629", "properties": { "attributes": { "id": "9b98ebfa-f333-c487-4e39-8edf047499fa", @@ -918,10 +918,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/a3edae7b-a14f-69f7-c126-3cdcc0fc6427", + "urn": "urn:tableau:test-tableau:sheet:a3edae7b-a14f-69f7-c126-3cdcc0fc6427", "name": "Best Time to Send InMails", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/d7db01c0-b311-6f47-31bf-ad1c42e67629", + "dashboard_urn": "urn:tableau:test-tableau:workbook:d7db01c0-b311-6f47-31bf-ad1c42e67629", "properties": { "attributes": { "id": "a3edae7b-a14f-69f7-c126-3cdcc0fc6427", @@ -938,10 +938,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/a91805a6-403c-6797-caca-d5aa14ffa136", + "urn": "urn:tableau:test-tableau:sheet:a91805a6-403c-6797-caca-d5aa14ffa136", "name": "InMail Response Rate Timeline", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/d7db01c0-b311-6f47-31bf-ad1c42e67629", + "dashboard_urn": "urn:tableau:test-tableau:workbook:d7db01c0-b311-6f47-31bf-ad1c42e67629", "properties": { "attributes": { "id": "a91805a6-403c-6797-caca-d5aa14ffa136", @@ -958,10 +958,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/c5ddca41-13eb-a01a-0072-6aef8f1dd57b", + "urn": "urn:tableau:test-tableau:sheet:c5ddca41-13eb-a01a-0072-6aef8f1dd57b", "name": "Tooltip: volume InMail sent vs accepted by time of day", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/d7db01c0-b311-6f47-31bf-ad1c42e67629", + "dashboard_urn": "urn:tableau:test-tableau:workbook:d7db01c0-b311-6f47-31bf-ad1c42e67629", "properties": { "attributes": { "id": "c5ddca41-13eb-a01a-0072-6aef8f1dd57b", @@ -978,10 +978,10 @@ } }, { - "urn": "tableau::my-tableau/sheet/e8bde842-c7bc-57a0-8138-251190f520b1", + "urn": "urn:tableau:test-tableau:sheet:e8bde842-c7bc-57a0-8138-251190f520b1", "name": "InMail response by weekday", "source": "tableau", - "dashboard_urn": "tableau::my-tableau/workbook/d7db01c0-b311-6f47-31bf-ad1c42e67629", + "dashboard_urn": "urn:tableau:test-tableau:workbook:d7db01c0-b311-6f47-31bf-ad1c42e67629", "properties": { "attributes": { "id": "e8bde842-c7bc-57a0-8138-251190f520b1", @@ -1029,17 +1029,17 @@ "lineage": { "upstreams": [ { - "urn": "webdata-direct:linkedin-snap::https://connectors.tableau.com/linkedin/snap/1/snap.html/Sales Navigator Connection/ActivityOutcome", + "urn": "urn:webdata-direct:linkedin-snap:https://connectors.tableau.com/linkedin/snap/1/snap.html:table:Sales Navigator Connection.ActivityOutcome", "service": "webdata-direct:linkedin-snap", "type": "table" }, { - "urn": 
"webdata-direct:linkedin-snap::https://connectors.tableau.com/linkedin/snap/1/snap.html/Sales Navigator Connection/Activity", + "urn": "urn:webdata-direct:linkedin-snap:https://connectors.tableau.com/linkedin/snap/1/snap.html:table:Sales Navigator Connection.Activity", "service": "webdata-direct:linkedin-snap", "type": "table" }, { - "urn": "webdata-direct:linkedin-snap::https://connectors.tableau.com/linkedin/snap/1/snap.html/Sales Navigator Connection/UniqueSeat", + "urn": "urn:webdata-direct:linkedin-snap:https://connectors.tableau.com/linkedin/snap/1/snap.html:table:Sales Navigator Connection.UniqueSeat", "service": "webdata-direct:linkedin-snap", "type": "table" } diff --git a/plugins/plugin.go b/plugins/plugin.go index 4f60bca04..3acb54ea4 100644 --- a/plugins/plugin.go +++ b/plugins/plugin.go @@ -4,7 +4,6 @@ import ( "context" "github.com/odpf/meteor/models" - "gopkg.in/yaml.v3" ) // PluginType is the type of plugin. @@ -27,16 +26,21 @@ type Info struct { Summary string `yaml:"summary"` } +type Config struct { + URNScope string + RawConfig map[string]interface{} +} + type Plugin interface { // Info returns plugin's information. Info() Info - // Validate checks if the given config is valid for the plugin. - Validate(config map[string]interface{}) error + // Validate checks if the given options is valid for the plugin. + Validate(config Config) error // Init will be called once before running the plugin. // This is where you want to initiate any client or test any connection to external service. - Init(ctx context.Context, config map[string]interface{}) error + Init(ctx context.Context, config Config) error } // Extractor is a plugin that extracts data from a source. @@ -59,12 +63,3 @@ type Syncer interface { // Close will be called once after everything is done Close() error } - -// ParseInfo parses the plugin's meta.yaml file and returns an plugin Info struct. 
-func ParseInfo(text string) (info Info, err error) { - err = yaml.Unmarshal([]byte(text), &info) - if err != nil { - return - } - return -} diff --git a/plugins/processors/enrich/processor.go b/plugins/processors/enrich/processor.go index ce0438fc2..50a7b1266 100644 --- a/plugins/processors/enrich/processor.go +++ b/plugins/processors/enrich/processor.go @@ -16,40 +16,39 @@ var summary string // Processor work in a list of data type Processor struct { + plugins.BasePlugin config map[string]interface{} logger log.Logger } -// New create a new processor -func New(logger log.Logger) *Processor { - return &Processor{ - logger: logger, - } -} - var sampleConfig = ` # Enrichment configuration # fieldA: valueA # fieldB: valueB` -// Info returns the plugin information -func (p *Processor) Info() plugins.Info { - return plugins.Info{ - Description: "Append custom fields to records", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"processor", "transform"}, - } +var info = plugins.Info{ + Description: "Append custom fields to records", + SampleConfig: sampleConfig, + Summary: summary, + Tags: []string{"processor", "transform"}, } -// Validate validates the plugin configuration -func (p *Processor) Validate(configMap map[string]interface{}) (err error) { - return nil +// New create a new processor +func New(logger log.Logger) *Processor { + p := &Processor{ + logger: logger, + } + p.BasePlugin = plugins.NewBasePlugin(info, &p.config) + + return p } // Process processes the data -func (p *Processor) Init(ctx context.Context, config map[string]interface{}) (err error) { - p.config = config +func (p *Processor) Init(ctx context.Context, config plugins.Config) (err error) { + if err = p.BasePlugin.Init(ctx, config); err != nil { + return err + } + return } diff --git a/plugins/sinks/compass/sink.go b/plugins/sinks/compass/sink.go index 5aca21a46..5b1b239ed 100644 --- a/plugins/sinks/compass/sink.go +++ b/plugins/sinks/compass/sink.go @@ -27,50 +27,48 @@ type Config struct { Labels map[string]string `mapstructure:"labels"` } -var sampleConfig = ` -# The hostname of the compass service -host: https://compass.com -# Additional HTTP headers send to compass, multiple headers value are separated by a comma -headers: - Compass-User-Email: meteor@odpf.io - X-Other-Header: value1, value2 -# The labels to pass as payload label of the patch api -labels: - myCustom: $properties.attributes.myCustomField - sampleLabel: $properties.labels.sampleLabelField -` +var info = plugins.Info{ + Description: "Send metadata to compass http service", + Summary: summary, + Tags: []string{"http", "sink"}, + SampleConfig: ` + # The hostname of the compass service + host: https://compass.com + # Additional HTTP headers send to compass, multiple headers value are separated by a comma + headers: + Compass-User-Email: meteor@odpf.io + X-Other-Header: value1, value2 + # The labels to pass as payload label of the patch api + labels: + myCustom: $properties.attributes.myCustomField + sampleLabel: $properties.labels.sampleLabelField + `, +} type httpClient interface { Do(*http.Request) (*http.Response, error) } type Sink struct { + plugins.BasePlugin client httpClient config Config logger log.Logger } func New(c httpClient, logger log.Logger) plugins.Syncer { - sink := &Sink{client: c, logger: logger} - return sink -} - -func (s *Sink) Info() plugins.Info { - return plugins.Info{ - Description: "Send metadata to compass http service", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"http", "sink"}, + s 
:= &Sink{ + logger: logger, + client: c, } -} + s.BasePlugin = plugins.NewBasePlugin(info, &s.config) -func (s *Sink) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return s } -func (s *Sink) Init(ctx context.Context, configMap map[string]interface{}) (err error) { - if err = utils.BuildConfig(configMap, &s.config); err != nil { - return plugins.InvalidConfigError{Type: plugins.PluginTypeSink} +func (s *Sink) Init(ctx context.Context, config plugins.Config) (err error) { + if err = s.BasePlugin.Init(ctx, config); err != nil { + return err } return diff --git a/plugins/sinks/compass/sink_test.go b/plugins/sinks/compass/sink_test.go index 9a0a5eff3..a5ec915e9 100644 --- a/plugins/sinks/compass/sink_test.go +++ b/plugins/sinks/compass/sink_test.go @@ -1,3 +1,6 @@ +//go:build plugins +// +build plugins + package compass_test import ( @@ -42,9 +45,9 @@ func TestInit(t *testing.T) { for i, config := range invalidConfigs { t.Run(fmt.Sprintf("test invalid config #%d", i+1), func(t *testing.T) { compassSink := compass.New(newMockHTTPClient(config, http.MethodPatch, url, compass.RequestPayload{}), testUtils.Logger) - err := compassSink.Init(context.TODO(), config) + err := compassSink.Init(context.TODO(), plugins.Config{RawConfig: config}) - assert.Equal(t, plugins.InvalidConfigError{Type: plugins.PluginTypeSink}, err) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) } }) @@ -62,9 +65,9 @@ func TestSink(t *testing.T) { ctx := context.TODO() compassSink := compass.New(client, testUtils.Logger) - err := compassSink.Init(ctx, map[string]interface{}{ + err := compassSink.Init(ctx, plugins.Config{RawConfig: map[string]interface{}{ "host": host, - }) + }}) if err != nil { t.Fatal(err) } @@ -83,9 +86,9 @@ func TestSink(t *testing.T) { ctx := context.TODO() compassSink := compass.New(client, testUtils.Logger) - err := compassSink.Init(ctx, map[string]interface{}{ + err := compassSink.Init(ctx, plugins.Config{RawConfig: map[string]interface{}{ "host": host, - }) + }}) if err != nil { t.Fatal(err) } @@ -163,7 +166,7 @@ func TestSink(t *testing.T) { client.SetupResponse(200, "") ctx := context.TODO() compassSink := compass.New(client, testUtils.Logger) - err := compassSink.Init(ctx, c) + err := compassSink.Init(ctx, plugins.Config{RawConfig: c}) if err != nil { t.Fatal(err) } @@ -456,7 +459,7 @@ func TestSink(t *testing.T) { ctx := context.TODO() compassSink := compass.New(client, testUtils.Logger) - err := compassSink.Init(ctx, tc.config) + err := compassSink.Init(ctx, plugins.Config{RawConfig: tc.config}) if err != nil { t.Fatal(err) } diff --git a/plugins/sinks/console/sink.go b/plugins/sinks/console/sink.go index a757d6527..a08c92f45 100644 --- a/plugins/sinks/console/sink.go +++ b/plugins/sinks/console/sink.go @@ -15,28 +15,31 @@ import ( //go:embed README.md var summary string -type Sink struct { - logger log.Logger +var info = plugins.Info{ + Description: "Log to standard output", + SampleConfig: "", + Summary: summary, + Tags: []string{"log", "sink"}, } -func New() plugins.Syncer { - return new(Sink) +type Sink struct { + plugins.BasePlugin + logger log.Logger } -func (s *Sink) Info() plugins.Info { - return plugins.Info{ - Description: "Log to standard output", - SampleConfig: "", - Summary: summary, - Tags: []string{"log", "sink"}, +func New(logger log.Logger) plugins.Syncer { + s := &Sink{ + logger: logger, } -} + s.BasePlugin = plugins.NewBasePlugin(info, nil) -func (s *Sink) Validate(configMap map[string]interface{}) (err 
error) { - return nil + return s } -func (s *Sink) Init(ctx context.Context, config map[string]interface{}) (err error) { +func (s *Sink) Init(ctx context.Context, config plugins.Config) (err error) { + if err = s.BasePlugin.Init(ctx, config); err != nil { + return err + } return } diff --git a/plugins/sinks/file/file.go b/plugins/sinks/file/file.go index 2b02f8e37..bb6acc040 100644 --- a/plugins/sinks/file/file.go +++ b/plugins/sinks/file/file.go @@ -10,7 +10,6 @@ import ( "github.com/odpf/meteor/models" "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/registry" - "github.com/odpf/meteor/utils" "github.com/odpf/salt/log" ndjson "github.com/scizorman/go-ndjson" "gopkg.in/yaml.v3" @@ -25,38 +24,36 @@ type Config struct { Format string `mapstructure:"format" validate:"required"` } -var sampleConfig = ` -path: ./output-filename.txt -format: ndjson -` +var info = plugins.Info{ + Description: "save output to a file", + Summary: summary, + Tags: []string{"file", "json", "yaml", "sink"}, + SampleConfig: ` + path: ./output-filename.txt + format: ndjson + `, +} type Sink struct { + plugins.BasePlugin logger log.Logger config Config format string File *os.File } -func New() plugins.Syncer { - return new(Sink) -} - -func (s *Sink) Info() plugins.Info { - return plugins.Info{ - Description: "save output to a file", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"file", "json", "yaml", "sink"}, +func New(logger log.Logger) plugins.Syncer { + s := &Sink{ + logger: logger, } -} + s.BasePlugin = plugins.NewBasePlugin(info, &s.config) -func (s *Sink) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return s } -func (s *Sink) Init(ctx context.Context, config map[string]interface{}) (err error) { - if err := utils.BuildConfig(config, &s.config); err != nil { - return plugins.InvalidConfigError{Type: "sink", PluginName: "file"} +func (s *Sink) Init(ctx context.Context, config plugins.Config) (err error) { + if err = s.BasePlugin.Init(ctx, config); err != nil { + return err } if err := s.validateFilePath(s.config.Path); err != nil { @@ -130,16 +127,14 @@ func (s *Sink) validateFilePath(path string) error { filename := dirs[len(dirs)-1] format := strings.Split(filename, ".") if len(format) != 2 { - return fmt.Errorf("invalid filename") + return fmt.Errorf("invalid filename for \"%s\"", path) } return nil } func init() { if err := registry.Sinks.Register("file", func() plugins.Syncer { - return &Sink{ - logger: plugins.GetLog(), - } + return New(plugins.GetLog()) }); err != nil { panic(err) } diff --git a/plugins/sinks/file/file_test.go b/plugins/sinks/file/file_test.go index 4873c320f..6a8ac58cf 100644 --- a/plugins/sinks/file/file_test.go +++ b/plugins/sinks/file/file_test.go @@ -1,3 +1,6 @@ +//go:build plugins +// +build plugins + package file_test import ( @@ -11,12 +14,10 @@ import ( assetsv1beta1 "github.com/odpf/meteor/models/odpf/assets/v1beta1" "github.com/odpf/meteor/plugins" f "github.com/odpf/meteor/plugins/sinks/file" + testUtils "github.com/odpf/meteor/test/utils" "github.com/stretchr/testify/assert" ) -//go:embed README.md -var summary string - var validConfig = map[string]interface{}{ "path": "./test-dir/sample.ndjson", "format": "ndjson", @@ -25,8 +26,8 @@ var validConfig = map[string]interface{}{ func TestValidate(t *testing.T) { t.Run("should return error on invalid config", func(t *testing.T) { invalidConfig := map[string]interface{}{} - fileSink := f.New() - err := fileSink.Validate(invalidConfig) + 
fileSink := f.New(testUtils.Logger) + err := fileSink.Validate(plugins.Config{RawConfig: invalidConfig}) assert.Error(t, err) }) } @@ -34,22 +35,22 @@ func TestValidate(t *testing.T) { func TestInit(t *testing.T) { t.Run("should return InvalidConfigError on invalid config", func(t *testing.T) { invalidConfig := map[string]interface{}{} - fileSink := f.New() - err := fileSink.Init(context.TODO(), invalidConfig) - assert.Equal(t, plugins.InvalidConfigError{Type: "sink", PluginName: "file"}, err) + fileSink := f.New(testUtils.Logger) + err := fileSink.Init(context.TODO(), plugins.Config{RawConfig: invalidConfig}) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) - t.Run("should return error on filename missing", func(t *testing.T) { + t.Run("should return error if file is not found ", func(t *testing.T) { invalidConfig := map[string]interface{}{ "path": "./some-dir", "format": "ndjson", } - fileSink := f.New() - err := fileSink.Init(context.TODO(), invalidConfig) + fileSink := f.New(testUtils.Logger) + err := fileSink.Init(context.TODO(), plugins.Config{RawConfig: invalidConfig}) assert.Error(t, err) }) t.Run("should return no error on valid config", func(t *testing.T) { - fileSink := f.New() - err := fileSink.Init(context.TODO(), validConfig) + fileSink := f.New(testUtils.Logger) + err := fileSink.Init(context.TODO(), plugins.Config{RawConfig: validConfig}) assert.NoError(t, err) }) } @@ -82,19 +83,14 @@ func TestMain(t *testing.T) { }) } -func TestInfo(t *testing.T) { - info := f.New().Info() - assert.Equal(t, summary, info.Summary) -} - func sinkInvalidPath(t *testing.T, config map[string]interface{}) error { - fileSink := f.New() - return fileSink.Init(context.TODO(), config) + fileSink := f.New(testUtils.Logger) + return fileSink.Init(context.TODO(), plugins.Config{RawConfig: config}) } func sinkValidSetup(t *testing.T, config map[string]interface{}) error { - fileSink := f.New() - err := fileSink.Init(context.TODO(), config) + fileSink := f.New(testUtils.Logger) + err := fileSink.Init(context.TODO(), plugins.Config{RawConfig: config}) assert.NoError(t, err) err = fileSink.Sink(context.TODO(), getExpectedVal()) assert.NoError(t, err) diff --git a/plugins/sinks/http/http.go b/plugins/sinks/http/http.go index 9ef4b9d8f..a80d74a3a 100644 --- a/plugins/sinks/http/http.go +++ b/plugins/sinks/http/http.go @@ -13,7 +13,6 @@ import ( "github.com/odpf/meteor/models" "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/registry" - "github.com/odpf/meteor/utils" "github.com/odpf/salt/log" "github.com/pkg/errors" ) @@ -28,46 +27,45 @@ type Config struct { SuccessCode int `mapstructure:"success_code" default:"200"` } -var sampleConfig = ` -# The url (hostname and route) of the http service -url: https://compass.com/route -# Additional HTTP headers, multiple headers value are separated by a comma -headers: - X-Other-Header: value1, value2 -` +var info = plugins.Info{ + Description: "Send metadata to http service", + Summary: summary, + Tags: []string{"http", "sink"}, + SampleConfig: ` + # The url (hostname and route) of the http service + url: https://compass.com/route + # Additional HTTP headers, multiple headers value are separated by a comma + headers: + X-Other-Header: value1, value2 + `, +} type httpClient interface { Do(*http.Request) (*http.Response, error) } type Sink struct { + plugins.BasePlugin client httpClient config Config logger log.Logger } func New(c httpClient, logger log.Logger) plugins.Syncer { - sink := &Sink{client: c, logger: logger} - return sink -} - -func (s 
*Sink) Info() plugins.Info { - return plugins.Info{ - Description: "Send metadata to http service", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"http", "sink"}, + s := &Sink{ + logger: logger, + client: c, } -} + s.BasePlugin = plugins.NewBasePlugin(info, &s.config) -func (s *Sink) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &s.config) + return s } -func (s *Sink) Init(ctx context.Context, configMap map[string]interface{}) (err error) { - if err = s.Validate(configMap); err != nil { - return plugins.InvalidConfigError{Type: plugins.PluginTypeSink, PluginName: "http"} +func (s *Sink) Init(ctx context.Context, config plugins.Config) (err error) { + if err = s.BasePlugin.Init(ctx, config); err != nil { + return err } + return } diff --git a/plugins/sinks/http/http_test.go b/plugins/sinks/http/http_test.go index 5ce454bca..a2a0ce758 100644 --- a/plugins/sinks/http/http_test.go +++ b/plugins/sinks/http/http_test.go @@ -1,3 +1,6 @@ +//go:build plugins +// +build plugins + package http_test import ( @@ -7,7 +10,6 @@ import ( "net/http" "testing" - "github.com/alecthomas/assert" "github.com/dnaeon/go-vcr/v2/recorder" "github.com/odpf/meteor/models" commonv1beta1 "github.com/odpf/meteor/models/odpf/assets/common/v1beta1" @@ -17,6 +19,7 @@ import ( h "github.com/odpf/meteor/plugins/sinks/http" testutils "github.com/odpf/meteor/test/utils" "github.com/pkg/errors" + "github.com/stretchr/testify/assert" ) //go:embed README.md @@ -35,8 +38,8 @@ func TestSink(t *testing.T) { "Accept": "application/json", }, } - err := httpSink.Init(context.TODO(), config) - assert.Equal(t, err, plugins.InvalidConfigError{Type: plugins.PluginTypeSink, PluginName: "http"}) + err := httpSink.Init(context.TODO(), plugins.Config{RawConfig: config}) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) t.Run("should return no error for valid config, without optional values", func(t *testing.T) { @@ -45,7 +48,7 @@ func TestSink(t *testing.T) { "url": "http://sitename.com", "method": "POST", } - err := httpSink.Init(context.TODO(), config) + err := httpSink.Init(context.TODO(), plugins.Config{RawConfig: config}) assert.NoError(t, err) }) @@ -70,7 +73,7 @@ func TestSink(t *testing.T) { "Accept": "application/json", }, } - err = httpSink.Init(context.TODO(), config) + err = httpSink.Init(context.TODO(), plugins.Config{RawConfig: config}) assert.NoError(t, err) defer httpSink.Close() err = httpSink.Sink(context.TODO(), getExpectedVal()) @@ -78,7 +81,7 @@ func TestSink(t *testing.T) { // change value of url in config config["url"] = "https://random-incorrect-url.odpf.com" - err = httpSink.Init(context.TODO(), config) + err = httpSink.Init(context.TODO(), plugins.Config{RawConfig: config}) assert.NoError(t, err) err = httpSink.Sink(context.TODO(), getExpectedVal()) assert.Error(t, err) @@ -86,7 +89,7 @@ func TestSink(t *testing.T) { // change value of method in config config["method"] = "RANDOM" config["url"] = "http://127.0.0.1:54927" - err = httpSink.Init(context.TODO(), config) + err = httpSink.Init(context.TODO(), plugins.Config{RawConfig: config}) assert.NoError(t, err) err = httpSink.Sink(context.TODO(), getExpectedVal()) assert.Error(t, err) @@ -116,7 +119,7 @@ func TestSink(t *testing.T) { "Accept": "application/json", }, } - err = httpSink.Init(context.TODO(), config) + err = httpSink.Init(context.TODO(), plugins.Config{RawConfig: config}) assert.NoError(t, err) defer httpSink.Close() err = httpSink.Sink(context.TODO(), getExpectedVal()) @@ -147,7 
+150,7 @@ func TestSink(t *testing.T) { "Accept": "application/json", }, } - err = httpSink.Init(context.TODO(), config) + err = httpSink.Init(context.TODO(), plugins.Config{RawConfig: config}) assert.NoError(t, err) defer httpSink.Close() err = httpSink.Sink(context.TODO(), getExpectedVal()) diff --git a/plugins/sinks/kafka/sink.go b/plugins/sinks/kafka/sink.go index b2bba9edd..acb8f53ac 100644 --- a/plugins/sinks/kafka/sink.go +++ b/plugins/sinks/kafka/sink.go @@ -12,7 +12,7 @@ import ( "github.com/odpf/meteor/models" "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/registry" - "github.com/odpf/meteor/utils" + "github.com/odpf/salt/log" "github.com/pkg/errors" kafka "github.com/segmentio/kafka-go" "google.golang.org/protobuf/proto" @@ -27,42 +27,41 @@ type Config struct { KeyPath string `mapstructure:"key_path"` } -var sampleConfig = ` - # Kafka broker addresses - brokers: "localhost:9092" - # The Kafka topic to write to - topic: sample-topic-name - # The path to the key field in the payload - key_path: xxx` +var info = plugins.Info{ + Description: "Sink metadata to Apache Kafka topic", + Summary: summary, + Tags: []string{"kafka", "topic", "sink"}, + SampleConfig: ` + # Kafka broker addresses + brokers: "localhost:9092" + # The Kafka topic to write to + topic: sample-topic-name + # The path to the key field in the payload + key_path: xxx`, +} type ProtoReflector interface { ProtoReflect() protoreflect.Message } type Sink struct { + plugins.BasePlugin writer *kafka.Writer config Config + logger log.Logger } -func New() plugins.Syncer { - return new(Sink) -} - -func (s *Sink) Info() plugins.Info { - return plugins.Info{ - Description: "Sink metadata to Apache Kafka topic", - Summary: summary, - SampleConfig: sampleConfig, - Tags: []string{"kafka", "topic", "sink"}, +func New(logger log.Logger) plugins.Syncer { + s := &Sink{ + logger: logger, } -} + s.BasePlugin = plugins.NewBasePlugin(info, &s.config) -func (s *Sink) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return s } -func (s *Sink) Init(ctx context.Context, configMap map[string]interface{}) (err error) { - if err := utils.BuildConfig(configMap, &s.config); err != nil { +func (s *Sink) Init(ctx context.Context, config plugins.Config) (err error) { + if err = s.BasePlugin.Init(ctx, config); err != nil { return err } @@ -198,7 +197,7 @@ func createWriter(config Config) *kafka.Writer { func init() { if err := registry.Sinks.Register("kafka", func() plugins.Syncer { - return &Sink{} + return New(plugins.GetLog()) }); err != nil { panic(err) } diff --git a/plugins/sinks/stencil/stencil.go b/plugins/sinks/stencil/stencil.go index 3d33f7354..7fada8af6 100644 --- a/plugins/sinks/stencil/stencil.go +++ b/plugins/sinks/stencil/stencil.go @@ -14,7 +14,6 @@ import ( assetsv1beta1 "github.com/odpf/meteor/models/odpf/assets/v1beta1" "github.com/odpf/meteor/plugins" "github.com/odpf/meteor/registry" - "github.com/odpf/meteor/utils" "github.com/odpf/salt/log" "github.com/pkg/errors" ) @@ -29,14 +28,19 @@ type Config struct { Format string `mapstructure:"format" validate:"oneof=json avro" default:"json"` } -var sampleConfig = ` -# The hostname of the stencil service -host: https://stencil.com -# The namespace ID of the stencil service -namespace_id: myNamespace -# The schema format in which data will sink to stencil -format: avro -` +var info = plugins.Info{ + Description: "Send metadata to stencil http service", + Summary: summary, + Tags: []string{"http", "sink"}, + 
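The kafka hunk above and the stencil one that follows converge on the same plugin shape. A minimal sketch of that shape, assuming plugins.NewBasePlugin stores the info and the config pointer so that BasePlugin.Init can later decode and validate the raw recipe config into it; the Sink/Close methods and registry registration are unchanged and omitted here.

```go
package examplesink

import (
	"context"

	"github.com/odpf/meteor/plugins"
	"github.com/odpf/salt/log"
)

// Config is what BasePlugin.Init decodes the recipe's raw config into.
type Config struct {
	Brokers string `mapstructure:"brokers" validate:"required"`
	Topic   string `mapstructure:"topic" validate:"required"`
}

var info = plugins.Info{
	Description:  "Example sink, shown only to illustrate the new plugin shape",
	Summary:      "example summary",
	Tags:         []string{"example", "sink"},
	SampleConfig: `brokers: "localhost:9092"`,
}

// Sink gets Info() and Validate() from the embedded BasePlugin.
type Sink struct {
	plugins.BasePlugin
	config Config
	logger log.Logger
}

func New(logger log.Logger) *Sink {
	s := &Sink{logger: logger}
	// BasePlugin keeps the pointer to s.config, so a later Init or Validate
	// can decode and validate the raw recipe config into it.
	s.BasePlugin = plugins.NewBasePlugin(info, &s.config)
	return s
}

func (s *Sink) Init(ctx context.Context, config plugins.Config) error {
	// Decoding, defaulting and validation happen in BasePlugin.Init;
	// plugin-specific setup (writers, clients, and so on) would follow here.
	return s.BasePlugin.Init(ctx, config)
}
```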
SampleConfig: ` + # The hostname of the stencil service + host: https://stencil.com + # The namespace ID of the stencil service + namespace_id: myNamespace + # The schema format in which data will sink to stencil + format: avro + `, +} // httpClient holds the set of methods require for creating request type httpClient interface { @@ -45,6 +49,7 @@ type httpClient interface { // Sink manages the sinking of data to Stencil type Sink struct { + plugins.BasePlugin client httpClient config Config logger log.Logger @@ -52,29 +57,19 @@ type Sink struct { // New returns a pointer to an initialized Sink Object func New(c httpClient, logger log.Logger) plugins.Syncer { - sink := &Sink{client: c, logger: logger} - return sink -} - -// Info returns the brief information about the sink -func (s *Sink) Info() plugins.Info { - return plugins.Info{ - Description: "Send metadata to stencil http service", - SampleConfig: sampleConfig, - Summary: summary, - Tags: []string{"http", "sink"}, + s := &Sink{ + logger: logger, + client: c, } -} + s.BasePlugin = plugins.NewBasePlugin(info, &s.config) -// Validate validates the configuration of the sink -func (s *Sink) Validate(configMap map[string]interface{}) (err error) { - return utils.BuildConfig(configMap, &Config{}) + return s } // Init initializes the sink -func (s *Sink) Init(_ context.Context, configMap map[string]interface{}) (err error) { - if err = utils.BuildConfig(configMap, &s.config); err != nil { - return plugins.InvalidConfigError{Type: plugins.PluginTypeSink} +func (s *Sink) Init(ctx context.Context, config plugins.Config) (err error) { + if err = s.BasePlugin.Init(ctx, config); err != nil { + return err } return diff --git a/plugins/sinks/stencil/stencil_test.go b/plugins/sinks/stencil/stencil_test.go index 9cced537e..8d4e1383e 100644 --- a/plugins/sinks/stencil/stencil_test.go +++ b/plugins/sinks/stencil/stencil_test.go @@ -1,3 +1,6 @@ +//go:build plugins +// +build plugins + package stencil_test import ( @@ -40,9 +43,9 @@ func TestInit(t *testing.T) { for i, config := range invalidConfigs { t.Run(fmt.Sprintf("test invalid config #%d", i+1), func(t *testing.T) { stencilSink := stencil.New(newMockHTTPClient(config, http.MethodPost, url, stencil.JsonSchema{}), testUtils.Logger) - err := stencilSink.Init(context.TODO(), config) + err := stencilSink.Init(context.TODO(), plugins.Config{RawConfig: config}) - assert.Equal(t, plugins.InvalidConfigError{Type: plugins.PluginTypeSink}, err) + assert.ErrorAs(t, err, &plugins.InvalidConfigError{}) }) } }) @@ -60,11 +63,11 @@ func TestSink(t *testing.T) { ctx := context.TODO() stencilSink := stencil.New(client, testUtils.Logger) - err := stencilSink.Init(ctx, map[string]interface{}{ + err := stencilSink.Init(ctx, plugins.Config{RawConfig: map[string]interface{}{ "host": host, "namespace_id": namespaceID, "format": "json", - }) + }}) if err != nil { t.Fatal(err) } @@ -83,11 +86,11 @@ func TestSink(t *testing.T) { ctx := context.TODO() stencilSink := stencil.New(client, testUtils.Logger) - err := stencilSink.Init(ctx, map[string]interface{}{ + err := stencilSink.Init(ctx, plugins.Config{RawConfig: map[string]interface{}{ "host": host, "namespace_id": namespaceID, "format": "json", - }) + }}) if err != nil { t.Fatal(err) } @@ -351,7 +354,7 @@ func TestSink(t *testing.T) { ctx := context.TODO() stencilSink := stencil.New(client, testUtils.Logger) - err := stencilSink.Init(ctx, tc.config) + err := stencilSink.Init(ctx, plugins.Config{RawConfig: tc.config}) if err != nil { t.Fatal(err) } @@ -558,7 +561,7 @@ func 
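The stencil Config above also illustrates how the three struct tags are consumed by the shared buildConfig helper added in plugins/util.go further down. A hypothetical struct, annotated with that behaviour (ordering inferred from the helper: defaults first, then mapstructure decoding, then validation):

```go
package example

// exampleConfig is hypothetical; the comments describe what buildConfig does
// with each tag.
type exampleConfig struct {
	Host        string `mapstructure:"host" validate:"required"`                         // key looked up in the recipe's raw config map
	NamespaceID string `mapstructure:"namespace_id" validate:"required"`                 // reported back using the mapstructure name on failure
	Format      string `mapstructure:"format" validate:"oneof=json avro" default:"json"` // defaulted when the key is absent, then validated
}
```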
TestSink(t *testing.T) { ctx := context.TODO() stencilSink := stencil.New(client, testUtils.Logger) - err := stencilSink.Init(ctx, tc.config) + err := stencilSink.Init(ctx, plugins.Config{RawConfig: tc.config}) if err != nil { t.Fatal(err) } diff --git a/plugins/util.go b/plugins/util.go new file mode 100644 index 000000000..949c2df31 --- /dev/null +++ b/plugins/util.go @@ -0,0 +1,61 @@ +package plugins + +import ( + "errors" + "fmt" + "reflect" + "strings" + + "github.com/go-playground/validator/v10" + "github.com/mcuadros/go-defaults" + "github.com/mitchellh/mapstructure" + "github.com/odpf/meteor/models" +) + +var validate *validator.Validate + +func init() { + validate = validator.New() + validate.RegisterTagNameFunc(func(fld reflect.StructField) string { + configName := strings.SplitN(fld.Tag.Get("mapstructure"), ",", 2)[0] + + if configName == "-" { + return "" + } + return configName + }) +} + +// BuildConfig builds a config struct from a map +func buildConfig(configMap map[string]interface{}, c interface{}) (err error) { + defaults.SetDefaults(c) + + if err = mapstructure.Decode(configMap, c); err != nil { + return err + } + if err = validate.Struct(c); err == nil { + return nil + } + + var validationErr validator.ValidationErrors + if errors.As(err, &validationErr) { + var configErrors []ConfigError + for _, fieldErr := range validationErr { + key := strings.TrimPrefix(fieldErr.Namespace(), "Config.") + configErrors = append(configErrors, ConfigError{ + Key: key, + Message: fieldErr.Error(), + }) + } + return InvalidConfigError{ + Errors: configErrors, + } + } + + return err +} + +func BigQueryURN(projectID, datasetID, tableID string) string { + fqn := fmt.Sprintf("%s:%s.%s", projectID, datasetID, tableID) + return models.NewURN("bigquery", projectID, "table", fqn) +} diff --git a/plugins/util_test.go b/plugins/util_test.go new file mode 100644 index 000000000..1e39cabee --- /dev/null +++ b/plugins/util_test.go @@ -0,0 +1,21 @@ +package plugins_test + +import ( + "testing" + + "github.com/odpf/meteor/plugins" + "github.com/stretchr/testify/assert" +) + +func TestBigQueryURN(t *testing.T) { + t.Run("should create bigquery URN", func(t *testing.T) { + project := "my-project" + dataset := "my-dataset" + table := "my-table" + + actual := plugins.BigQueryURN(project, dataset, table) + expected := "urn:bigquery:my-project:table:my-project:my-dataset.my-table" + + assert.Equal(t, expected, actual) + }) +} diff --git a/recipe/node.go b/recipe/node.go index c1bf72a40..f0bb38b39 100644 --- a/recipe/node.go +++ b/recipe/node.go @@ -20,6 +20,7 @@ type RecipeNode struct { type PluginNode struct { Name yaml.Node `json:"name" yaml:"name"` Type yaml.Node `json:"type" yaml:"type"` + Scope yaml.Node `json:"scope" yaml:"scope"` Config map[string]yaml.Node `json:"config" yaml:"config"` } @@ -60,11 +61,13 @@ func (node RecipeNode) toRecipe() (recipe Recipe, err error) { err = fmt.Errorf("error building sinks :%w", err) return } + recipe = Recipe{ Name: node.Name.Value, Version: node.Version.Value, Source: PluginRecipe{ Name: node.Source.Name.Value, + Scope: node.Source.Scope.Value, Config: sourceConfig, Node: node.Source, }, diff --git a/recipe/reader_test.go b/recipe/reader_test.go index 50ea5262a..278272ac7 100644 --- a/recipe/reader_test.go +++ b/recipe/reader_test.go @@ -42,7 +42,8 @@ func TestReaderRead(t *testing.T) { { Name: "test-recipe", Source: recipe.PluginRecipe{ - Name: "test-source", + Name: "test-source", + Scope: "my-scope", Config: map[string]interface{}{ "foo": "bar", }, @@ -73,7 
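Because buildConfig now attaches per-field details to InvalidConfigError, downstream code can report them individually. A hedged sketch of a consumer; the helper name is made up, and it only assumes the Errors/Key/Message fields shown above and that InvalidConfigError implements error as a value type.

```go
package example

import (
	"errors"
	"fmt"

	"github.com/odpf/meteor/plugins"
)

// describeConfigError is a hypothetical helper showing how callers can read
// the per-field details carried by InvalidConfigError; e.Key is the
// mapstructure name of the offending field.
func describeConfigError(err error) []string {
	var cfgErr plugins.InvalidConfigError
	if !errors.As(err, &cfgErr) {
		return []string{err.Error()}
	}
	msgs := make([]string, 0, len(cfgErr.Errors))
	for _, e := range cfgErr.Errors {
		msgs = append(msgs, fmt.Sprintf("%s: %s", e.Key, e.Message))
	}
	return msgs
}
```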
+74,8 @@ func TestReaderRead(t *testing.T) { Name: "test-recipe-no-name", Version: "v1beta1", Source: recipe.PluginRecipe{ - Name: "test-source", + Name: "test-source", + Scope: "my-scope", Config: map[string]interface{}{ "foo": "bar", }, @@ -110,7 +112,8 @@ func TestReaderRead(t *testing.T) { { Name: "test-recipe", Source: recipe.PluginRecipe{ - Name: "test-source", + Name: "test-source", + Scope: "my-scope", Config: map[string]interface{}{ "username": username, "password": password, @@ -165,7 +168,8 @@ func TestReaderRead(t *testing.T) { { Name: "test-recipe-no-name", Source: recipe.PluginRecipe{ - Name: "test-source", + Name: "test-source", + Scope: "my-scope", Config: map[string]interface{}{ "foo": "bar", }, @@ -180,7 +184,8 @@ func TestReaderRead(t *testing.T) { { Name: "test-recipe", Source: recipe.PluginRecipe{ - Name: "test-source", + Name: "test-source", + Scope: "my-scope", Config: map[string]interface{}{ "username": username, "password": password, @@ -202,7 +207,8 @@ func TestReaderRead(t *testing.T) { { Name: "test-recipe", Source: recipe.PluginRecipe{ - Name: "test-source", + Name: "test-source", + Scope: "my-scope", Config: map[string]interface{}{ "foo": "bar", }, @@ -232,7 +238,8 @@ func TestReaderRead(t *testing.T) { expected := recipe.Recipe{ Name: "test-recipe", Source: recipe.PluginRecipe{ - Name: "test-source", + Name: "test-source", + Scope: "my-scope", Config: map[string]interface{}{ "username": username, "password": password, @@ -263,7 +270,8 @@ func TestReaderRead(t *testing.T) { expected := recipe.Recipe{ Name: "test-recipe", Source: recipe.PluginRecipe{ - Name: "test-source", + Name: "test-source", + Scope: "my-scope", Config: map[string]interface{}{ "username": username, "password": password, @@ -304,6 +312,7 @@ func compareRecipes(t *testing.T, expected, actual recipe.Recipe) { assert.Equal(t, len(expected.Processors), len(actual.Processors)) assert.Equal(t, expected.Source.Name, actual.Source.Name) + assert.Equal(t, expected.Source.Scope, actual.Source.Scope) assert.Equal(t, expected.Source.Config, actual.Source.Config) for i := range actual.Sinks { assert.Equal(t, expected.Sinks[i].Name, actual.Sinks[i].Name) diff --git a/recipe/recipe.go b/recipe/recipe.go index d7dd18829..b342d9a6a 100644 --- a/recipe/recipe.go +++ b/recipe/recipe.go @@ -14,6 +14,7 @@ type Recipe struct { // generating the plugins code for a recipe. 
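For quick reference, the source shape the reader tests now expect, with the new Scope field populated from the recipe's scope key; values mirror the updated testdata recipes.

```go
package example

import "github.com/odpf/meteor/recipe"

// expectedSource mirrors the updated reader_test expectations; Scope is the
// new optional field (Name remains the only required one per the struct tags).
var expectedSource = recipe.PluginRecipe{
	Name:  "test-source",
	Scope: "my-scope",
	Config: map[string]interface{}{
		"foo": "bar",
	},
}
```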
type PluginRecipe struct { Name string `json:"name" yaml:"name" validate:"required"` + Scope string `json:"scope" yaml:"scope"` Config map[string]interface{} `json:"config" yaml:"config"` Node PluginNode } diff --git a/recipe/recipe_test.go b/recipe/recipe_test.go index f75d11b19..725fdc42e 100644 --- a/recipe/recipe_test.go +++ b/recipe/recipe_test.go @@ -10,7 +10,7 @@ import ( "github.com/stretchr/testify/require" ) -var testLog = log.NewLogrus(log.LogrusWithLevel("info")) +var testLog = log.NewNoop() // TestRecipeGetLine tests recipe by line number func TestRecipeGetLine(t *testing.T) { @@ -26,7 +26,7 @@ func TestRecipeGetLine(t *testing.T) { }) t.Run("should return config source lines", func(t *testing.T) { - expectedLineNum := []int{6, 7, 8} + expectedLineNum := []int{7, 8, 9} var lineNum []int srcConfig := rcp.Source.Node.Config for _, j := range srcConfig { @@ -37,24 +37,24 @@ func TestRecipeGetLine(t *testing.T) { }) t.Run("should return config source line for a specific config key", func(t *testing.T) { - expectedLineNum := 7 + expectedLineNum := 8 srcConfigKey := rcp.Source.Node.Config["srcKey2"] assert.Equal(t, expectedLineNum, srcConfigKey.Line) }) t.Run("should return processors line and column", func(t *testing.T) { - assert.Equal(t, 10, rcp.Processors[0].Node.Name.Line) + assert.Equal(t, 11, rcp.Processors[0].Node.Name.Line) assert.Equal(t, 11, rcp.Processors[0].Node.Name.Column) - assert.Equal(t, 15, rcp.Processors[1].Node.Name.Line) + assert.Equal(t, 16, rcp.Processors[1].Node.Name.Line) assert.Equal(t, 11, rcp.Processors[1].Node.Name.Column) }) t.Run("should return sinks line and column", func(t *testing.T) { - assert.Equal(t, 21, rcp.Sinks[0].Node.Name.Line) + assert.Equal(t, 22, rcp.Sinks[0].Node.Name.Line) assert.Equal(t, 11, rcp.Sinks[0].Node.Name.Column) - assert.Equal(t, 26, rcp.Sinks[1].Node.Name.Line) + assert.Equal(t, 27, rcp.Sinks[1].Node.Name.Line) assert.Equal(t, 11, rcp.Sinks[1].Node.Name.Column) }) } @@ -62,7 +62,7 @@ func TestRecipeGetLine(t *testing.T) { // TestRecipeGetLineBySrcTypeTag tests recipe source with tag `type` by line number func TestRecipeGetLineBySrcTypeTag(t *testing.T) { reader := recipe.NewReader(testLog, "") - r, err := reader.Read("./testdata/src- typeTag-recipe-read-line.yaml") + r, err := reader.Read("./testdata/src-typeTag-recipe-read-line.yaml") require.NoError(t, err) require.Len(t, r, 1) rcp := r[0] @@ -73,7 +73,7 @@ func TestRecipeGetLineBySrcTypeTag(t *testing.T) { }) t.Run("should return config source lines", func(t *testing.T) { - expectedLineNum := []int{6, 7, 8} + expectedLineNum := []int{7, 8, 9} var lineNum []int srcConfig := rcp.Source.Node.Config for _, j := range srcConfig { @@ -84,7 +84,7 @@ func TestRecipeGetLineBySrcTypeTag(t *testing.T) { }) t.Run("should return config source line for a specific config key", func(t *testing.T) { - expectedLineNum := 7 + expectedLineNum := 8 srcConfigKey := rcp.Source.Node.Config["srcKey2"] assert.Equal(t, expectedLineNum, srcConfigKey.Line) }) diff --git a/recipe/testdata/dir_2/missing-version.yaml b/recipe/testdata/dir_2/missing-version.yaml index 0bc91660d..73d542016 100644 --- a/recipe/testdata/dir_2/missing-version.yaml +++ b/recipe/testdata/dir_2/missing-version.yaml @@ -1,6 +1,7 @@ name: test-recipe source: name: test-source + scope: my-scope config: foo: bar sinks: diff --git a/recipe/testdata/incorrect-version.yaml b/recipe/testdata/incorrect-version.yaml index 873adcde1..5dcae3eca 100644 --- a/recipe/testdata/incorrect-version.yaml +++ 
b/recipe/testdata/incorrect-version.yaml @@ -2,6 +2,7 @@ name: recipe-three version: v1alpha0 source: type: kafka + scope: my-scope config: broker: "main-broker.com:9092" sinks: diff --git a/recipe/testdata/missing-version.yaml b/recipe/testdata/missing-version.yaml index 687464c42..c837932b7 100644 --- a/recipe/testdata/missing-version.yaml +++ b/recipe/testdata/missing-version.yaml @@ -1,6 +1,7 @@ name: recipe-three source: type: kafka + scope: my-scope config: broker: "main-broker.com:9092" sinks: diff --git a/recipe/testdata/recipe-read-line.yaml b/recipe/testdata/recipe-read-line.yaml index ccf2945eb..555c84dc7 100644 --- a/recipe/testdata/recipe-read-line.yaml +++ b/recipe/testdata/recipe-read-line.yaml @@ -2,6 +2,7 @@ name: recipe-read-line version: v1beta1 source: name: srcA + scope: my-scope config: srcKey1: srcVal1 srcKey2: srcVal2 diff --git a/recipe/testdata/src- typeTag-recipe-read-line.yaml b/recipe/testdata/src-typeTag-recipe-read-line.yaml similarity index 93% rename from recipe/testdata/src- typeTag-recipe-read-line.yaml rename to recipe/testdata/src-typeTag-recipe-read-line.yaml index 92b638a35..36c5e6afd 100644 --- a/recipe/testdata/src- typeTag-recipe-read-line.yaml +++ b/recipe/testdata/src-typeTag-recipe-read-line.yaml @@ -2,6 +2,7 @@ name: recipe-read-line version: v1beta1 source: type: srcA + scope: my-scope config: srcKey1: srcVal1 srcKey2: srcVal2 diff --git a/recipe/testdata/testdir/test-recipe-no-name.yaml b/recipe/testdata/testdir/test-recipe-no-name.yaml index e7951b6ba..909b12de9 100644 --- a/recipe/testdata/testdir/test-recipe-no-name.yaml +++ b/recipe/testdata/testdir/test-recipe-no-name.yaml @@ -1,6 +1,7 @@ version: v1beta1 source: name: test-source + scope: my-scope config: foo: bar sinks: diff --git a/recipe/testdata/testdir/test-recipe-variables.yaml b/recipe/testdata/testdir/test-recipe-variables.yaml index eaf8d171e..7dbd7e2bd 100644 --- a/recipe/testdata/testdir/test-recipe-variables.yaml +++ b/recipe/testdata/testdir/test-recipe-variables.yaml @@ -2,6 +2,7 @@ name: test-recipe version: v1beta1 source: name: test-source + scope: my-scope config: username: {{.source_username}} password: "{{.source_password}}" diff --git a/recipe/testdata/testdir/test-recipe.yaml b/recipe/testdata/testdir/test-recipe.yaml index ced651e47..3684fc008 100644 --- a/recipe/testdata/testdir/test-recipe.yaml +++ b/recipe/testdata/testdir/test-recipe.yaml @@ -2,6 +2,7 @@ name: test-recipe version: v1beta1 source: name: test-source + scope: my-scope config: foo: bar sinks: diff --git a/test/mocks/plugin.go b/test/mocks/plugin.go index dafa107ba..1a6aed0ab 100644 --- a/test/mocks/plugin.go +++ b/test/mocks/plugin.go @@ -17,12 +17,12 @@ func (m *Plugin) Info() plugins.Info { return args.Get(0).(plugins.Info) } -func (m *Plugin) Validate(config map[string]interface{}) error { +func (m *Plugin) Validate(config plugins.Config) error { args := m.Called(config) return args.Error(0) } -func (m *Plugin) Init(ctx context.Context, config map[string]interface{}) error { +func (m *Plugin) Init(ctx context.Context, config plugins.Config) error { args := m.Called(ctx, config) return args.Error(0) }
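With the mocks now taking plugins.Config, test expectations have to match the wrapped value rather than the raw map. A sketch, assuming mocks.Plugin embeds testify's mock.Mock and lives at the import path below; the function name is illustrative only.

```go
package example

import (
	"context"
	"testing"

	"github.com/odpf/meteor/plugins"
	"github.com/odpf/meteor/test/mocks"
	"github.com/stretchr/testify/mock"
)

// exampleExpectation shows the updated expectation shape: the expected
// argument is plugins.Config wrapping the raw map, not the map itself.
func exampleExpectation(t *testing.T, p *mocks.Plugin, raw map[string]interface{}) {
	p.On("Init", mock.Anything, plugins.Config{RawConfig: raw}).Return(nil).Once()
	defer p.AssertExpectations(t)

	_ = p.Init(context.TODO(), plugins.Config{RawConfig: raw})
}
```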