From 8dedad534c551cde29b185150954cbf8dbb52005 Mon Sep 17 00:00:00 2001 From: Dery Rahman Ahaddienata Date: Tue, 22 Oct 2024 10:30:20 +0700 Subject: [PATCH 1/5] feat: add upstream identifier for maxcompute --- plugin/plugin_service.go | 30 +-- plugin/plugin_service_test.go | 40 +++- .../bq_upstream_identifier.go | 3 +- .../bq_upstream_identifier_test.go | 19 +- .../maxcompute_upstream_identifier.go | 48 +++++ .../maxcompute_upstream_identifier_test.go | 3 + .../parser/{bq_parser.go => query_parser.go} | 61 +++--- ...bq_parser_test.go => query_parser_test.go} | 177 +++++++++--------- .../parser/urn_decorator.go | 38 ++++ .../upstream_identifier.go | 8 + sdk/plugin/plugin.go | 3 +- 11 files changed, 280 insertions(+), 150 deletions(-) create mode 100644 plugin/upstream_identifier/maxcompute_upstream_identifier.go create mode 100644 plugin/upstream_identifier/maxcompute_upstream_identifier_test.go rename plugin/upstream_identifier/parser/{bq_parser.go => query_parser.go} (57%) rename plugin/upstream_identifier/parser/{bq_parser_test.go => query_parser_test.go} (55%) create mode 100644 plugin/upstream_identifier/parser/urn_decorator.go diff --git a/plugin/plugin_service.go b/plugin/plugin_service.go index ae5a782a5c..79c1083568 100644 --- a/plugin/plugin_service.go +++ b/plugin/plugin_service.go @@ -35,6 +35,7 @@ type EvaluatorFactory interface { type UpstreamIdentifierFactory interface { GetBQUpstreamIdentifier(ctx context.Context, svcAcc string, evaluators ...evaluator.Evaluator) (upstreamidentifier.UpstreamIdentifier, error) + GetMaxcomputeUpstreamIdentifier(ctx context.Context, evaluators ...evaluator.Evaluator) (upstreamidentifier.UpstreamIdentifier, error) } type PluginService struct { @@ -108,20 +109,27 @@ func (s PluginService) IdentifyUpstreams(ctx context.Context, taskName string, c evaluators = append(evaluators, evaluator) } - if parserType != plugin.BQParser { + switch parserType { + case plugin.MaxcomputeParser: + upstreamIdentifier, err := s.upstreamIdentifierFactory.GetMaxcomputeUpstreamIdentifier(ctx, evaluators...) + if err != nil { + return nil, err + } + upstreamIdentifiers = append(upstreamIdentifiers, upstreamIdentifier) + case plugin.BQParser: + svcAcc, ok := compiledConfig[bqSvcAccKey] + if !ok { + return nil, fmt.Errorf("secret " + bqSvcAccKey + " required to generate upstream is not found") + } + upstreamIdentifier, err := s.upstreamIdentifierFactory.GetBQUpstreamIdentifier(ctx, svcAcc, evaluators...) + if err != nil { + return nil, err + } + upstreamIdentifiers = append(upstreamIdentifiers, upstreamIdentifier) + default: s.l.Warn("parserType %s is not supported", parserType) continue } - // for now parser type is only scoped for bigquery, so that it uses bigquery as upstream identifier - svcAcc, ok := compiledConfig[bqSvcAccKey] - if !ok { - return nil, fmt.Errorf("secret " + bqSvcAccKey + " required to generate upstream is not found") - } - upstreamIdentifier, err := s.upstreamIdentifierFactory.GetBQUpstreamIdentifier(ctx, svcAcc, evaluators...) 
- if err != nil { - return nil, err - } - upstreamIdentifiers = append(upstreamIdentifiers, upstreamIdentifier) } // identify all upstream resource urns by all identifier from given asset diff --git a/plugin/plugin_service_test.go b/plugin/plugin_service_test.go index c91edd117e..1ed3771759 100644 --- a/plugin/plugin_service_test.go +++ b/plugin/plugin_service_test.go @@ -542,13 +542,49 @@ func (_m *UpstreamIdentifierFactory) GetBQUpstreamIdentifier(ctx context.Context return r0, r1 } +// GetMaxcomputeUpstreamIdentifier provides a mock function with given fields: ctx, evaluators +func (_m *UpstreamIdentifierFactory) GetMaxcomputeUpstreamIdentifier(ctx context.Context, evaluators ...evaluator.Evaluator) (upstreamidentifier.UpstreamIdentifier, error) { + _va := make([]interface{}, len(evaluators)) + for _i := range evaluators { + _va[_i] = evaluators[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for GetMaxcomputeUpstreamIdentifier") + } + + var r0 upstreamidentifier.UpstreamIdentifier + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, ...evaluator.Evaluator) (upstreamidentifier.UpstreamIdentifier, error)); ok { + return rf(ctx, evaluators...) + } + if rf, ok := ret.Get(0).(func(context.Context, ...evaluator.Evaluator) upstreamidentifier.UpstreamIdentifier); ok { + r0 = rf(ctx, evaluators...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(upstreamidentifier.UpstreamIdentifier) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, ...evaluator.Evaluator) error); ok { + r1 = rf(ctx, evaluators...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // NewUpstreamIdentifierFactory creates a new instance of UpstreamIdentifierFactory. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. 
 func NewUpstreamIdentifierFactory(t interface {
 	mock.TestingT
 	Cleanup(func())
-},
-) *UpstreamIdentifierFactory {
+}) *UpstreamIdentifierFactory {
 	mock := &UpstreamIdentifierFactory{}
 	mock.Mock.Test(t)

diff --git a/plugin/upstream_identifier/bq_upstream_identifier.go b/plugin/upstream_identifier/bq_upstream_identifier.go
index 8f1a307137..5690ce77d9 100644
--- a/plugin/upstream_identifier/bq_upstream_identifier.go
+++ b/plugin/upstream_identifier/bq_upstream_identifier.go
@@ -9,6 +9,7 @@ import (
 	"github.com/goto/optimus/core/resource"
 	"github.com/goto/optimus/ext/store/bigquery"
 	"github.com/goto/optimus/internal/errors"
+	"github.com/goto/optimus/plugin/upstream_identifier/parser"
 )
 
 type (
@@ -129,7 +130,7 @@ func NewBQUpstreamIdentifier(logger log.Logger, parserFunc ParserFunc, bqExtract
 
 	return &BQUpstreamIdentifier{
 		logger:         logger,
-		parserFunc:     parserFunc,
+		parserFunc:     parser.BQURNDecorator(parserFunc),
 		extractorFunc:  bqExtractorDecorator(logger, bqExtractorFunc),
 		evaluatorFuncs: sanitizedEvaluatorFuncs,
 	}, nil
diff --git a/plugin/upstream_identifier/bq_upstream_identifier_test.go b/plugin/upstream_identifier/bq_upstream_identifier_test.go
index cd577a6ba1..650cf069a4 100644
--- a/plugin/upstream_identifier/bq_upstream_identifier_test.go
+++ b/plugin/upstream_identifier/bq_upstream_identifier_test.go
@@ -85,7 +85,7 @@ func TestIdentifyResources(t *testing.T) {
 		defer bqExtractorFunc.AssertExpectations(t)
 
 		evaluatorFunc.On("Execute", assets).Return(assets["./query.sql"])
-		parserFunc.On("Execute", assets["./query.sql"]).Return([]string{"bigquery://project1:dataset1.name1"})
+		parserFunc.On("Execute", assets["./query.sql"]).Return([]string{"project1.dataset1.name1"})
 		bqExtractorFunc.On("Execute", ctx, mock.Anything).Return(nil, errors.New("some error"))
 
 		bqUpstreamIdentifier, err := upstreamidentifier.NewBQUpstreamIdentifier(logger, parserFunc.Execute, bqExtractorFunc.Execute, evaluatorFunc.Execute)
@@ -105,9 +105,8 @@ func TestIdentifyResources(t *testing.T) {
 		defer bqExtractorFunc.AssertExpectations(t)
 
 		evaluatorFunc.On("Execute", assets).Return(assets["./query.sql"])
-		parserFunc.On("Execute", assets["./query.sql"]).Return([]string{"broken://project1;dataset1.name1"})
-		// bq extractor should receives empty resource urn, since the urn construction is fail
-		bqExtractorFunc.On("Execute", ctx, []bigquery.ResourceURN{}).Return(map[bigquery.ResourceURN]string{}, nil)
+		parserFunc.On("Execute", assets["./query.sql"]).Return([]string{"project1;dataset1.name1"})
+		// the bq extractor should not be executed since the parser result is empty
 
 		bqUpstreamIdentifier, err := upstreamidentifier.NewBQUpstreamIdentifier(logger, parserFunc.Execute, bqExtractorFunc.Execute, evaluatorFunc.Execute)
 		assert.NoError(t, err)
@@ -135,13 +134,13 @@ func TestIdentifyResources(t *testing.T) {
 		sqlView2 := "select 1 from `project1.dataset1.name1` join `project1.dataset1.name3` on true"
 
 		evaluatorFunc.On("Execute", assets).Return(assets["./query.sql"])
-		parserFunc.On("Execute", assets["./query.sql"]).Return([]string{"bigquery://project1:dataset1.name1"})
+		parserFunc.On("Execute", assets["./query.sql"]).Return([]string{"project1.dataset1.name1"})
 		bqExtractorFunc.On("Execute", ctx, []bigquery.ResourceURN{resourceURN1}).Return(map[bigquery.ResourceURN]string{resourceURN1: sqlView1}, nil)
 
-		parserFunc.On("Execute", sqlView1).Return([]string{"bigquery://project1:dataset1.name2"})
+		parserFunc.On("Execute", sqlView1).Return([]string{"project1.dataset1.name2"})
 		bqExtractorFunc.On("Execute", ctx, 
[]bigquery.ResourceURN{resourceURN2}).Return(map[bigquery.ResourceURN]string{resourceURN2: sqlView2}, nil) - parserFunc.On("Execute", sqlView2).Return([]string{"bigquery://project1:dataset1.name1", "bigquery://project1:dataset1.name3"}) + parserFunc.On("Execute", sqlView2).Return([]string{"project1.dataset1.name1", "project1.dataset1.name3"}) bqExtractorFunc.On("Execute", ctx, []bigquery.ResourceURN{resourceURN1, resourceURN3}).Return(map[bigquery.ResourceURN]string{resourceURN1: sqlView1, resourceURN3: ""}, nil) parserFunc.On("Execute", "").Return([]string{}) @@ -172,13 +171,13 @@ func TestIdentifyResources(t *testing.T) { sqlView2 := "select 1 from `project1.dataset1.name3`" evaluatorFunc.On("Execute", assets).Return(assets["./query.sql"]) - parserFunc.On("Execute", assets["./query.sql"]).Return([]string{"bigquery://project1:dataset1.name1"}) + parserFunc.On("Execute", assets["./query.sql"]).Return([]string{"project1.dataset1.name1"}) bqExtractorFunc.On("Execute", ctx, []bigquery.ResourceURN{resourceURN1}).Return(map[bigquery.ResourceURN]string{resourceURN1: sqlView1}, nil) - parserFunc.On("Execute", sqlView1).Return([]string{"bigquery://project1:dataset1.name2", "bigquery://project1:dataset1.name3"}) + parserFunc.On("Execute", sqlView1).Return([]string{"project1.dataset1.name2", "project1.dataset1.name3"}) bqExtractorFunc.On("Execute", ctx, []bigquery.ResourceURN{resourceURN2, resourceURN3}).Return(map[bigquery.ResourceURN]string{resourceURN2: sqlView2, resourceURN3: ""}, nil) - parserFunc.On("Execute", sqlView2).Return([]string{"bigquery://project1:dataset1.name3"}) + parserFunc.On("Execute", sqlView2).Return([]string{"project1.dataset1.name3"}) bqExtractorFunc.On("Execute", ctx, []bigquery.ResourceURN{resourceURN3}).Return(map[bigquery.ResourceURN]string{resourceURN3: ""}, nil) parserFunc.On("Execute", "").Return([]string{}) diff --git a/plugin/upstream_identifier/maxcompute_upstream_identifier.go b/plugin/upstream_identifier/maxcompute_upstream_identifier.go new file mode 100644 index 0000000000..bfbb2f4cb9 --- /dev/null +++ b/plugin/upstream_identifier/maxcompute_upstream_identifier.go @@ -0,0 +1,48 @@ +package upstreamidentifier + +import ( + "context" + + "github.com/goto/optimus/core/resource" + "github.com/goto/optimus/plugin/upstream_identifier/parser" + "github.com/goto/salt/log" +) + +type MaxcomputeUpstreamIdentifier struct { + logger log.Logger + parserFunc ParserFunc + evaluatorFuncs []EvalAssetFunc +} + +func NewMaxcomputeUpstreamIdentifier(logger log.Logger, parserFunc ParserFunc, evaluatorFuncs ...EvalAssetFunc) (*MaxcomputeUpstreamIdentifier, error) { + return &MaxcomputeUpstreamIdentifier{ + logger: logger, + parserFunc: parser.MaxcomputeURNDecorator(parserFunc), + evaluatorFuncs: evaluatorFuncs, + }, nil +} + +func (g MaxcomputeUpstreamIdentifier) IdentifyResources(ctx context.Context, assets map[string]string) ([]resource.URN, error) { + resourceURNs := []resource.URN{} + + // generate resource urn with upstream from each evaluator + for _, evaluatorFunc := range g.evaluatorFuncs { + query := evaluatorFunc(assets) + if query == "" { + continue + } + resources := g.identifyResources(query) + resourceURNs = append(resourceURNs, resources...) 
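+		// e.g. a query "select 1 from project1.schema1.name1" contributes the URN
+		// "maxcompute://project1.schema1.name1" once the parsed table name passes
+		// through parser.MaxcomputeURNDecorator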
+ } + return resourceURNs, nil +} + +func (g MaxcomputeUpstreamIdentifier) identifyResources(query string) []resource.URN { + resources := g.parserFunc(query) + resourceURNs := make([]resource.URN, len(resources)) + for _, r := range resources { + resourceURN, _ := resource.NewURN("maxcompute", r) // TODO: use dedicated function new resource from string + resourceURNs = append(resourceURNs, resourceURN) + } + return resourceURNs +} diff --git a/plugin/upstream_identifier/maxcompute_upstream_identifier_test.go b/plugin/upstream_identifier/maxcompute_upstream_identifier_test.go new file mode 100644 index 0000000000..52a7ba24e2 --- /dev/null +++ b/plugin/upstream_identifier/maxcompute_upstream_identifier_test.go @@ -0,0 +1,3 @@ +package upstreamidentifier_test + +// TODO: Implement test diff --git a/plugin/upstream_identifier/parser/bq_parser.go b/plugin/upstream_identifier/parser/query_parser.go similarity index 57% rename from plugin/upstream_identifier/parser/bq_parser.go rename to plugin/upstream_identifier/parser/query_parser.go index 8fd776926c..956252f812 100644 --- a/plugin/upstream_identifier/parser/bq_parser.go +++ b/plugin/upstream_identifier/parser/query_parser.go @@ -3,31 +3,29 @@ package parser import ( "regexp" "strings" - - "github.com/goto/optimus/ext/store/bigquery" ) var ( topLevelUpstreamsPattern = regexp.MustCompile( - "(?i)(?:FROM)\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+)\\.([\\w-]+)\\.([\\w-\\*?]+)`?" + //nolint:gocritic + "(?i)(?:FROM)\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+\\.[\\w-]+\\.[\\w-\\*?]+)`?" + //nolint:gocritic "|" + - "(?i)(?:JOIN)\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+)\\.([\\w-]+)\\.([\\w-]+)`?" + + "(?i)(?:JOIN)\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+\\.[\\w-]+\\.[\\w-]+)`?" + "|" + - "(?i)(?:WITH)\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+)\\.([\\w-]+)\\.([\\w-]+)`?\\s+(?:AS)" + + "(?i)(?:WITH)\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+\\.[\\w-]+\\.[\\w-]+)`?\\s+(?:AS)" + "|" + // ref: https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#merge_statement - "(?i)(?:MERGE)\\s*(?:INTO)?\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+)\\.([\\w-]+)\\.([\\w-]+)`?" + // to ignore + "(?i)(?:MERGE)\\s*(?:INTO)?\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+\\.[\\w-]+\\.[\\w-]+)`?" + // to ignore "|" + // ref: https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#insert_statement - "(?i)(?:INSERT)\\s*(?:INTO)?\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+)\\.([\\w-]+)\\.([\\w-]+)`?" + // to ignore + "(?i)(?:INSERT)\\s*(?:INTO)?\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+\\.[\\w-]+\\.[\\w-]+)`?" + // to ignore "|" + // ref: https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#delete_statement - "(?i)(?:DELETE)\\s*(?:FROM)?\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+)\\.([\\w-]+)\\.([\\w-]+)`?" + // to ignore + "(?i)(?:DELETE)\\s*(?:FROM)?\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+\\.[\\w-]+\\.[\\w-]+)`?" + // to ignore "|" + // ref: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language - "(?i)(?:CREATE)\\s*(?:OR\\s+REPLACE)?\\s*(?:VIEW|(?:TEMP\\s+)?TABLE)\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+)\\.([\\w-]+)\\.([\\w-]+)`?" + // to ignore + "(?i)(?:CREATE)\\s*(?:OR\\s+REPLACE)?\\s*(?:VIEW|(?:TEMP\\s+)?TABLE)\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+\\.[\\w-]+\\.[\\w-]+)`?" 
+ // to ignore "|" + - "(?i)(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`([\\w-]+)\\.([\\w-]+)\\.([\\w-]+)`\\s*(?:AS)?") + "(?i)(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+\\.[\\w-]+\\.[\\w-]+)`?\\s*(?:AS)?") singleLineCommentsPattern = regexp.MustCompile(`(--.*)`) multiLineCommentsPattern = regexp.MustCompile(`(((/\*)+?[\w\W]*?(\*/)+))`) @@ -37,41 +35,33 @@ var ( func ParseTopLevelUpstreamsFromQuery(query string) []string { cleanedQuery := cleanQueryFromComment(query) - resourcesFound := make(map[bigquery.ResourceURN]bool) - pseudoResources := make(map[bigquery.ResourceURN]bool) + tableFound := map[string]bool{} + pseudoTable := map[string]bool{} matches := topLevelUpstreamsPattern.FindAllStringSubmatch(cleanedQuery, -1) for _, match := range matches { - var projectIdx, datasetIdx, nameIdx, ignoreUpstreamIdx int + var tableIdx, ignoreUpstreamIdx int tokens := strings.Fields(match[0]) clause := strings.ToLower(tokens[0]) switch clause { case "from": - ignoreUpstreamIdx, projectIdx, datasetIdx, nameIdx = 1, 2, 3, 4 + ignoreUpstreamIdx, tableIdx = 1, 2 case "join": - ignoreUpstreamIdx, projectIdx, datasetIdx, nameIdx = 5, 6, 7, 8 + ignoreUpstreamIdx, tableIdx = 3, 4 case "with": - ignoreUpstreamIdx, projectIdx, datasetIdx, nameIdx = 9, 10, 11, 12 + ignoreUpstreamIdx, tableIdx = 5, 6 case "merge": - ignoreUpstreamIdx, projectIdx, datasetIdx, nameIdx = 13, 14, 15, 16 + ignoreUpstreamIdx, tableIdx = 7, 8 case "insert": - ignoreUpstreamIdx, projectIdx, datasetIdx, nameIdx = 17, 18, 19, 20 + ignoreUpstreamIdx, tableIdx = 9, 10 case "delete": - ignoreUpstreamIdx, projectIdx, datasetIdx, nameIdx = 21, 22, 23, 24 + ignoreUpstreamIdx, tableIdx = 11, 12 case "create": - ignoreUpstreamIdx, projectIdx, datasetIdx, nameIdx = 25, 26, 27, 28 + ignoreUpstreamIdx, tableIdx = 13, 14 default: - ignoreUpstreamIdx, projectIdx, datasetIdx, nameIdx = 29, 30, 31, 32 - } - - project := match[projectIdx] - dataset := match[datasetIdx] - name := match[nameIdx] - - if project == "" || dataset == "" || name == "" { - continue + ignoreUpstreamIdx, tableIdx = 15, 16 } if strings.TrimSpace(match[ignoreUpstreamIdx]) == "@ignoreupstream" { @@ -82,22 +72,21 @@ func ParseTopLevelUpstreamsFromQuery(query string) []string { continue } - resourceURN, _ := bigquery.NewResourceURN(project, dataset, name) - + tableName := match[tableIdx] if clause == "with" { - pseudoResources[resourceURN] = true + pseudoTable[tableName] = true } else { - resourcesFound[resourceURN] = true + tableFound[tableName] = true } } output := []string{} - for resourceURN := range resourcesFound { - if pseudoResources[resourceURN] { + for table := range tableFound { + if pseudoTable[table] { continue } - output = append(output, resourceURN.URN()) + output = append(output, table) } return output diff --git a/plugin/upstream_identifier/parser/bq_parser_test.go b/plugin/upstream_identifier/parser/query_parser_test.go similarity index 55% rename from plugin/upstream_identifier/parser/bq_parser_test.go rename to plugin/upstream_identifier/parser/query_parser_test.go index 0d0b42689b..69ad1546ab 100644 --- a/plugin/upstream_identifier/parser/bq_parser_test.go +++ b/plugin/upstream_identifier/parser/query_parser_test.go @@ -1,148 +1,148 @@ package parser_test import ( + "fmt" "testing" "github.com/stretchr/testify/assert" - "github.com/goto/optimus/ext/store/bigquery" "github.com/goto/optimus/plugin/upstream_identifier/parser" ) func TestParseTopLevelUpstreamsFromQuery(t *testing.T) { t.Run("parse test", func(t *testing.T) { testCases := []struct { - Name 
string - InputQuery string - ExpectedResourceURNs []string + Name string + InputQuery string + ExpectedTables []string }{ { - Name: "empty query", - InputQuery: "", - ExpectedResourceURNs: []string{}, + Name: "empty query", + InputQuery: "", + ExpectedTables: []string{}, }, { Name: "simple query", InputQuery: "select * from data-engineering.testing.table1", - ExpectedResourceURNs: []string{ - newBQResourceURN("data-engineering", "testing", "table1"), + ExpectedTables: []string{ + newTable("data-engineering", "testing", "table1"), }, }, { Name: "simple query with hyphenated table name", InputQuery: "select * from data-engineering.testing.table_name-1", - ExpectedResourceURNs: []string{ - newBQResourceURN("data-engineering", "testing", "table_name-1"), + ExpectedTables: []string{ + newTable("data-engineering", "testing", "table_name-1"), }, }, { Name: "simple query with quotes", InputQuery: "select * from `data-engineering.testing.table1`", - ExpectedResourceURNs: []string{ - newBQResourceURN("data-engineering", "testing", "table1"), + ExpectedTables: []string{ + newTable("data-engineering", "testing", "table1"), }, }, { - Name: "simple query without project name", - InputQuery: "select * from testing.table1", - ExpectedResourceURNs: []string{}, + Name: "simple query without project name", + InputQuery: "select * from testing.table1", + ExpectedTables: []string{}, }, { Name: "simple query with simple join", InputQuery: "select * from data-engineering.testing.table1 join data-engineering.testing.table2 on some_field", - ExpectedResourceURNs: []string{ - newBQResourceURN("data-engineering", "testing", "table1"), - newBQResourceURN("data-engineering", "testing", "table2"), + ExpectedTables: []string{ + newTable("data-engineering", "testing", "table1"), + newTable("data-engineering", "testing", "table2"), }, }, { Name: "simple query with outer join", InputQuery: "select * from data-engineering.testing.table1 outer join data-engineering.testing.table2 on some_field", - ExpectedResourceURNs: []string{ - newBQResourceURN("data-engineering", "testing", "table1"), - newBQResourceURN("data-engineering", "testing", "table2"), + ExpectedTables: []string{ + newTable("data-engineering", "testing", "table1"), + newTable("data-engineering", "testing", "table2"), }, }, { Name: "subquery", InputQuery: "select * from (select order_id from data-engineering.testing.orders)", - ExpectedResourceURNs: []string{ - newBQResourceURN("data-engineering", "testing", "orders"), + ExpectedTables: []string{ + newTable("data-engineering", "testing", "orders"), }, }, { Name: "`with` clause + simple query", InputQuery: "with `information.foo.bar` as (select * from `data-engineering.testing.data`) select * from `information.foo.bar`", - ExpectedResourceURNs: []string{ - newBQResourceURN("data-engineering", "testing", "data"), + ExpectedTables: []string{ + newTable("data-engineering", "testing", "data"), }, }, { Name: "`with` clause with missing project name", InputQuery: "with `foo.bar` as (select * from `data-engineering.testing.data`) select * from `foo.bar`", - ExpectedResourceURNs: []string{ - newBQResourceURN("data-engineering", "testing", "data"), + ExpectedTables: []string{ + newTable("data-engineering", "testing", "data"), }, }, { Name: "project name with dashes", InputQuery: "select * from `foo-bar.baz.data`", - ExpectedResourceURNs: []string{ - newBQResourceURN("foo-bar", "baz", "data"), + ExpectedTables: []string{ + newTable("foo-bar", "baz", "data"), }, }, { Name: "dataset and project name with dashes", InputQuery: 
"select * from `foo-bar.bar-baz.data", - ExpectedResourceURNs: []string{ - newBQResourceURN("foo-bar", "bar-baz", "data"), + ExpectedTables: []string{ + newTable("foo-bar", "bar-baz", "data"), }, }, { Name: "`with` clause + join", InputQuery: "with dedup_source as (select * from `project.fire.fly`) select * from dedup_source join `project.maximum.overdrive` on dedup_source.left = `project.maximum.overdrive`.right", - ExpectedResourceURNs: []string{ - newBQResourceURN("project", "fire", "fly"), - newBQResourceURN("project", "maximum", "overdrive"), + ExpectedTables: []string{ + newTable("project", "fire", "fly"), + newTable("project", "maximum", "overdrive"), }, }, { Name: "double `with` + pseudoreference", InputQuery: "with s1 as (select * from internal.pseudo.ref), with internal.pseudo.ref as (select * from `project.another.name`) select * from s1", - ExpectedResourceURNs: []string{ - newBQResourceURN("project", "another", "name"), + ExpectedTables: []string{ + newTable("project", "another", "name"), }, }, { - Name: "simple query that ignores from upstream", - InputQuery: "select * from /* @ignoreupstream */ data-engineering.testing.table1", - ExpectedResourceURNs: []string{}, + Name: "simple query that ignores from upstream", + InputQuery: "select * from /* @ignoreupstream */ data-engineering.testing.table1", + ExpectedTables: []string{}, }, { - Name: "simple query that ignores from upstream with quotes", - InputQuery: "select * from /* @ignoreupstream */ `data-engineering.testing.table1`", - ExpectedResourceURNs: []string{}, + Name: "simple query that ignores from upstream with quotes", + InputQuery: "select * from /* @ignoreupstream */ `data-engineering.testing.table1`", + ExpectedTables: []string{}, }, { Name: "simple query with simple join that ignores from upstream", InputQuery: "select * from /* @ignoreupstream */ data-engineering.testing.table1 join data-engineering.testing.table2 on some_field", - ExpectedResourceURNs: []string{ - newBQResourceURN("data-engineering", "testing", "table2"), + ExpectedTables: []string{ + newTable("data-engineering", "testing", "table2"), }, }, { Name: "simple query with simple join that has comments but does not ignores upstream", InputQuery: "select * from /* */ data-engineering.testing.table1 join data-engineering.testing.table2 on some_field", - ExpectedResourceURNs: []string{ - newBQResourceURN("data-engineering", "testing", "table1"), - newBQResourceURN("data-engineering", "testing", "table2"), + ExpectedTables: []string{ + newTable("data-engineering", "testing", "table1"), + newTable("data-engineering", "testing", "table2"), }, }, { Name: "simple query with simple join that ignores upstream of join", InputQuery: "select * from data-engineering.testing.table1 join /* @ignoreupstream */ data-engineering.testing.table2 on some_field", - ExpectedResourceURNs: []string{ - newBQResourceURN("data-engineering", "testing", "table1"), + ExpectedTables: []string{ + newTable("data-engineering", "testing", "table1"), }, }, { @@ -153,8 +153,8 @@ func TestParseTopLevelUpstreamsFromQuery(t *testing.T) { ) SELECT id FROM /* @ignoreupstream */ my_temp_table `, - ExpectedResourceURNs: []string{ - newBQResourceURN("data-engineering", "testing", "an_upstream_table"), + ExpectedTables: []string{ + newTable("data-engineering", "testing", "an_upstream_table"), }, }, { @@ -165,12 +165,12 @@ func TestParseTopLevelUpstreamsFromQuery(t *testing.T) { ) SELECT id FROM my_temp_table `, - ExpectedResourceURNs: []string{}, + ExpectedTables: []string{}, }, { - Name: "simple 
query with simple join that ignores upstream of join", - InputQuery: "WITH my_temp_table AS ( SELECT id, name FROM /* @ignoreupstream */ data-engineering.testing.an_upstream_table ) SELECT id FROM /* @ignoreupstream */ my_temp_table", - ExpectedResourceURNs: []string{}, + Name: "simple query with simple join that ignores upstream of join", + InputQuery: "WITH my_temp_table AS ( SELECT id, name FROM /* @ignoreupstream */ data-engineering.testing.an_upstream_table ) SELECT id FROM /* @ignoreupstream */ my_temp_table", + ExpectedTables: []string{}, }, { Name: "simple query with another query inside comment", @@ -178,8 +178,8 @@ func TestParseTopLevelUpstreamsFromQuery(t *testing.T) { select * from data-engineering.testing.tableABC -- select * from data-engineering.testing.table1 join data-engineering.testing.table2 on some_field `, - ExpectedResourceURNs: []string{ - newBQResourceURN("data-engineering", "testing", "tableABC"), + ExpectedTables: []string{ + newTable("data-engineering", "testing", "tableABC"), }, }, { @@ -189,8 +189,8 @@ func TestParseTopLevelUpstreamsFromQuery(t *testing.T) { /* select * from data-engineering.testing.table1 join data-engineering.testing.table2 on some_field */ join /* @ignoreupstream */ data-engineering.testing.table2 on some_field `, - ExpectedResourceURNs: []string{ - newBQResourceURN("data-engineering", "testing", "tableABC"), + ExpectedTables: []string{ + newTable("data-engineering", "testing", "tableABC"), }, }, { @@ -201,8 +201,8 @@ func TestParseTopLevelUpstreamsFromQuery(t *testing.T) { from data-engineering.testing.tableDEF, `, - ExpectedResourceURNs: []string{ - newBQResourceURN("data-engineering", "testing", "tableDEF"), + ExpectedTables: []string{ + newTable("data-engineering", "testing", "tableDEF"), }, }, { @@ -216,9 +216,9 @@ func TestParseTopLevelUpstreamsFromQuery(t *testing.T) { ` + "`data-engineering.testing.tableDEF`," + ` as backup_table, /* @ignoreupstream */ data-engineering.testing.tableGHI as ignored_table, `, - ExpectedResourceURNs: []string{ - newBQResourceURN("data-engineering", "testing", "tableABC"), - newBQResourceURN("data-engineering", "testing", "tableDEF"), + ExpectedTables: []string{ + newTable("data-engineering", "testing", "tableABC"), + newTable("data-engineering", "testing", "tableDEF"), }, }, { @@ -235,9 +235,9 @@ func TestParseTopLevelUpstreamsFromQuery(t *testing.T) { from /*@ignoreupstream*/ data-engineering.testing.tableC* `, - ExpectedResourceURNs: []string{ - newBQResourceURN("data-engineering", "testing", "tableA*"), - newBQResourceURN("data-engineering", "testing", "tableB*"), + ExpectedTables: []string{ + newTable("data-engineering", "testing", "tableA*"), + newTable("data-engineering", "testing", "tableB*"), }, }, { @@ -254,8 +254,8 @@ func TestParseTopLevelUpstreamsFromQuery(t *testing.T) { from data-engineering.testing.table_a join /* @ignoreupstream */ data-engineering.testing.table_d `, - ExpectedResourceURNs: []string{ - newBQResourceURN("data-engineering", "testing", "table_a"), + ExpectedTables: []string{ + newTable("data-engineering", "testing", "table_a"), }, }, { @@ -276,42 +276,41 @@ func TestParseTopLevelUpstreamsFromQuery(t *testing.T) { join /* @ignoreupstream */ data-engineering.testing.table_e `, - ExpectedResourceURNs: []string{ - newBQResourceURN("data-engineering", "testing", "table_a"), - newBQResourceURN("data-engineering", "testing", "table_d"), + ExpectedTables: []string{ + newTable("data-engineering", "testing", "table_a"), + newTable("data-engineering", "testing", "table_d"), }, }, { - 
Name: "ignore merge into query", - InputQuery: "merge into `data-engineering.testing.table_a` as target", - ExpectedResourceURNs: []string{}, + Name: "ignore merge into query", + InputQuery: "merge into `data-engineering.testing.table_a` as target", + ExpectedTables: []string{}, }, { - Name: "ignore insert into query", - InputQuery: "insert into `data-engineering.testing.table_a`(id,name)", - ExpectedResourceURNs: []string{}, + Name: "ignore insert into query", + InputQuery: "insert into `data-engineering.testing.table_a`(id,name)", + ExpectedTables: []string{}, }, { - Name: "ignore delete + insert query", - InputQuery: "delete from `data-engineering.testing.table_b`; create or replace table `data-engineering.testing.table_b`", - ExpectedResourceURNs: []string{}, + Name: "ignore delete + insert query", + InputQuery: "delete from `data-engineering.testing.table_b`; create or replace table `data-engineering.testing.table_b`", + ExpectedTables: []string{}, }, { - Name: "ignore create or replace query", - InputQuery: "create or replace table `data-engineering.testing.table_b`", - ExpectedResourceURNs: []string{}, + Name: "ignore create or replace query", + InputQuery: "create or replace table `data-engineering.testing.table_b`", + ExpectedTables: []string{}, }, } for _, test := range testCases { t.Run(test.Name, func(t *testing.T) { actualResourceURNs := parser.ParseTopLevelUpstreamsFromQuery(test.InputQuery) - assert.ElementsMatch(t, test.ExpectedResourceURNs, actualResourceURNs) + assert.ElementsMatch(t, test.ExpectedTables, actualResourceURNs) }) } }) } -func newBQResourceURN(project, dataset, name string) string { - resourceURN, _ := bigquery.NewResourceURN(project, dataset, name) - return resourceURN.URN() +func newTable(project, dataset, name string) string { + return fmt.Sprintf("%s.%s.%s", project, dataset, name) } diff --git a/plugin/upstream_identifier/parser/urn_decorator.go b/plugin/upstream_identifier/parser/urn_decorator.go new file mode 100644 index 0000000000..3f125c471a --- /dev/null +++ b/plugin/upstream_identifier/parser/urn_decorator.go @@ -0,0 +1,38 @@ +package parser + +import ( + "fmt" + "strings" +) + +func BQURNDecorator(f func(string) []string) func(string) []string { + return func(rawResource string) []string { + resourceURNs := []string{} + tables := f(rawResource) + for _, table := range tables { + tableSplitted := strings.Split(table, ".") + if len(tableSplitted) != 3 { + continue + } + resourceURN := fmt.Sprintf("bigquery://%s:%s.%s", tableSplitted[0], tableSplitted[1], tableSplitted[2]) + resourceURNs = append(resourceURNs, resourceURN) + } + return resourceURNs + } +} + +func MaxcomputeURNDecorator(f func(string) []string) func(string) []string { + return func(rawResource string) []string { + resourceURNs := []string{} + tables := f(rawResource) + for _, table := range tables { + tableSplitted := strings.Split(table, ".") + if len(tableSplitted) != 3 { + continue + } + resourceURN := fmt.Sprintf("maxcompute://%s.%s.%s", tableSplitted[0], tableSplitted[1], tableSplitted[2]) + resourceURNs = append(resourceURNs, resourceURN) + } + return resourceURNs + } +} diff --git a/plugin/upstream_identifier/upstream_identifier.go b/plugin/upstream_identifier/upstream_identifier.go index 54cbbc3684..1d9ac5b9f0 100644 --- a/plugin/upstream_identifier/upstream_identifier.go +++ b/plugin/upstream_identifier/upstream_identifier.go @@ -45,6 +45,14 @@ func (u *UpstreamIdentifierFactory) GetBQUpstreamIdentifier(ctx context.Context, return NewBQUpstreamIdentifier(u.l, 
parser.ParseTopLevelUpstreamsFromQuery, e.Extract, evaluatorFuncs...) } +func (u *UpstreamIdentifierFactory) GetMaxcomputeUpstreamIdentifier(ctx context.Context, evaluators ...evaluator.Evaluator) (UpstreamIdentifier, error) { + evaluatorFuncs := make([]EvalAssetFunc, len(evaluators)) + for i, evaluator := range evaluators { + evaluatorFuncs[i] = evaluator.Evaluate + } + return NewMaxcomputeUpstreamIdentifier(u.l, parser.ParseTopLevelUpstreamsFromQuery, evaluatorFuncs...) +} + func NewUpstreamIdentifierFactory(logger log.Logger) (*UpstreamIdentifierFactory, error) { if logger == nil { return nil, fmt.Errorf("logger is nil") diff --git a/sdk/plugin/plugin.go b/sdk/plugin/plugin.go index 5c80a2cf47..9ff49413c2 100644 --- a/sdk/plugin/plugin.go +++ b/sdk/plugin/plugin.go @@ -46,7 +46,8 @@ type ( ) const ( - BQParser ParserType = "bq" + BQParser ParserType = "bq" + MaxcomputeParser ParserType = "maxcompute" ) type Evaluator struct { From a76da38ccec4c114555533f2956622c73bed2647 Mon Sep 17 00:00:00 2001 From: Dery Rahman Ahaddienata Date: Tue, 22 Oct 2024 10:41:12 +0700 Subject: [PATCH 2/5] fix: linter --- plugin/plugin_service_test.go | 3 +- .../maxcompute_upstream_identifier.go | 30 ++++++++++++++++--- .../parser/urn_decorator.go | 8 +++-- .../upstream_identifier.go | 2 +- 4 files changed, 34 insertions(+), 9 deletions(-) diff --git a/plugin/plugin_service_test.go b/plugin/plugin_service_test.go index 1ed3771759..03644785b2 100644 --- a/plugin/plugin_service_test.go +++ b/plugin/plugin_service_test.go @@ -584,7 +584,8 @@ func (_m *UpstreamIdentifierFactory) GetMaxcomputeUpstreamIdentifier(ctx context func NewUpstreamIdentifierFactory(t interface { mock.TestingT Cleanup(func()) -}) *UpstreamIdentifierFactory { +}, +) *UpstreamIdentifierFactory { mock := &UpstreamIdentifierFactory{} mock.Mock.Test(t) diff --git a/plugin/upstream_identifier/maxcompute_upstream_identifier.go b/plugin/upstream_identifier/maxcompute_upstream_identifier.go index bfbb2f4cb9..318810920b 100644 --- a/plugin/upstream_identifier/maxcompute_upstream_identifier.go +++ b/plugin/upstream_identifier/maxcompute_upstream_identifier.go @@ -2,10 +2,13 @@ package upstreamidentifier import ( "context" + "fmt" + + "github.com/goto/salt/log" "github.com/goto/optimus/core/resource" + "github.com/goto/optimus/internal/errors" "github.com/goto/optimus/plugin/upstream_identifier/parser" - "github.com/goto/salt/log" ) type MaxcomputeUpstreamIdentifier struct { @@ -15,6 +18,25 @@ type MaxcomputeUpstreamIdentifier struct { } func NewMaxcomputeUpstreamIdentifier(logger log.Logger, parserFunc ParserFunc, evaluatorFuncs ...EvalAssetFunc) (*MaxcomputeUpstreamIdentifier, error) { + me := errors.NewMultiError("create maxcompute upstream generator errors") + if logger == nil { + me.Append(fmt.Errorf("logger is nil")) + } + if parserFunc == nil { + me.Append(fmt.Errorf("parserFunc is nil")) + } + sanitizedEvaluatorFuncs := []EvalAssetFunc{} + for _, evaluatorFunc := range evaluatorFuncs { + if evaluatorFunc != nil { + sanitizedEvaluatorFuncs = append(sanitizedEvaluatorFuncs, evaluatorFunc) + } + } + if len(sanitizedEvaluatorFuncs) == 0 { + me.Append(fmt.Errorf("non-nil evaluatorFuncs is needed")) + } + if me.ToErr() != nil { + return nil, me.ToErr() + } return &MaxcomputeUpstreamIdentifier{ logger: logger, parserFunc: parser.MaxcomputeURNDecorator(parserFunc), @@ -22,7 +44,7 @@ func NewMaxcomputeUpstreamIdentifier(logger log.Logger, parserFunc ParserFunc, e }, nil } -func (g MaxcomputeUpstreamIdentifier) IdentifyResources(ctx context.Context, 
assets map[string]string) ([]resource.URN, error) { +func (g MaxcomputeUpstreamIdentifier) IdentifyResources(_ context.Context, assets map[string]string) ([]resource.URN, error) { resourceURNs := []resource.URN{} // generate resource urn with upstream from each evaluator @@ -40,9 +62,9 @@ func (g MaxcomputeUpstreamIdentifier) IdentifyResources(ctx context.Context, ass func (g MaxcomputeUpstreamIdentifier) identifyResources(query string) []resource.URN { resources := g.parserFunc(query) resourceURNs := make([]resource.URN, len(resources)) - for _, r := range resources { + for i, r := range resources { resourceURN, _ := resource.NewURN("maxcompute", r) // TODO: use dedicated function new resource from string - resourceURNs = append(resourceURNs, resourceURN) + resourceURNs[i] = resourceURN } return resourceURNs } diff --git a/plugin/upstream_identifier/parser/urn_decorator.go b/plugin/upstream_identifier/parser/urn_decorator.go index 3f125c471a..08f1dfbf05 100644 --- a/plugin/upstream_identifier/parser/urn_decorator.go +++ b/plugin/upstream_identifier/parser/urn_decorator.go @@ -5,16 +5,18 @@ import ( "strings" ) +const tablePart = 3 + func BQURNDecorator(f func(string) []string) func(string) []string { return func(rawResource string) []string { resourceURNs := []string{} tables := f(rawResource) for _, table := range tables { tableSplitted := strings.Split(table, ".") - if len(tableSplitted) != 3 { + if len(tableSplitted) != tablePart { continue } - resourceURN := fmt.Sprintf("bigquery://%s:%s.%s", tableSplitted[0], tableSplitted[1], tableSplitted[2]) + resourceURN := fmt.Sprintf("bigquery://%s:%s.%s", tableSplitted[0], tableSplitted[1], tableSplitted[2]) //nolint:nosprintfhostport resourceURNs = append(resourceURNs, resourceURN) } return resourceURNs @@ -27,7 +29,7 @@ func MaxcomputeURNDecorator(f func(string) []string) func(string) []string { tables := f(rawResource) for _, table := range tables { tableSplitted := strings.Split(table, ".") - if len(tableSplitted) != 3 { + if len(tableSplitted) != tablePart { continue } resourceURN := fmt.Sprintf("maxcompute://%s.%s.%s", tableSplitted[0], tableSplitted[1], tableSplitted[2]) diff --git a/plugin/upstream_identifier/upstream_identifier.go b/plugin/upstream_identifier/upstream_identifier.go index 1d9ac5b9f0..ee04978a2a 100644 --- a/plugin/upstream_identifier/upstream_identifier.go +++ b/plugin/upstream_identifier/upstream_identifier.go @@ -45,7 +45,7 @@ func (u *UpstreamIdentifierFactory) GetBQUpstreamIdentifier(ctx context.Context, return NewBQUpstreamIdentifier(u.l, parser.ParseTopLevelUpstreamsFromQuery, e.Extract, evaluatorFuncs...) 
 }
 
-func (u *UpstreamIdentifierFactory) GetMaxcomputeUpstreamIdentifier(ctx context.Context, evaluators ...evaluator.Evaluator) (UpstreamIdentifier, error) {
+func (u *UpstreamIdentifierFactory) GetMaxcomputeUpstreamIdentifier(_ context.Context, evaluators ...evaluator.Evaluator) (UpstreamIdentifier, error) {
 	evaluatorFuncs := make([]EvalAssetFunc, len(evaluators))
 	for i, evaluator := range evaluators {
 		evaluatorFuncs[i] = evaluator.Evaluate

From 2107d4baba8e580271c4feaaa309d53cc299b586 Mon Sep 17 00:00:00 2001
From: Dery Rahman Ahaddienata
Date: Tue, 22 Oct 2024 14:18:08 +0700
Subject: [PATCH 3/5] test: add test case for maxcompute identifier

---
 .../maxcompute_upstream_identifier.go         | 13 +++++++++----
 .../maxcompute_upstream_identifier_test.go    | 60 ++++++++++++++++++-
 2 files changed, 68 insertions(+), 5 deletions(-)

diff --git a/plugin/upstream_identifier/maxcompute_upstream_identifier.go b/plugin/upstream_identifier/maxcompute_upstream_identifier.go
index 318810920b..10664a1021 100644
--- a/plugin/upstream_identifier/maxcompute_upstream_identifier.go
+++ b/plugin/upstream_identifier/maxcompute_upstream_identifier.go
@@ -63,7 +63,12 @@ func (g MaxcomputeUpstreamIdentifier) identifyResources(query string) []resource
 	resources := g.parserFunc(query)
-	resourceURNs := make([]resource.URN, len(resources))
-	for i, r := range resources {
-		resourceURN, _ := resource.NewURN("maxcompute", r) // TODO: use dedicated function new resource from string
-		resourceURNs[i] = resourceURN
+	resourceURNs := make([]resource.URN, 0, len(resources))
+	for _, r := range resources {
+		resourceURN, err := resource.ParseURN(r)
+		if err != nil {
+			g.logger.Error("failed to parse resource urn %s", r)
+			continue
+		}
+		// append only successfully parsed URNs so failed parses leave no zero-value entries
+		resourceURNs = append(resourceURNs, resourceURN)
 	}
 	return resourceURNs
diff --git a/plugin/upstream_identifier/maxcompute_upstream_identifier_test.go b/plugin/upstream_identifier/maxcompute_upstream_identifier_test.go
index 52a7ba24e2..e80b40f804 100644
--- a/plugin/upstream_identifier/maxcompute_upstream_identifier_test.go
+++ b/plugin/upstream_identifier/maxcompute_upstream_identifier_test.go
@@ -1,3 +1,61 @@
 package upstreamidentifier_test
 
-// TODO: Implement test
+import (
+	"context"
+	"testing"
+
+	upstreamidentifier "github.com/goto/optimus/plugin/upstream_identifier"
+	"github.com/goto/salt/log"
+	"github.com/stretchr/testify/assert"
+)
+
+func TestNewMaxcomputeUpstreamIdentifier(t *testing.T) {
+	logger := log.NewNoop()
+	parserFunc := func(string) []string { return nil }
+	evaluatorFunc := func(map[string]string) string { return "" }
+	t.Run("return error when logger is nil", func(t *testing.T) {
+		upstreamIdentifier, err := upstreamidentifier.NewMaxcomputeUpstreamIdentifier(nil, parserFunc, evaluatorFunc)
+		assert.Error(t, err)
+		assert.Nil(t, upstreamIdentifier)
+	})
+	t.Run("return error when parserFunc is nil", func(t *testing.T) {
+		upstreamIdentifier, err := upstreamidentifier.NewMaxcomputeUpstreamIdentifier(logger, nil, evaluatorFunc)
+		assert.Error(t, err)
+		assert.Nil(t, upstreamIdentifier)
+	})
+	t.Run("return error when no evaluators", func(t *testing.T) {
+		upstreamIdentifier, err := upstreamidentifier.NewMaxcomputeUpstreamIdentifier(logger, parserFunc)
+		assert.Error(t, err)
+		assert.Nil(t, upstreamIdentifier)
+	})
+	t.Run("return error when evaluatorFuncs is nil", func(t *testing.T) {
+		upstreamIdentifier, err := upstreamidentifier.NewMaxcomputeUpstreamIdentifier(logger, parserFunc, nil)
+		assert.Error(t, err)
+		assert.Nil(t, upstreamIdentifier)
+	})
+	t.Run("return success", func(t *testing.T) {
+		upstreamIdentifier, err := upstreamidentifier.NewMaxcomputeUpstreamIdentifier(logger, parserFunc, evaluatorFunc)
+		assert.NoError(t, 
err) + assert.NotNil(t, upstreamIdentifier) + }) +} + +func TestMaxcomputeUpstreamIdentifier_IdentifyResources(t *testing.T) { + ctx := context.Background() + logger := log.NewNoop() + assets := map[string]string{ + "./query.sql": "select 1 from project1.schema1.name1", + } + // TODO: adding failure test cases + t.Run("return success", func(t *testing.T) { + parserFunc := func(string) []string { return []string{"project1.schema1.name1"} } + evaluatorFunc := func(map[string]string) string { return "./query.sql" } + upstreamIdentifier, err := upstreamidentifier.NewMaxcomputeUpstreamIdentifier(logger, parserFunc, evaluatorFunc) + assert.NoError(t, err) + assert.NotNil(t, upstreamIdentifier) + resourceURNs, err := upstreamIdentifier.IdentifyResources(ctx, assets) + assert.NoError(t, err) + assert.Len(t, resourceURNs, 1) + assert.Equal(t, "maxcompute://project1.schema1.name1", resourceURNs[0].String()) + }) +} From ac5151284c639cf978e168fcd71e5b0c8aa6c336 Mon Sep 17 00:00:00 2001 From: Dery Rahman Ahaddienata Date: Tue, 22 Oct 2024 15:06:04 +0700 Subject: [PATCH 4/5] fix: linter --- .../upstream_identifier/maxcompute_upstream_identifier_test.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/plugin/upstream_identifier/maxcompute_upstream_identifier_test.go b/plugin/upstream_identifier/maxcompute_upstream_identifier_test.go index e80b40f804..c87663c2f9 100644 --- a/plugin/upstream_identifier/maxcompute_upstream_identifier_test.go +++ b/plugin/upstream_identifier/maxcompute_upstream_identifier_test.go @@ -4,9 +4,10 @@ import ( "context" "testing" - upstreamidentifier "github.com/goto/optimus/plugin/upstream_identifier" "github.com/goto/salt/log" "github.com/stretchr/testify/assert" + + upstreamidentifier "github.com/goto/optimus/plugin/upstream_identifier" ) func TestNewMaxcomputeUpstreamIdentifier(t *testing.T) { From 07872e9a0b0f1849b06f6643b7657bbabaef265e Mon Sep 17 00:00:00 2001 From: Dery Rahman Ahaddienata Date: Wed, 23 Oct 2024 11:37:16 +0700 Subject: [PATCH 5/5] feat: support tick quote in one part of table --- .../parser/query_parser.go | 22 +++++++++++-------- .../parser/query_parser_test.go | 15 +++++++++++++ 2 files changed, 28 insertions(+), 9 deletions(-) diff --git a/plugin/upstream_identifier/parser/query_parser.go b/plugin/upstream_identifier/parser/query_parser.go index 956252f812..3137a545f7 100644 --- a/plugin/upstream_identifier/parser/query_parser.go +++ b/plugin/upstream_identifier/parser/query_parser.go @@ -7,25 +7,25 @@ import ( var ( topLevelUpstreamsPattern = regexp.MustCompile( - "(?i)(?:FROM)\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+\\.[\\w-]+\\.[\\w-\\*?]+)`?" + //nolint:gocritic + "(?i)(?:FROM)\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?(`?[\\w-]+`?\\.`?[\\w-]+`?\\.`?[\\w-\\*?]+`?)`?" + //nolint:gocritic "|" + - "(?i)(?:JOIN)\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+\\.[\\w-]+\\.[\\w-]+)`?" + + "(?i)(?:JOIN)\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?(`?[\\w-]+`?\\.`?[\\w-]+`?\\.`?[\\w-]+`?)`?" + "|" + - "(?i)(?:WITH)\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+\\.[\\w-]+\\.[\\w-]+)`?\\s+(?:AS)" + + "(?i)(?:WITH)\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?(`?[\\w-]+`?\\.`?[\\w-]+`?\\.`?[\\w-]+`?)`?\\s+(?:AS)" + "|" + // ref: https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#merge_statement - "(?i)(?:MERGE)\\s*(?:INTO)?\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+\\.[\\w-]+\\.[\\w-]+)`?" 
+ // to ignore + "(?i)(?:MERGE)\\s*(?:INTO)?\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?(`?[\\w-]+`?\\.`?[\\w-]+`?\\.`?[\\w-]+`?)`?" + // to ignore "|" + // ref: https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#insert_statement - "(?i)(?:INSERT)\\s*(?:INTO)?\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+\\.[\\w-]+\\.[\\w-]+)`?" + // to ignore + "(?i)(?:INSERT)\\s*(?:INTO)?\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?(`?[\\w-]+`?\\.`?[\\w-]+`?\\.`?[\\w-]+`?)`?" + // to ignore "|" + // ref: https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#delete_statement - "(?i)(?:DELETE)\\s*(?:FROM)?\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+\\.[\\w-]+\\.[\\w-]+)`?" + // to ignore + "(?i)(?:DELETE)\\s*(?:FROM)?\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?(`?[\\w-]+`?\\.`?[\\w-]+`?\\.`?[\\w-]+`?)`?" + // to ignore "|" + // ref: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language - "(?i)(?:CREATE)\\s*(?:OR\\s+REPLACE)?\\s*(?:VIEW|(?:TEMP\\s+)?TABLE)\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+\\.[\\w-]+\\.[\\w-]+)`?" + // to ignore + "(?i)(?:CREATE)\\s*(?:OR\\s+REPLACE)?\\s*(?:VIEW|(?:TEMP\\s+)?TABLE)\\s*(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?(`?[\\w-]+`?\\.`?[\\w-]+`?\\.`?[\\w-]+`?)`?" + // to ignore "|" + - "(?i)(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?([\\w-]+\\.[\\w-]+\\.[\\w-]+)`?\\s*(?:AS)?") + "(?i)(?:/\\*\\s*([a-zA-Z0-9@_-]*)\\s*\\*/)?\\s+`?(`?[\\w-]+`?\\.`?[\\w-]+`?\\.`?[\\w-]+`?)`?\\s*(?:AS)?") singleLineCommentsPattern = regexp.MustCompile(`(--.*)`) multiLineCommentsPattern = regexp.MustCompile(`(((/\*)+?[\w\W]*?(\*/)+))`) @@ -72,7 +72,7 @@ func ParseTopLevelUpstreamsFromQuery(query string) []string { continue } - tableName := match[tableIdx] + tableName := cleanTableFromTickQuote(match[tableIdx]) if clause == "with" { pseudoTable[tableName] = true } else { @@ -105,3 +105,7 @@ func cleanQueryFromComment(query string) string { return cleanedQuery } + +func cleanTableFromTickQuote(tableName string) string { + return strings.ReplaceAll(tableName, "`", "") +} diff --git a/plugin/upstream_identifier/parser/query_parser_test.go b/plugin/upstream_identifier/parser/query_parser_test.go index 69ad1546ab..8851d40a07 100644 --- a/plugin/upstream_identifier/parser/query_parser_test.go +++ b/plugin/upstream_identifier/parser/query_parser_test.go @@ -240,6 +240,21 @@ func TestParseTopLevelUpstreamsFromQuery(t *testing.T) { newTable("data-engineering", "testing", "tableB*"), }, }, + { + Name: "have quote in one part of table name", + InputQuery: ` + select * + from ` + + "data-engineering.`testing`.tableB" + ` + + select * + from + /*@ignoreupstream*/ data-engineering.testing.tableC + `, + ExpectedTables: []string{ + newTable("data-engineering", "testing", "tableB"), + }, + }, { Name: "ignore characters after -- comment", InputQuery: `