diff --git a/.github/workflows/verify.yml b/.github/workflows/verify.yml index 032f971336..29f89ffee2 100644 --- a/.github/workflows/verify.yml +++ b/.github/workflows/verify.yml @@ -16,10 +16,10 @@ jobs: with: fetch-depth: 0 - name: golangci-lint - uses: golangci/golangci-lint-action@v3 + uses: golangci/golangci-lint-action@v6 with: skip-go-installation: true - version: v1.55.2 + version: v1.61.0 unit-test: runs-on: ubuntu-latest steps: diff --git a/.golangci.yml b/.golangci.yml index 87d750d2a3..fbaef16be0 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -16,8 +16,6 @@ linters: - durationcheck - errname - errorlint - - execinquery - - exportloopref - forbidigo - gci - ginkgolinter @@ -29,7 +27,6 @@ linters: - gofumpt - goheader - goimports - - gomnd - gomodguard - goprintffuncname - gosec @@ -42,6 +39,7 @@ linters: - makezero - mirror - misspell + - mnd - nakedret - nilerr - nilnil @@ -98,7 +96,7 @@ linters-settings: goconst: ignore-tests: true numbers: true - gomnd: + mnd: ignored-numbers: # Why we have a big range of file permissions - '0o600' - '0o644' @@ -115,6 +113,7 @@ linters-settings: gosec: excludes: - G101 + - G115 gosimple: checks: - "all" @@ -153,7 +152,7 @@ issues: - path: tests linters: - gocritic - - gomnd + - mnd - unparam - testpackage severity: diff --git a/Makefile b/Makefile index 715a24c84b..18b95d2353 100644 --- a/Makefile +++ b/Makefile @@ -58,7 +58,7 @@ lint: install: ## install required dependencies @echo "> installing dependencies" - go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.55.2 + go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.61.0 go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.28.0 go install github.com/bufbuild/buf/cmd/buf@v1.5.0 go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@v1.2.0 diff --git a/client/cmd/extension/run.go b/client/cmd/extension/run.go index b3afdd726c..73f889e7e5 100644 --- a/client/cmd/extension/run.go +++ b/client/cmd/extension/run.go @@ -17,7 +17,7 @@ func generateRunCommands(reservedCommandNames []string) []*cobra.Command { Short: fmt.Sprintf("Execute [%s/%s@%s] extension", owner.Name, project.Name, project.ActiveTagName, ), - RunE: func(cmd *cobra.Command, args []string) error { + RunE: func(_ *cobra.Command, args []string) error { manager, err := getExtensionManager(true, reservedCommandNames...) 
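Context on the gosec `G115` exclusion added in the `.golangci.yml` hunk above: to my understanding, G115 is the integer-overflow-on-conversion check that becomes active with the gosec bundled in golangci-lint v1.61, and excluding it silences findings of the shape sketched below. The snippet is illustrative only and not part of the diff.

```go
package main

import "fmt"

func main() {
	// The pattern gosec's G115 rule reports: converting a wider integer into a
	// narrower type without a range check can silently overflow.
	n := 300
	b := uint8(n) // reported as G115: integer overflow conversion int -> uint8
	fmt.Println(b) // prints 44, not 300
}
```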
if err != nil { return err diff --git a/client/cmd/job/inspect.go b/client/cmd/job/inspect.go index e08cf8beea..61ca34198e 100644 --- a/client/cmd/job/inspect.go +++ b/client/cmd/job/inspect.go @@ -264,7 +264,7 @@ func (e *inspectCommand) displayBasicInfoSection(basicInfoSection *pb.JobInspect e.printLogs(basicInfoSection.Notice) } -func (e *inspectCommand) processJobInspectResponse(resp *pb.JobInspectResponse) error { +func (e *inspectCommand) processJobInspectResponse(resp *pb.JobInspectResponse) error { //nolint:unparam e.displayBasicInfoSection(resp.BasicInfo) e.displayUpstreamSection(resp.Upstreams) e.displayDownstreamSection(resp.Downstreams) diff --git a/client/cmd/playground/window/v1v2/model.go b/client/cmd/playground/window/v1v2/model.go index 2aa828d6ec..5e6bf9fb7d 100644 --- a/client/cmd/playground/window/v1v2/model.go +++ b/client/cmd/playground/window/v1v2/model.go @@ -93,7 +93,7 @@ func (m *model) generateWindowResultView() string { table := tablewriter.NewWriter(buff) table.SetHeader([]string{"Version", "Start Time", "End Time"}) table.Append(m.generateWindowTableRowView(1)) - table.Append(m.generateWindowTableRowView(2)) //nolint: gomnd + table.Append(m.generateWindowTableRowView(2)) //nolint: mnd table.Render() return buff.String() } diff --git a/client/cmd/replay/cancel.go b/client/cmd/replay/cancel.go index 332fa60197..1f5b7ebaa8 100644 --- a/client/cmd/replay/cancel.go +++ b/client/cmd/replay/cancel.go @@ -37,7 +37,7 @@ func CancelCommand() *cobra.Command { Short: "Cancel replay using replay ID", Long: "This operation takes 1 argument, replayID [required] \nwhich UUID format ", Example: "optimus replay cancel ", - Args: func(cmd *cobra.Command, args []string) error { + Args: func(_ *cobra.Command, args []string) error { if len(args) < 1 { return errors.New("replayID is required") } diff --git a/client/cmd/replay/create.go b/client/cmd/replay/create.go index a7889cfbcd..074531cbe5 100644 --- a/client/cmd/replay/create.go +++ b/client/cmd/replay/create.go @@ -59,11 +59,11 @@ func CreateCommand() *cobra.Command { "second is start time[required] of\nreplay, third is end time[optional] of replay. \nDate ranges are inclusive. 
" + "Supported date formats are RFC3339 and \nsimple date YYYY-MM-DD", Example: "optimus replay create <2023-01-01T02:30:00Z00:00> [2023-01-02T02:30:00Z00:00]\noptimus replay create <2023-01-01> [2023-01-02]", - Args: func(cmd *cobra.Command, args []string) error { + Args: func(_ *cobra.Command, args []string) error { if len(args) < 1 { return errors.New("job name is required") } - if len(args) < 2 { //nolint: gomnd + if len(args) < 2 { //nolint: mnd return errors.New("replay start time is required") } return nil @@ -117,7 +117,7 @@ func (r *createCommand) RunE(_ *cobra.Command, args []string) error { jobName := args[0] startTime := args[1] endTime := args[1] - if len(args) >= 3 { //nolint: gomnd + if len(args) >= 3 { //nolint: mnd endTime = args[2] } diff --git a/client/cmd/replay/status.go b/client/cmd/replay/status.go index 4c864ebcdf..574380982a 100644 --- a/client/cmd/replay/status.go +++ b/client/cmd/replay/status.go @@ -39,7 +39,7 @@ func StatusCommand() *cobra.Command { Short: "Get replay detailed status by replay ID", Long: "This operation takes 1 argument, replayID [required] \nwhich UUID format ", Example: "optimus replay status ", - Args: func(cmd *cobra.Command, args []string) error { + Args: func(_ *cobra.Command, args []string) error { if len(args) < 1 { return errors.New("replayID is required") } diff --git a/client/cmd/secret/secret.go b/client/cmd/secret/secret.go index deee24cdeb..57df76101f 100644 --- a/client/cmd/secret/secret.go +++ b/client/cmd/secret/secret.go @@ -46,7 +46,7 @@ func getSecretName(args []string) (string, error) { func getSecretValue(args []string, filePath string, encoded bool) (string, error) { var secretValue string if filePath == "" { - if len(args) < 2 { //nolint: gomnd + if len(args) < 2 { //nolint: mnd return "", errors.New("secret value is required") } secretValue = args[1] diff --git a/client/extension/internal/install_test.go b/client/extension/internal/install_test.go index c6e3cfcb7b..8b3b95e0e4 100644 --- a/client/extension/internal/install_test.go +++ b/client/extension/internal/install_test.go @@ -47,7 +47,7 @@ func (i *InstallManagerTestSuite) TestInstall() { i.Run("should return error if error encountered during extracting remote metadata", func() { factory.ParseRegistry = []model.Parser{ - func(remotePath string) (*model.Metadata, error) { + func(_ string) (*model.Metadata, error) { return nil, errors.New("extraction failed") }, } @@ -70,7 +70,7 @@ func (i *InstallManagerTestSuite) TestInstall() { i.Run("should return error if no parser could recognize remote path", func() { factory.ParseRegistry = []model.Parser{ - func(remotePath string) (*model.Metadata, error) { + func(_ string) (*model.Metadata, error) { return nil, model.ErrUnrecognizedRemotePath }, } @@ -93,7 +93,7 @@ func (i *InstallManagerTestSuite) TestInstall() { i.Run("should return error if error loading manifest", func() { factory.ParseRegistry = []model.Parser{ - func(remotePath string) (*model.Metadata, error) { + func(_ string) (*model.Metadata, error) { return &model.Metadata{}, nil }, } @@ -122,7 +122,7 @@ func (i *InstallManagerTestSuite) TestInstall() { ProviderName: provider, } factory.ParseRegistry = []model.Parser{ - func(remotePath string) (*model.Metadata, error) { + func(_ string) (*model.Metadata, error) { return metadata, nil }, } @@ -153,7 +153,7 @@ func (i *InstallManagerTestSuite) TestInstall() { ProviderName: provider, } factory.ParseRegistry = []model.Parser{ - func(remotePath string) (*model.Metadata, error) { + func(_ string) (*model.Metadata, 
error) { return metadata, nil }, } @@ -190,7 +190,7 @@ func (i *InstallManagerTestSuite) TestInstall() { ProjectName: "optimus-extension-valor", } factory.ParseRegistry = []model.Parser{ - func(remotePath string) (*model.Metadata, error) { + func(_ string) (*model.Metadata, error) { return metadata, nil }, } @@ -248,7 +248,7 @@ func (i *InstallManagerTestSuite) TestInstall() { TagName: "", } factory.ParseRegistry = []model.Parser{ - func(remotePath string) (*model.Metadata, error) { + func(_ string) (*model.Metadata, error) { return metadata, nil }, } @@ -305,7 +305,7 @@ func (i *InstallManagerTestSuite) TestInstall() { TagName: "", } factory.ParseRegistry = []model.Parser{ - func(remotePath string) (*model.Metadata, error) { + func(_ string) (*model.Metadata, error) { return metadata, nil }, } @@ -359,7 +359,7 @@ func (i *InstallManagerTestSuite) TestInstall() { ProviderName: provider, } factory.ParseRegistry = []model.Parser{ - func(remotePath string) (*model.Metadata, error) { + func(_ string) (*model.Metadata, error) { return metadata, nil }, } @@ -398,7 +398,7 @@ func (i *InstallManagerTestSuite) TestInstall() { ProviderName: provider, } factory.ParseRegistry = []model.Parser{ - func(remotePath string) (*model.Metadata, error) { + func(_ string) (*model.Metadata, error) { return metadata, nil }, } @@ -439,7 +439,7 @@ func (i *InstallManagerTestSuite) TestInstall() { ProviderName: provider, } factory.ParseRegistry = []model.Parser{ - func(remotePath string) (*model.Metadata, error) { + func(_ string) (*model.Metadata, error) { return metadata, nil }, } @@ -481,7 +481,7 @@ func (i *InstallManagerTestSuite) TestInstall() { ProviderName: provider, } factory.ParseRegistry = []model.Parser{ - func(remotePath string) (*model.Metadata, error) { + func(_ string) (*model.Metadata, error) { return metadata, nil }, } @@ -525,7 +525,7 @@ func (i *InstallManagerTestSuite) TestInstall() { ProviderName: provider, } factory.ParseRegistry = []model.Parser{ - func(remotePath string) (*model.Metadata, error) { + func(_ string) (*model.Metadata, error) { return metadata, nil }, } diff --git a/client/extension/manager_test.go b/client/extension/manager_test.go index 37049ee67f..b40fd7b29e 100644 --- a/client/extension/manager_test.go +++ b/client/extension/manager_test.go @@ -43,7 +43,7 @@ func (m *ManagerTestSuite) TestInstall() { ProviderName: provider, } factory.ParseRegistry = []model.Parser{ - func(remotePath string) (*model.Metadata, error) { + func(_ string) (*model.Metadata, error) { return metadata, nil }, } diff --git a/client/extension/provider/github/client_test.go b/client/extension/provider/github/client_test.go index 7d87af4f2c..e2833272ed 100644 --- a/client/extension/provider/github/client_test.go +++ b/client/extension/provider/github/client_test.go @@ -51,7 +51,7 @@ func (c *ClientTestSuite) TestDownloadRelease() { c.Run("should return nil and error if response status is not ok", func() { testPath := "/gojek/optimus-extension-valor" - handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + handler := http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { w.Header().Add("Content-Type", "application/json") w.WriteHeader(http.StatusNotFound) }) @@ -69,7 +69,7 @@ func (c *ClientTestSuite) TestDownloadRelease() { testPath := "/gojek/optimus-extension-valor" message := "invalid-content" - handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + handler := http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { content := []byte(message) 
w.Header().Add("Content-Type", "application/json") @@ -90,7 +90,7 @@ func (c *ClientTestSuite) TestDownloadRelease() { testPath := "/gojek/optimus-extension-valor" release := &github.Release{} - handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + handler := http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { content, _ := json.Marshal(release) w.Header().Add("Content-Type", "application/json") @@ -129,7 +129,7 @@ func (c *ClientTestSuite) TestDownloadAsset() { server := httptest.NewServer(router) defer server.Close() - router.HandleFunc(testReleasePath, func(w http.ResponseWriter, r *http.Request) { + router.HandleFunc(testReleasePath, func(w http.ResponseWriter, _ *http.Request) { content := []byte(message) w.Header().Add("Content-Type", "application/json") @@ -152,7 +152,7 @@ func (c *ClientTestSuite) TestDownloadAsset() { defer server.Close() releaseAPIPath := server.URL + testReleasePath - router.HandleFunc(testReleasePath, func(w http.ResponseWriter, r *http.Request) { + router.HandleFunc(testReleasePath, func(w http.ResponseWriter, _ *http.Request) { content, _ := json.Marshal(release) w.Header().Add("Content-Type", "application/json") @@ -182,7 +182,7 @@ func (c *ClientTestSuite) TestDownloadAsset() { defer server.Close() releaseAPIPath := server.URL + testReleasePath - router.HandleFunc(testReleasePath, func(w http.ResponseWriter, r *http.Request) { + router.HandleFunc(testReleasePath, func(w http.ResponseWriter, _ *http.Request) { content, _ := json.Marshal(release) w.Header().Add("Content-Type", "application/json") @@ -212,7 +212,7 @@ func (c *ClientTestSuite) TestDownloadAsset() { defer server.Close() releaseAPIPath := server.URL + testReleasePath - router.HandleFunc(testReleasePath, func(w http.ResponseWriter, r *http.Request) { + router.HandleFunc(testReleasePath, func(w http.ResponseWriter, _ *http.Request) { content, _ := json.Marshal(release) w.Header().Add("Content-Type", "application/json") @@ -222,7 +222,7 @@ func (c *ClientTestSuite) TestDownloadAsset() { testAssetPath := release.Assets[0].BrowserDownloadURL assetAPIPath := server.URL + testAssetPath release.Assets[0].BrowserDownloadURL = assetAPIPath - router.HandleFunc(testAssetPath, func(w http.ResponseWriter, r *http.Request) { + router.HandleFunc(testAssetPath, func(w http.ResponseWriter, _ *http.Request) { w.Header().Add("Content-Type", "application/json") w.WriteHeader(http.StatusNotFound) }) @@ -250,7 +250,7 @@ func (c *ClientTestSuite) TestDownloadAsset() { defer server.Close() releaseAPIPath := server.URL + testReleasePath - router.HandleFunc(testReleasePath, func(w http.ResponseWriter, r *http.Request) { + router.HandleFunc(testReleasePath, func(w http.ResponseWriter, _ *http.Request) { content, _ := json.Marshal(release) w.Header().Add("Content-Type", "application/json") @@ -260,7 +260,7 @@ func (c *ClientTestSuite) TestDownloadAsset() { testAssetPath := release.Assets[0].BrowserDownloadURL assetAPIPath := server.URL + testAssetPath release.Assets[0].BrowserDownloadURL = assetAPIPath - router.HandleFunc(testAssetPath, func(w http.ResponseWriter, r *http.Request) { + router.HandleFunc(testAssetPath, func(w http.ResponseWriter, _ *http.Request) { content := []byte(assetPayload) w.Header().Add("Content-Type", "application/json") diff --git a/core/resource/resource.go b/core/resource/resource.go index 840711a1b2..01a8844910 100644 --- a/core/resource/resource.go +++ b/core/resource/resource.go @@ -78,6 +78,7 @@ func NameFrom(name string) (Name, error) { return Name(cleaned), 
nil } +// Deprecated: Sections is deprecated. move to warehouse func (n Name) Sections() []string { return strings.Split(n.String(), nameSectionSeparator) } diff --git a/core/resource/store.go b/core/resource/store.go index 671bb35f10..bda784a566 100644 --- a/core/resource/store.go +++ b/core/resource/store.go @@ -3,7 +3,8 @@ package resource import "github.com/goto/optimus/internal/errors" const ( - Bigquery Store = "bigquery" + Bigquery Store = "bigquery" + MaxCompute Store = "maxcompute" ) // Store represents the type of datasource, resource corresponds to @@ -17,6 +18,8 @@ func FromStringToStore(name string) (Store, error) { switch name { case string(Bigquery): return Bigquery, nil + case string(MaxCompute): + return MaxCompute, nil default: return "", errors.InvalidArgument(EntityResource, "unknown store "+name) } diff --git a/core/resource/store_test.go b/core/resource/store_test.go index b87e8fc97f..0ca8791aea 100644 --- a/core/resource/store_test.go +++ b/core/resource/store_test.go @@ -19,4 +19,9 @@ func TestDataStore(t *testing.T) { assert.Nil(t, err) assert.Equal(t, "bigquery", bq.String()) }) + t.Run("converts a string to store when correct", func(t *testing.T) { + mc, err := resource.FromStringToStore("maxcompute") + assert.Nil(t, err) + assert.Equal(t, "maxcompute", mc.String()) + }) } diff --git a/core/scheduler/handler/v1beta1/job_run.go b/core/scheduler/handler/v1beta1/job_run.go index 03bf4f9330..9705910cd1 100644 --- a/core/scheduler/handler/v1beta1/job_run.go +++ b/core/scheduler/handler/v1beta1/job_run.go @@ -195,7 +195,7 @@ func (h JobRunHandler) UploadToScheduler(_ context.Context, req *pb.UploadToSche h.l.Error("error adapting project name [%s]: %s", req.GetProjectName(), err) return nil, errors.GRPCErr(err, "unable to get projectName") } - go func() { + go func() { //nolint: contextcheck err = h.service.UploadToScheduler(context.Background(), projectName) if err != nil { h.l.Error("Finished upload to scheduler with error: %s", err) diff --git a/ext/notify/pagerduty/pagerdutyservice.go b/ext/notify/pagerduty/pagerdutyservice.go index 7c9bfa8ba5..6d801563a9 100644 --- a/ext/notify/pagerduty/pagerdutyservice.go +++ b/ext/notify/pagerduty/pagerdutyservice.go @@ -63,7 +63,6 @@ func (*PagerDutyServiceImpl) SendAlert(ctx context.Context, evt Event) error { Payload: &payload, } _, err = pagerduty.ManageEventWithContext(ctx, e) - if err != nil { return err } diff --git a/ext/notify/slack/slack_test.go b/ext/notify/slack/slack_test.go index 159a488679..960a549d83 100644 --- a/ext/notify/slack/slack_test.go +++ b/ext/notify/slack/slack_test.go @@ -55,7 +55,7 @@ func TestSlack(t *testing.T) { t.Run("should send message to user using email address successfully", func(t *testing.T) { muxRouter := http.NewServeMux() server := httptest.NewServer(muxRouter) - muxRouter.HandleFunc("/users.lookupByEmail", func(rw http.ResponseWriter, r *http.Request) { + muxRouter.HandleFunc("/users.lookupByEmail", func(rw http.ResponseWriter, _ *http.Request) { rw.Header().Set("Content-Type", "application/json") response, _ := json.Marshal(struct { Ok bool `json:"ok"` @@ -66,7 +66,7 @@ func TestSlack(t *testing.T) { }) rw.Write(response) }) - muxRouter.HandleFunc("/chat.postMessage", func(rw http.ResponseWriter, r *http.Request) { + muxRouter.HandleFunc("/chat.postMessage", func(rw http.ResponseWriter, _ *http.Request) { rw.Header().Set("Content-Type", "application/json") response, _ := json.Marshal(struct { SlackResponse api.SlackResponse @@ -108,7 +108,7 @@ func TestSlack(t *testing.T) { 
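Context on the `//nolint: contextcheck` added to job_run.go above: the goroutine runs the upload on `context.Background()`, presumably because the gRPC handler returns immediately and a request-scoped context would be cancelled mid-upload; contextcheck flags exactly this detachment. A minimal sketch of the trade-off, with hypothetical names standing in for the Optimus service:

```go
package main

import (
	"context"
	"fmt"
	"time"
)

// uploadAll stands in for service.UploadToScheduler; it only illustrates the
// cancellation behaviour and is not the Optimus implementation.
func uploadAll(ctx context.Context, project string) error {
	select {
	case <-time.After(50 * time.Millisecond):
		fmt.Println("uploaded", project)
		return nil
	case <-ctx.Done():
		return ctx.Err()
	}
}

func main() {
	reqCtx, cancel := context.WithCancel(context.Background())
	cancel() // the RPC has returned, so its context is already cancelled

	// Reusing the request context would abort the background upload.
	fmt.Println(uploadAll(reqCtx, "proj")) // context canceled

	// Detaching with context.Background() lets it finish; this is the pattern
	// contextcheck flags and the nolint directive suppresses.
	done := make(chan error)
	go func() { done <- uploadAll(context.Background(), "proj") }()
	fmt.Println(<-done) // <nil>
}
```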
t.Run("should send message to user groups successfully", func(t *testing.T) { muxRouter := http.NewServeMux() server := httptest.NewServer(muxRouter) - muxRouter.HandleFunc("/usergroups.list", func(rw http.ResponseWriter, r *http.Request) { + muxRouter.HandleFunc("/usergroups.list", func(rw http.ResponseWriter, _ *http.Request) { rw.Header().Set("Content-Type", "application/json") response, _ := json.Marshal(struct { Ok bool `json:"ok"` @@ -121,7 +121,7 @@ func TestSlack(t *testing.T) { }) rw.Write(response) }) - muxRouter.HandleFunc("/usergroups.users.list", func(rw http.ResponseWriter, r *http.Request) { + muxRouter.HandleFunc("/usergroups.users.list", func(rw http.ResponseWriter, _ *http.Request) { rw.Header().Set("Content-Type", "application/json") response, _ := json.Marshal(struct { Ok bool `json:"ok"` @@ -132,7 +132,7 @@ func TestSlack(t *testing.T) { }) rw.Write(response) }) - muxRouter.HandleFunc("/chat.postMessage", func(rw http.ResponseWriter, r *http.Request) { + muxRouter.HandleFunc("/chat.postMessage", func(rw http.ResponseWriter, _ *http.Request) { rw.Header().Set("Content-Type", "application/json") response, _ := json.Marshal(struct { SlackResponse api.SlackResponse diff --git a/ext/notify/webhook/webhook_test.go b/ext/notify/webhook/webhook_test.go index 4f0fb31f1e..45461897cc 100644 --- a/ext/notify/webhook/webhook_test.go +++ b/ext/notify/webhook/webhook_test.go @@ -30,7 +30,7 @@ func TestWebhook(t *testing.T) { muxRouter := http.NewServeMux() server := httptest.NewServer(muxRouter) defer server.Close() - muxRouter.HandleFunc("/users/webhook_end_point", func(rw http.ResponseWriter, r *http.Request) { + muxRouter.HandleFunc("/users/webhook_end_point", func(rw http.ResponseWriter, _ *http.Request) { rw.Header().Set("Content-Type", "application/json") response, _ := json.Marshal(struct { Ok bool `json:"ok"` @@ -85,7 +85,7 @@ func TestWebhook(t *testing.T) { muxRouter := http.NewServeMux() server := httptest.NewServer(muxRouter) defer server.Close() - muxRouter.HandleFunc("/users/webhook_end_point", func(rw http.ResponseWriter, r *http.Request) { + muxRouter.HandleFunc("/users/webhook_end_point", func(rw http.ResponseWriter, _ *http.Request) { rw.Header().Set("Content-Type", "application/json") response, _ := json.Marshal(struct { Ok bool `json:"ok"` diff --git a/ext/resourcemanager/optimus_test.go b/ext/resourcemanager/optimus_test.go index e1b29cd125..84f37e2714 100644 --- a/ext/resourcemanager/optimus_test.go +++ b/ext/resourcemanager/optimus_test.go @@ -82,7 +82,7 @@ func (o *OptimusResourceManager) TestGetJobSpecifications() { panic(err) } - router.HandleFunc(apiPath, func(w http.ResponseWriter, r *http.Request) { + router.HandleFunc(apiPath, func(w http.ResponseWriter, _ *http.Request) { w.Header().Add("Content-Type", "application/json") w.WriteHeader(http.StatusNotFound) }) @@ -111,7 +111,7 @@ func (o *OptimusResourceManager) TestGetJobSpecifications() { panic(err) } - router.HandleFunc(apiPath, func(w http.ResponseWriter, r *http.Request) { + router.HandleFunc(apiPath, func(w http.ResponseWriter, _ *http.Request) { content := []byte("invalid-content") w.Header().Add("Content-Type", "application/json") diff --git a/ext/scheduler/airflow/client.go b/ext/scheduler/airflow/client.go index 9b8b4a6d07..3bc89f3344 100644 --- a/ext/scheduler/airflow/client.go +++ b/ext/scheduler/airflow/client.go @@ -53,7 +53,7 @@ type DagRunRequest struct { OrderBy string `json:"order_by"` PageOffset int `json:"page_offset"` PageLimit int `json:"page_limit"` - DagIds []string 
`json:"dag_ids"` + DagIds []string `json:"dag_ids"` // nolint: revive ExecutionDateGte string `json:"execution_date_gte,omitempty"` ExecutionDateLte string `json:"execution_date_lte,omitempty"` } diff --git a/ext/scheduler/airflow/dag/template.go b/ext/scheduler/airflow/dag/template.go index 5b1f684a9e..5c6f38fe67 100644 --- a/ext/scheduler/airflow/dag/template.go +++ b/ext/scheduler/airflow/dag/template.go @@ -41,7 +41,7 @@ type templates map[string]*template.Template func NewTemplates() (templates, error) { templates := map[string]*template.Template{} re := regexp.MustCompile(`dag\.(\d.\d)\.py\.tmpl`) - err := fs.WalkDir(templateFS, ".", func(path string, d fs.DirEntry, err error) error { + err := fs.WalkDir(templateFS, ".", func(path string, d fs.DirEntry, _ error) error { if d.IsDir() { return nil } diff --git a/ext/store/maxcompute/client.go b/ext/store/maxcompute/client.go new file mode 100644 index 0000000000..6085cfcf18 --- /dev/null +++ b/ext/store/maxcompute/client.go @@ -0,0 +1,63 @@ +package maxcompute + +import ( + "encoding/json" + + "github.com/aliyun/aliyun-odps-go-sdk/odps" + "github.com/aliyun/aliyun-odps-go-sdk/odps/account" + + "github.com/goto/optimus/internal/errors" +) + +type MaxComputeClientProvider struct{} + +func NewClientProvider() *MaxComputeClientProvider { + return &MaxComputeClientProvider{} +} + +func (MaxComputeClientProvider) Get(account string) (Client, error) { + return NewClient(account) +} + +type MaxComputeClient struct { + *odps.Odps +} + +type maxComputeCredentials struct { + AccessID string `json:"access_id"` + AccessKey string `json:"access_key"` + Endpoint string `json:"endpoint"` + ProjectName string `json:"project_name"` +} + +func NewClient(svcAccount string) (*MaxComputeClient, error) { + cred, err := collectMaxComputeCredential([]byte(svcAccount)) + if err != nil { + return nil, errors.InternalError(store, "failed to read account", err) + } + + aliAccount := account.NewAliyunAccount(cred.AccessID, cred.AccessKey) + odpsIns := odps.NewOdps(aliAccount, cred.Endpoint) + odpsIns.SetDefaultProjectName(cred.ProjectName) + + return &MaxComputeClient{odpsIns}, nil +} + +func (c *MaxComputeClient) TableHandleFrom() TableResourceHandle { + t := c.Tables() + return NewTableHandle(c, &t) +} + +func (c *MaxComputeClient) ViewHandleFrom() TableResourceHandle { + t := c.Tables() + return NewViewHandle(c, &t) +} + +func collectMaxComputeCredential(jsonData []byte) (*maxComputeCredentials, error) { + var creds maxComputeCredentials + if err := json.Unmarshal(jsonData, &creds); err != nil { + return nil, err + } + + return &creds, nil +} diff --git a/ext/store/maxcompute/client_test.go b/ext/store/maxcompute/client_test.go new file mode 100644 index 0000000000..65b9fd2e7f --- /dev/null +++ b/ext/store/maxcompute/client_test.go @@ -0,0 +1,74 @@ +package maxcompute_test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/goto/optimus/ext/store/maxcompute" +) + +func TestMaxComputeClient(t *testing.T) { + testCredJSON := ` +{ + "access_id": "LNRJ5tH1XMSINW5J3TjYAvfX", + "access_key": "lAZBJhdkNbwVj3bej5BuhjwbdV0nSp", + "endpoint": "http://service.ap-southeast-5.maxcompute.aliyun.com/api", + "project_name": "proj" +} +` + + t.Run("NewClient", func(t *testing.T) { + t.Run("returns error when invalid creds", func(t *testing.T) { + _, err := maxcompute.NewClient("") + assert.NotNil(t, err) + assert.ErrorContains(t, err, "failed to read account") + }) + t.Run("returns success when create new client on valid creds", func(t *testing.T) { 
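An aside on the new client.go introduced above: a hedged usage sketch of the credential flow it implements, going from the DATASTORE_MAXCOMPUTE service-account JSON to table and view handles. The credential values are placeholders and error handling is trimmed.

```go
package main

import (
	"log"

	"github.com/goto/optimus/ext/store/maxcompute"
)

func main() {
	// Same JSON shape that collectMaxComputeCredential expects; values are fake.
	svcAccount := `{
		"access_id": "dummy-access-id",
		"access_key": "dummy-access-key",
		"endpoint": "http://service.ap-southeast-5.maxcompute.aliyun.com/api",
		"project_name": "proj"
	}`

	client, err := maxcompute.NewClient(svcAccount)
	if err != nil {
		log.Fatal(err)
	}

	// Handle construction does not touch the service (the tests above build handles
	// from fake credentials); Create/Update/Exists are what talk to MaxCompute.
	tableHandle := client.TableHandleFrom()
	viewHandle := client.ViewHandleFrom()
	_, _ = tableHandle, viewHandle
}
```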
+ client, err := maxcompute.NewClient(testCredJSON) + assert.Nil(t, err) + assert.NotNil(t, client) + }) + }) + t.Run("TableHandleFrom", func(t *testing.T) { + t.Run("returns success when init the table handle", func(t *testing.T) { + client, err := maxcompute.NewClient(testCredJSON) + assert.Nil(t, err) + + tableHandle := client.TableHandleFrom() + assert.NotNil(t, tableHandle) + }) + }) + t.Run("ViewHandleFrom", func(t *testing.T) { + t.Run("returns success when init the view handle", func(t *testing.T) { + client, err := maxcompute.NewClient(testCredJSON) + assert.Nil(t, err) + + viewHandle := client.ViewHandleFrom() + assert.NotNil(t, viewHandle) + }) + }) +} + +func TestClientProvider(t *testing.T) { + clientProvider := maxcompute.NewClientProvider() + testCredJSON := ` +{ + "access_id": "LNRJ5tH1XMSINW5J3TjYAvfX", + "access_key": "lAZBJhdkNbwVj3bej5BuhjwbdV0nSp", + "endpoint": "http://service.ap-southeast-5.maxcompute.aliyun.com/api", + "project_name": "proj" +} +` + + t.Run("return error when client provider cannot create new client", func(t *testing.T) { + _, err := clientProvider.Get("") + assert.NotNil(t, err) + assert.ErrorContains(t, err, "failed to read account") + }) + t.Run("return success when client provider creates new client with json", func(t *testing.T) { + client, err := clientProvider.Get(testCredJSON) + assert.Nil(t, err) + assert.NotNil(t, client) + }) +} diff --git a/ext/store/maxcompute/maxcompute.go b/ext/store/maxcompute/maxcompute.go new file mode 100644 index 0000000000..67d892bf40 --- /dev/null +++ b/ext/store/maxcompute/maxcompute.go @@ -0,0 +1,197 @@ +package maxcompute + +import ( + "context" + "fmt" + + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/trace" + + "github.com/goto/optimus/core/resource" + "github.com/goto/optimus/core/tenant" + "github.com/goto/optimus/internal/errors" +) + +const ( + accountKey = "DATASTORE_MAXCOMPUTE" + store = "MaxComputeStore" + + maxcomputeID = "maxcompute" + + TableNameSections = 3 +) + +type ResourceHandle interface { + Create(res *resource.Resource) error + Update(res *resource.Resource) error + Exists(tableName string) bool +} + +type TableResourceHandle interface { + ResourceHandle +} + +type Client interface { + TableHandleFrom() TableResourceHandle + ViewHandleFrom() TableResourceHandle +} + +type ClientProvider interface { + Get(account string) (Client, error) +} + +type SecretProvider interface { + GetSecret(ctx context.Context, tnnt tenant.Tenant, key string) (*tenant.PlainTextSecret, error) +} + +type MaxCompute struct { + secretProvider SecretProvider + clientProvider ClientProvider +} + +func (m MaxCompute) Create(ctx context.Context, res *resource.Resource) error { + spanCtx, span := startChildSpan(ctx, "maxcompute/CreateResource") + defer span.End() + + account, err := m.secretProvider.GetSecret(spanCtx, res.Tenant(), accountKey) + if err != nil { + return err + } + + odpsClient, err := m.clientProvider.Get(account.Value()) + if err != nil { + return err + } + + switch res.Kind() { + case KindTable: + handle := odpsClient.TableHandleFrom() + return handle.Create(res) + + case KindView: + handle := odpsClient.ViewHandleFrom() + return handle.Create(res) + + default: + return errors.InvalidArgument(store, "invalid kind for maxcompute resource "+res.Kind()) + } +} + +func (m MaxCompute) Update(ctx context.Context, resource *resource.Resource) error { + spanCtx, span := startChildSpan(ctx, "maxcompute/UpdateResource") + defer span.End() + + account, err := m.secretProvider.GetSecret(spanCtx, 
resource.Tenant(), accountKey) + if err != nil { + return err + } + + odpsClient, err := m.clientProvider.Get(account.Value()) + if err != nil { + return err + } + + switch resource.Kind() { + case KindTable: + handle := odpsClient.TableHandleFrom() + return handle.Update(resource) + + case KindView: + handle := odpsClient.ViewHandleFrom() + return handle.Update(resource) + + default: + return errors.InvalidArgument(store, "invalid kind for maxcompute resource "+resource.Kind()) + } +} + +func (MaxCompute) BatchUpdate(_ context.Context, _ []*resource.Resource) error { + return errors.InternalError(resourceSchema, "support for BatchUpdate is not present", nil) +} + +func (MaxCompute) Validate(r *resource.Resource) error { + switch r.Kind() { + case KindTable: + table, err := ConvertSpecTo[Table](r) + if err != nil { + return err + } + table.Name = r.Name() + return table.Validate() + + case KindView: + view, err := ConvertSpecTo[View](r) + if err != nil { + return err + } + view.Name = r.Name() + return view.Validate() + + default: + return errors.InvalidArgument(resource.EntityResource, "unknown kind") + } +} + +func (MaxCompute) GetURN(res *resource.Resource) (resource.URN, error) { + return URNFor(res) +} + +func (MaxCompute) Backup(_ context.Context, _ *resource.Backup, _ []*resource.Resource) (*resource.BackupResult, error) { + return nil, errors.InternalError(resourceSchema, "support for Backup is not present", nil) +} + +func (m MaxCompute) Exist(ctx context.Context, tnnt tenant.Tenant, urn resource.URN) (bool, error) { + spanCtx, span := startChildSpan(ctx, "maxcompute/Exist") + defer span.End() + + if urn.GetStore() != maxcomputeID { + msg := fmt.Sprintf("expected store [%s] but received [%s]", maxcomputeID, urn.GetStore()) + return false, errors.InvalidArgument(store, msg) + } + + account, err := m.secretProvider.GetSecret(spanCtx, tnnt, accountKey) + if err != nil { + return false, err + } + + client, err := m.clientProvider.Get(account.Value()) + if err != nil { + return false, err + } + + name, err := resource.NameFrom(urn.GetName()) + if err != nil { + return false, err + } + + kindToHandleFn := map[string]func() TableResourceHandle{ + KindTable: client.TableHandleFrom, + KindView: client.ViewHandleFrom, + } + + for _, resourceHandleFn := range kindToHandleFn { + resourceName, err := resourceNameFor(name) + if err != nil { + return true, err + } + + if resourceHandleFn().Exists(resourceName) { + return true, nil + } + } + + return false, nil +} + +func startChildSpan(ctx context.Context, name string) (context.Context, trace.Span) { + tracer := otel.Tracer("datastore/maxcompute") + + return tracer.Start(ctx, name) +} + +func NewMaxComputeDataStore(secretProvider SecretProvider, clientProvider ClientProvider) *MaxCompute { + return &MaxCompute{ + secretProvider: secretProvider, + clientProvider: clientProvider, + } +} diff --git a/ext/store/maxcompute/maxcompute_test.go b/ext/store/maxcompute/maxcompute_test.go new file mode 100644 index 0000000000..570e84c336 --- /dev/null +++ b/ext/store/maxcompute/maxcompute_test.go @@ -0,0 +1,550 @@ +package maxcompute_test + +import ( + "context" + "errors" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + + "github.com/goto/optimus/core/resource" + "github.com/goto/optimus/core/tenant" + "github.com/goto/optimus/ext/store/maxcompute" +) + +func TestMaxComputeStore(t *testing.T) { + ctx := context.Background() + tableName := "test_table" + tnnt, _ := tenant.NewTenant("proj", "ns") + pts, _ := 
tenant.NewPlainTextSecret("secret_name", "secret_value") + store := resource.MaxCompute + metadata := resource.Metadata{ + Version: 1, + Description: "resource description", + Labels: map[string]string{"owner": "optimus"}, + } + spec := map[string]any{"description": "resource"} + + t.Run("Create", func(t *testing.T) { + t.Run("returns error when secret is not provided", func(t *testing.T) { + secretProvider := new(mockSecretProvider) + secretProvider.On("GetSecret", mock.Anything, tnnt, "DATASTORE_MAXCOMPUTE"). + Return(nil, errors.New("not found secret")) + defer secretProvider.AssertExpectations(t) + + clientProvider := new(mockClientProvider) + mcStore := maxcompute.NewMaxComputeDataStore(secretProvider, clientProvider) + + res, err := resource.NewResource(tableName, maxcompute.KindTable, store, tnnt, &metadata, spec) + assert.Nil(t, err) + + err = mcStore.Create(ctx, res) + assert.NotNil(t, err) + assert.EqualError(t, err, "not found secret") + }) + t.Run("returns error when not able to get client", func(t *testing.T) { + secretProvider := new(mockSecretProvider) + secretProvider.On("GetSecret", mock.Anything, tnnt, "DATASTORE_MAXCOMPUTE"). + Return(pts, nil) + defer secretProvider.AssertExpectations(t) + + clientProvider := new(mockClientProvider) + clientProvider.On("Get", "secret_value").Return(nil, errors.New("error in client")) + defer clientProvider.AssertExpectations(t) + mcStore := maxcompute.NewMaxComputeDataStore(secretProvider, clientProvider) + + res, err := resource.NewResource(tableName, maxcompute.KindTable, store, tnnt, &metadata, spec) + assert.Nil(t, err) + + err = mcStore.Create(ctx, res) + assert.NotNil(t, err) + assert.EqualError(t, err, "error in client") + }) + t.Run("returns error when kind is invalid", func(t *testing.T) { + secretProvider := new(mockSecretProvider) + secretProvider.On("GetSecret", mock.Anything, tnnt, "DATASTORE_MAXCOMPUTE"). + Return(pts, nil) + defer secretProvider.AssertExpectations(t) + + client := new(mockClient) + defer client.AssertExpectations(t) + + clientProvider := new(mockClientProvider) + clientProvider.On("Get", "secret_value").Return(client, nil) + defer clientProvider.AssertExpectations(t) + + mcStore := maxcompute.NewMaxComputeDataStore(secretProvider, clientProvider) + + res, err := resource.NewResource(tableName, "unknown", store, tnnt, &metadata, spec) + assert.Nil(t, err) + + err = mcStore.Create(ctx, res) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "invalid kind for maxcompute resource unknown") + }) + t.Run("return success when calls appropriate handler for table", func(t *testing.T) { + secretProvider := new(mockSecretProvider) + secretProvider.On("GetSecret", mock.Anything, tnnt, "DATASTORE_MAXCOMPUTE"). 
+ Return(pts, nil) + defer secretProvider.AssertExpectations(t) + + res, err := resource.NewResource(tableName, maxcompute.KindTable, store, tnnt, &metadata, spec) + assert.Nil(t, err) + + tableHandle := new(mockTableResourceHandle) + tableHandle.On("Create", res).Return(nil) + defer tableHandle.AssertExpectations(t) + + client := new(mockClient) + client.On("TableHandleFrom").Return(tableHandle) + defer client.AssertExpectations(t) + + clientProvider := new(mockClientProvider) + clientProvider.On("Get", "secret_value").Return(client, nil) + defer clientProvider.AssertExpectations(t) + + mcStore := maxcompute.NewMaxComputeDataStore(secretProvider, clientProvider) + err = mcStore.Create(ctx, res) + assert.Nil(t, err) + }) + t.Run("return success when calls appropriate handler for view", func(t *testing.T) { + secretProvider := new(mockSecretProvider) + secretProvider.On("GetSecret", mock.Anything, tnnt, "DATASTORE_MAXCOMPUTE"). + Return(pts, nil) + defer secretProvider.AssertExpectations(t) + + res, err := resource.NewResource(tableName, maxcompute.KindView, store, tnnt, &metadata, spec) + assert.Nil(t, err) + + tableHandle := new(mockTableResourceHandle) + tableHandle.On("Create", res).Return(nil) + defer tableHandle.AssertExpectations(t) + + client := new(mockClient) + client.On("ViewHandleFrom").Return(tableHandle) + defer client.AssertExpectations(t) + + clientProvider := new(mockClientProvider) + clientProvider.On("Get", "secret_value").Return(client, nil) + defer clientProvider.AssertExpectations(t) + + mcStore := maxcompute.NewMaxComputeDataStore(secretProvider, clientProvider) + err = mcStore.Create(ctx, res) + assert.Nil(t, err) + }) + }) + t.Run("Update", func(t *testing.T) { + t.Run("returns error when secret is not provided", func(t *testing.T) { + secretProvider := new(mockSecretProvider) + secretProvider.On("GetSecret", mock.Anything, tnnt, "DATASTORE_MAXCOMPUTE"). + Return(nil, errors.New("not found secret")) + defer secretProvider.AssertExpectations(t) + + clientProvider := new(mockClientProvider) + mcStore := maxcompute.NewMaxComputeDataStore(secretProvider, clientProvider) + + res, err := resource.NewResource(tableName, maxcompute.KindTable, store, tnnt, &metadata, spec) + assert.Nil(t, err) + + err = mcStore.Update(ctx, res) + assert.NotNil(t, err) + assert.EqualError(t, err, "not found secret") + }) + t.Run("returns error when not able to get client", func(t *testing.T) { + secretProvider := new(mockSecretProvider) + secretProvider.On("GetSecret", mock.Anything, tnnt, "DATASTORE_MAXCOMPUTE"). + Return(pts, nil) + defer secretProvider.AssertExpectations(t) + + clientProvider := new(mockClientProvider) + clientProvider.On("Get", "secret_value").Return(nil, errors.New("error in client")) + defer clientProvider.AssertExpectations(t) + mcStore := maxcompute.NewMaxComputeDataStore(secretProvider, clientProvider) + + res, err := resource.NewResource(tableName, maxcompute.KindTable, store, tnnt, &metadata, spec) + assert.Nil(t, err) + + err = mcStore.Update(ctx, res) + assert.NotNil(t, err) + assert.EqualError(t, err, "error in client") + }) + t.Run("returns error when kind is invalid", func(t *testing.T) { + secretProvider := new(mockSecretProvider) + secretProvider.On("GetSecret", mock.Anything, tnnt, "DATASTORE_MAXCOMPUTE"). 
+ Return(pts, nil) + defer secretProvider.AssertExpectations(t) + + client := new(mockClient) + defer client.AssertExpectations(t) + + clientProvider := new(mockClientProvider) + clientProvider.On("Get", "secret_value").Return(client, nil) + defer clientProvider.AssertExpectations(t) + + mcStore := maxcompute.NewMaxComputeDataStore(secretProvider, clientProvider) + + res, err := resource.NewResource(tableName, "unknown", store, tnnt, &metadata, spec) + assert.Nil(t, err) + + err = mcStore.Update(ctx, res) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "invalid kind for maxcompute resource unknown") + }) + t.Run("return success when calls appropriate handler for table", func(t *testing.T) { + secretProvider := new(mockSecretProvider) + secretProvider.On("GetSecret", mock.Anything, tnnt, "DATASTORE_MAXCOMPUTE"). + Return(pts, nil) + defer secretProvider.AssertExpectations(t) + + res, err := resource.NewResource(tableName, maxcompute.KindTable, store, tnnt, &metadata, spec) + assert.Nil(t, err) + + tableHandle := new(mockTableResourceHandle) + tableHandle.On("Update", res).Return(nil) + defer tableHandle.AssertExpectations(t) + + client := new(mockClient) + client.On("TableHandleFrom").Return(tableHandle) + defer client.AssertExpectations(t) + + clientProvider := new(mockClientProvider) + clientProvider.On("Get", "secret_value").Return(client, nil) + defer clientProvider.AssertExpectations(t) + + mcStore := maxcompute.NewMaxComputeDataStore(secretProvider, clientProvider) + err = mcStore.Update(ctx, res) + assert.Nil(t, err) + }) + t.Run("return success when calls appropriate handler for view", func(t *testing.T) { + secretProvider := new(mockSecretProvider) + secretProvider.On("GetSecret", mock.Anything, tnnt, "DATASTORE_MAXCOMPUTE"). + Return(pts, nil) + defer secretProvider.AssertExpectations(t) + + res, err := resource.NewResource(tableName, maxcompute.KindView, store, tnnt, &metadata, spec) + assert.Nil(t, err) + + tableHandle := new(mockTableResourceHandle) + tableHandle.On("Update", res).Return(nil) + defer tableHandle.AssertExpectations(t) + + client := new(mockClient) + client.On("ViewHandleFrom").Return(tableHandle) + defer client.AssertExpectations(t) + + clientProvider := new(mockClientProvider) + clientProvider.On("Get", "secret_value").Return(client, nil) + defer clientProvider.AssertExpectations(t) + + mcStore := maxcompute.NewMaxComputeDataStore(secretProvider, clientProvider) + err = mcStore.Update(ctx, res) + assert.Nil(t, err) + }) + }) + t.Run("Validate", func(t *testing.T) { + invalidSpec := map[string]any{ + "description": map[string]any{"some": "desc"}, + } + specWithoutValues := map[string]any{"a": "b"} + t.Run("returns error when resource kind is invalid", func(t *testing.T) { + res, err := resource.NewResource(tableName, "unknown", store, tnnt, &metadata, invalidSpec) + assert.Nil(t, err) + + mcStore := maxcompute.NewMaxComputeDataStore(nil, nil) + err = mcStore.Validate(res) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "unknown kind") + }) + t.Run("for table", func(t *testing.T) { + t.Run("returns error when cannot decode table", func(t *testing.T) { + res, err := resource.NewResource(tableName, maxcompute.KindTable, store, tnnt, &metadata, invalidSpec) + assert.Nil(t, err) + assert.Equal(t, tableName, res.FullName()) + + mcStore := maxcompute.NewMaxComputeDataStore(nil, nil) + err = mcStore.Validate(res) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "not able to decode spec for "+tableName) + }) + t.Run("returns error when decode empty 
table schema", func(t *testing.T) { + res, err := resource.NewResource(tableName, maxcompute.KindTable, store, tnnt, &metadata, specWithoutValues) + assert.Nil(t, err) + assert.Equal(t, tableName, res.FullName()) + + mcStore := maxcompute.NewMaxComputeDataStore(nil, nil) + err = mcStore.Validate(res) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "empty schema for table "+tableName) + }) + }) + t.Run("for view", func(t *testing.T) { + t.Run("returns error when cannot decode view", func(t *testing.T) { + res, err := resource.NewResource(tableName, maxcompute.KindView, store, tnnt, &metadata, invalidSpec) + assert.Nil(t, err) + assert.Equal(t, tableName, res.FullName()) + + mcStore := maxcompute.NewMaxComputeDataStore(nil, nil) + err = mcStore.Validate(res) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "not able to decode spec for "+tableName) + }) + t.Run("returns error when decode empty view schema", func(t *testing.T) { + res, err := resource.NewResource(tableName, maxcompute.KindView, store, tnnt, &metadata, specWithoutValues) + assert.Nil(t, err) + assert.Equal(t, tableName, res.FullName()) + + mcStore := maxcompute.NewMaxComputeDataStore(nil, nil) + err = mcStore.Validate(res) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "view query is empty for "+tableName) + }) + }) + }) + t.Run("GetURN", func(t *testing.T) { + spec := map[string]any{ + "description": "resource", + "project": "proj", + "database": "schema", + "name": tableName, + } + t.Run("returns urn for resource", func(t *testing.T) { + expectedURN, err := resource.ParseURN("maxcompute://proj.schema." + tableName) + assert.NoError(t, err) + + res, err := resource.NewResource("proj.schema."+tableName, maxcompute.KindTable, store, tnnt, &metadata, spec) + assert.NoError(t, err) + + mcStore := maxcompute.NewMaxComputeDataStore(nil, nil) + actualURN, err := mcStore.GetURN(res) + assert.NoError(t, err) + assert.Equal(t, expectedURN, actualURN) + }) + }) + t.Run("Exist", func(t *testing.T) { + t.Run("returns false and error when store is not maxcompute", func(t *testing.T) { + secretProvider := new(mockSecretProvider) + defer secretProvider.AssertExpectations(t) + + clientProvider := new(mockClientProvider) + defer clientProvider.AssertExpectations(t) + + mcStore := maxcompute.NewMaxComputeDataStore(secretProvider, clientProvider) + + urn, err := resource.NewURN("random_store", "project.table") + assert.NoError(t, err) + + actualExist, actualError := mcStore.Exist(ctx, tnnt, urn) + assert.False(t, actualExist) + assert.ErrorContains(t, actualError, "expected store [maxcompute] but received [random_store]") + }) + t.Run("returns false and error when secret is not provided", func(t *testing.T) { + secretProvider := new(mockSecretProvider) + secretProvider.On("GetSecret", mock.Anything, tnnt, "DATASTORE_MAXCOMPUTE"). 
+ Return(nil, errors.New("not found secret")) + defer secretProvider.AssertExpectations(t) + + clientProvider := new(mockClientProvider) + defer clientProvider.AssertExpectations(t) + + mcStore := maxcompute.NewMaxComputeDataStore(secretProvider, clientProvider) + + urn, err := resource.NewURN("maxcompute", "project.table") + assert.NoError(t, err) + + actualExist, actualError := mcStore.Exist(ctx, tnnt, urn) + assert.False(t, actualExist) + assert.ErrorContains(t, actualError, "not found secret") + }) + t.Run("returns false and error when not able to get client", func(t *testing.T) { + secretProvider := new(mockSecretProvider) + secretProvider.On("GetSecret", mock.Anything, tnnt, "DATASTORE_MAXCOMPUTE").Return(pts, nil) + defer secretProvider.AssertExpectations(t) + + clientProvider := new(mockClientProvider) + clientProvider.On("Get", "secret_value").Return(nil, errors.New("error in client")) + defer clientProvider.AssertExpectations(t) + + mcStore := maxcompute.NewMaxComputeDataStore(secretProvider, clientProvider) + + urn, err := resource.NewURN("maxcompute", "project.table") + assert.NoError(t, err) + + actualExist, actualError := mcStore.Exist(ctx, tnnt, urn) + assert.False(t, actualExist) + assert.ErrorContains(t, actualError, "error in client") + }) + t.Run("returns true and error when resource name is invalid", func(t *testing.T) { + secretProvider := new(mockSecretProvider) + secretProvider.On("GetSecret", mock.Anything, tnnt, "DATASTORE_MAXCOMPUTE").Return(pts, nil) + defer secretProvider.AssertExpectations(t) + + client := new(mockClient) + defer client.AssertExpectations(t) + + clientProvider := new(mockClientProvider) + clientProvider.On("Get", pts.Value()).Return(client, nil) + defer clientProvider.AssertExpectations(t) + + tableHandle := new(mockTableResourceHandle) + defer tableHandle.AssertExpectations(t) + + mcStore := maxcompute.NewMaxComputeDataStore(secretProvider, clientProvider) + + urn, err := resource.NewURN("maxcompute", "table") + assert.NoError(t, err) + + actualExist, actualError := mcStore.Exist(ctx, tnnt, urn) + assert.True(t, actualExist) + assert.ErrorContains(t, actualError, "invalid resource name: table") + }) + t.Run("returns true and error when resource name is empty", func(t *testing.T) { + secretProvider := new(mockSecretProvider) + secretProvider.On("GetSecret", mock.Anything, tnnt, "DATASTORE_MAXCOMPUTE").Return(pts, nil) + defer secretProvider.AssertExpectations(t) + + client := new(mockClient) + defer client.AssertExpectations(t) + + clientProvider := new(mockClientProvider) + clientProvider.On("Get", pts.Value()).Return(client, nil) + defer clientProvider.AssertExpectations(t) + + tableHandle := new(mockTableResourceHandle) + defer tableHandle.AssertExpectations(t) + + mcStore := maxcompute.NewMaxComputeDataStore(secretProvider, clientProvider) + + urn, err := resource.NewURN("maxcompute", "project.") + assert.NoError(t, err) + + actualExist, actualError := mcStore.Exist(ctx, tnnt, urn) + assert.True(t, actualExist) + assert.ErrorContains(t, actualError, "invalid resource name: project.") + }) + t.Run("returns true and nil when schema table resource does exist", func(t *testing.T) { + secretProvider := new(mockSecretProvider) + secretProvider.On("GetSecret", mock.Anything, tnnt, "DATASTORE_MAXCOMPUTE").Return(pts, nil) + defer secretProvider.AssertExpectations(t) + + client := new(mockClient) + defer client.AssertExpectations(t) + + clientProvider := new(mockClientProvider) + clientProvider.On("Get", pts.Value()).Return(client, nil) + defer 
clientProvider.AssertExpectations(t) + + tableHandle := new(mockTableResourceHandle) + viewHandle := new(mockTableResourceHandle) + defer func() { + tableHandle.AssertExpectations(t) + viewHandle.AssertExpectations(t) + }() + + mcStore := maxcompute.NewMaxComputeDataStore(secretProvider, clientProvider) + + urn, err := resource.NewURN("maxcompute", "project.schema.table") + assert.NoError(t, err) + + client.On("TableHandleFrom").Return(tableHandle).Maybe() + tableHandle.On("Exists", mock.Anything).Return(false).Maybe() + client.On("ViewHandleFrom").Return(viewHandle) + viewHandle.On("Exists", mock.Anything).Return(true) + + actualExist, actualError := mcStore.Exist(ctx, tnnt, urn) + assert.True(t, actualExist) + assert.NoError(t, actualError) + }) + t.Run("returns false and nil when schema table resource does not exist", func(t *testing.T) { + secretProvider := new(mockSecretProvider) + secretProvider.On("GetSecret", mock.Anything, tnnt, "DATASTORE_MAXCOMPUTE").Return(pts, nil) + defer secretProvider.AssertExpectations(t) + + client := new(mockClient) + defer client.AssertExpectations(t) + + clientProvider := new(mockClientProvider) + clientProvider.On("Get", pts.Value()).Return(client, nil) + defer clientProvider.AssertExpectations(t) + + tableHandle := new(mockTableResourceHandle) + viewHandle := new(mockTableResourceHandle) + defer func() { + tableHandle.AssertExpectations(t) + viewHandle.AssertExpectations(t) + }() + + mcStore := maxcompute.NewMaxComputeDataStore(secretProvider, clientProvider) + + urn, err := resource.NewURN("maxcompute", "project.schema.table") + assert.NoError(t, err) + + client.On("TableHandleFrom").Return(tableHandle).Maybe() + tableHandle.On("Exists", mock.Anything).Return(false).Maybe() + client.On("ViewHandleFrom").Return(viewHandle).Maybe() + viewHandle.On("Exists", mock.Anything).Return(false).Maybe() + + actualExist, actualError := mcStore.Exist(ctx, tnnt, urn) + assert.False(t, actualExist) + assert.NoError(t, actualError) + }) + }) +} + +type mockTableResourceHandle struct { + mock.Mock +} + +func (m *mockTableResourceHandle) Create(res *resource.Resource) error { + args := m.Called(res) + return args.Error(0) +} + +func (m *mockTableResourceHandle) Update(res *resource.Resource) error { + args := m.Called(res) + return args.Error(0) +} + +func (m *mockTableResourceHandle) Exists(tableName string) bool { + args := m.Called(tableName) + return args.Get(0).(bool) +} + +type mockClient struct { + mock.Mock +} + +func (m *mockClient) TableHandleFrom() maxcompute.TableResourceHandle { + args := m.Called() + return args.Get(0).(maxcompute.TableResourceHandle) +} + +func (m *mockClient) ViewHandleFrom() maxcompute.TableResourceHandle { + args := m.Called() + return args.Get(0).(maxcompute.TableResourceHandle) +} + +type mockClientProvider struct { + mock.Mock +} + +func (m *mockClientProvider) Get(account string) (maxcompute.Client, error) { + args := m.Called(account) + if args.Get(0) != nil { + return args.Get(0).(maxcompute.Client), args.Error(1) + } + return nil, args.Error(1) +} + +type mockSecretProvider struct { + mock.Mock +} + +func (s *mockSecretProvider) GetSecret(ctx context.Context, ten tenant.Tenant, name string) (*tenant.PlainTextSecret, error) { + args := s.Called(ctx, ten, name) + var pts *tenant.PlainTextSecret + if args.Get(0) != nil { + pts = args.Get(0).(*tenant.PlainTextSecret) + } + return pts, args.Error(1) +} diff --git a/ext/store/maxcompute/resource_urn.go b/ext/store/maxcompute/resource_urn.go new file mode 100644 index 
0000000000..7ded6b8bad --- /dev/null +++ b/ext/store/maxcompute/resource_urn.go @@ -0,0 +1,140 @@ +package maxcompute + +import ( + "fmt" + "strings" + + "github.com/mitchellh/mapstructure" + + "github.com/goto/optimus/core/resource" + "github.com/goto/optimus/internal/errors" +) + +type ResourceURN struct { + Project string `mapstructure:"project"` + Schema string `mapstructure:"database"` + Name string `mapstructure:"name"` +} + +func NewResourceURN(project, schema, name string) (ResourceURN, error) { + me := errors.NewMultiError("resource urn constructor errors") + if project == "" { + me.Append(fmt.Errorf("project is empty")) + } + if schema == "" { + me.Append(fmt.Errorf("schema is empty")) + } + if name == "" { + me.Append(fmt.Errorf("name is empty")) + } + + if len(me.Errors) > 0 { + return ResourceURN{}, me.ToErr() + } + + return ResourceURN{ + Project: project, + Schema: schema, + Name: name, + }, nil +} + +func (n ResourceURN) URN() string { + return "maxcompute://" + fmt.Sprintf("%s.%s.%s", n.Project, n.Schema, n.Name) +} + +type ResourceURNWithUpstreams struct { + ResourceURN ResourceURN + Upstreams []*ResourceURNWithUpstreams +} + +type ResourceURNWithUpstreamsList []*ResourceURNWithUpstreams + +func (rs ResourceURNWithUpstreamsList) FlattenUnique() []*ResourceURNWithUpstreams { + var output []*ResourceURNWithUpstreams + for _, r := range rs { + if r == nil { + continue + } + newResource := *r + newResource.Upstreams = nil + nested := ResourceURNWithUpstreamsList(r.Upstreams).FlattenUnique() + output = append(output, &newResource) + output = append(output, nested...) + } + + return ResourceURNWithUpstreamsList(output).unique() +} + +func (rs ResourceURNWithUpstreamsList) unique() ResourceURNWithUpstreamsList { + mapUnique := map[ResourceURN]*ResourceURNWithUpstreams{} + for _, r := range rs { + mapUnique[r.ResourceURN] = r + } + + output := make([]*ResourceURNWithUpstreams, len(mapUnique)) + i := 0 + for _, u := range mapUnique { + output[i] = u + i++ + } + return output +} + +type ProjectSchema struct { + Project string + Schema string +} + +type ResourceURNs []ResourceURN + +func (n ResourceURNs) GroupByProjectschema() map[ProjectSchema][]string { + output := make(map[ProjectSchema][]string) + + for _, resourceURN := range n { + pd := ProjectSchema{Project: resourceURN.Project, Schema: resourceURN.Schema} + if _, ok := output[pd]; !ok { + output[pd] = []string{} + } + output[pd] = append(output[pd], resourceURN.Name) + } + + return output +} + +func URNFor(res *resource.Resource) (resource.URN, error) { + spec, err := getURNComponent(res) + if err != nil { + return resource.ZeroURN(), errors.InvalidArgument(resource.EntityResource, "not able to decode spec") + } + + name := spec.Project + "." + spec.Schema + "." 
+ spec.Name + + return resource.NewURN(resource.MaxCompute.String(), name) +} + +func getURNComponent(res *resource.Resource) (ResourceURN, error) { + var spec ResourceURN + if err := mapstructure.Decode(res.Spec(), &spec); err != nil { + return spec, err + } + + return spec, nil +} + +func getComponentName(res *resource.Resource) (resource.Name, error) { + component, err := getURNComponent(res) + if err != nil { + return "", err + } + return resource.Name(component.Name), nil +} + +func resourceNameFor(name resource.Name) (string, error) { + parts := strings.Split(name.String(), ".") + if len(parts) < TableNameSections { + return "", errors.InvalidArgument(resource.EntityResource, "invalid resource name: "+name.String()) + } + + return parts[2], nil +} diff --git a/ext/store/maxcompute/schema.go b/ext/store/maxcompute/schema.go new file mode 100644 index 0000000000..a69949a0e2 --- /dev/null +++ b/ext/store/maxcompute/schema.go @@ -0,0 +1,287 @@ +package maxcompute + +import ( + "fmt" + "strings" + + "github.com/aliyun/aliyun-odps-go-sdk/odps/datatype" + "github.com/aliyun/aliyun-odps-go-sdk/odps/tableschema" + "github.com/mitchellh/mapstructure" + + "github.com/goto/optimus/core/resource" + "github.com/goto/optimus/internal/errors" +) + +const ( + resourceSchema = "maxcompute_schema" + + KindTable string = "table" + KindView string = "view" + KindSchema string = "schema" +) + +type Schema []*Field + +func (s Schema) Validate() error { + for _, f := range s { + err := f.Validate() + if err != nil { + return err + } + } + return nil +} + +func (s Schema) ToMaxComputeColumns(partitionColumn map[string]struct{}, clusterColumn *Cluster, schemaBuilder *tableschema.SchemaBuilder) error { + mu := errors.NewMultiError("converting to max compute column") + + clusterColumnAllowed := map[string]struct{}{} + for _, f := range s { + column, err := f.ToColumn() + if err != nil { + mu.Append(err) + continue + } + + if _, ok := partitionColumn[f.Name]; ok { + schemaBuilder.PartitionColumn(column) + } else { + schemaBuilder.Column(column) + clusterColumnAllowed[column.Name] = struct{}{} + } + } + + if clusterColumn != nil && len(clusterColumn.Using) != 0 { + if clusterColumn.Type == "" { + clusterColumn.Type = tableschema.CLUSTER_TYPE.Hash + } + schemaBuilder.ClusterType(clusterColumn.Type) + + if clusterColumn.Type == tableschema.CLUSTER_TYPE.Hash { + if clusterColumn.Buckets == 0 { + mu.Append(errors.InvalidArgument(resourceSchema, "number of cluster buckets is needed for hash type clustering")) + return mu.ToErr() + } + schemaBuilder.ClusterBucketNum(clusterColumn.Buckets) + } + + sortClusterAllowed := map[string]struct{}{} + for _, column := range clusterColumn.Using { + if _, ok := clusterColumnAllowed[column]; !ok { + mu.Append(errors.InvalidArgument(resourceSchema, fmt.Sprintf("cluster column %s not found in normal column", column))) + return mu.ToErr() + } + sortClusterAllowed[column] = struct{}{} + } + schemaBuilder.ClusterColumns(clusterColumn.Using) + + if len(clusterColumn.SortBy) != 0 { + var sortClusterColumn []tableschema.SortColumn + for _, sortColumn := range clusterColumn.SortBy { + if _, ok := sortClusterAllowed[sortColumn.Name]; !ok { + mu.Append(errors.InvalidArgument(resourceSchema, fmt.Sprintf("sort column %s not found in cluster column", sortColumn.Name))) + return mu.ToErr() + } + sortClusterColumn = append(sortClusterColumn, tableschema.SortColumn{Name: sortColumn.Name, Order: tableschema.SortOrder(sortColumn.Order)}) + } + schemaBuilder.ClusterSortColumns(sortClusterColumn) + } + 
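Back-referencing resource_urn.go above, a small sketch of how its exported pieces compose, using only constructors shown in the diff; the project, schema, and table values are illustrative.

```go
package main

import (
	"fmt"
	"log"

	"github.com/goto/optimus/ext/store/maxcompute"
)

func main() {
	urn, err := maxcompute.NewResourceURN("proj", "schema", "customer_table")
	if err != nil {
		log.Fatal(err)
	}

	// Same maxcompute://<project>.<schema>.<name> shape that URNFor assembles
	// from a resource spec's project/database/name fields.
	fmt.Println(urn.URN()) // maxcompute://proj.schema.customer_table

	// Helper used when several URNs share a project/schema pair.
	grouped := maxcompute.ResourceURNs{urn}.GroupByProjectschema()
	fmt.Println(grouped) // map[{proj schema}:[customer_table]]
}
```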
} + + return mu.ToErr() +} + +type MapSchema struct { + Key Field `mapstructure:"key"` + Value Field `mapstructure:"value"` +} + +func (m *MapSchema) Validate() error { + mu := errors.NewMultiError("map schema validation") + mu.Append(m.Key.validateNode(false)) + mu.Append(m.Value.validateNode(false)) + return mu.ToErr() +} + +type Field struct { + Name string `mapstructure:"name,omitempty"` + Type string `mapstructure:"type,omitempty"` + Description string `mapstructure:"description,omitempty"` + + // First label should be the primary label and others as extended + Labels []string `mapstructure:"labels,omitempty"` + + DefaultValue string `mapstructure:"default_value,omitempty"` + Required bool `mapstructure:"required,omitempty"` + + Decimal *Decimal `mapstructure:"decimal,omitempty"` + Char *Char `mapstructure:"char,omitempty"` + VarChar *VarChar `mapstructure:"varchar,omitempty"` + StructSchema []Field `mapstructure:"struct,omitempty"` + ArraySchema *Field `mapstructure:"array,omitempty"` + MapSchema *MapSchema `mapstructure:"map,omitempty"` +} + +func (f *Field) Validate() error { + return f.validateNode(true) +} + +func (f *Field) toMaxDataType() (datatype.DataType, error) { + typeCode := datatype.TypeCodeFromStr(strings.ToUpper(f.Type)) + + switch typeCode { + case datatype.ARRAY: + d2, err := f.ArraySchema.toMaxDataType() + if err != nil { + return nil, err + } + return datatype.NewArrayType(d2), nil + + case datatype.STRUCT: + fields := make([]datatype.StructFieldType, len(f.StructSchema)) + for i, f1 := range f.StructSchema { + d1, err := f1.toMaxDataType() + if err != nil { + return nil, err + } + fields[i] = datatype.StructFieldType{ + Name: f1.Name, + Type: d1, + } + } + return datatype.NewStructType(fields...), nil + + case datatype.CHAR: + return datatype.NewCharType(f.Char.Length), nil + + case datatype.VARCHAR: + return datatype.NewVarcharType(f.VarChar.Length), nil + + case datatype.DECIMAL: + return datatype.NewDecimalType(f.Decimal.Precision, f.Decimal.Scale), nil + + case datatype.JSON: + return datatype.NewJsonType(), nil + + case datatype.MAP: + keyType, err := f.MapSchema.Key.toMaxDataType() + if err != nil { + return nil, err + } + valueType, err := f.MapSchema.Value.toMaxDataType() + if err != nil { + return nil, err + } + return datatype.NewMapType(keyType, valueType), nil + + case datatype.TypeUnknown: + return nil, errors.InvalidArgument(resourceSchema, "unknown data type: "+f.Type) + default: + return datatype.NewPrimitiveType(typeCode), nil + } +} + +func (f *Field) ToColumn() (tableschema.Column, error) { + dataType, err := f.toMaxDataType() + if err != nil { + return tableschema.Column{}, err + } + + c1 := tableschema.Column{ + Name: f.Name, + Type: dataType, + Comment: f.Description, + ExtendedLabels: nil, + IsNullable: true, + } + + if f.Required { + c1.IsNullable = false + } + + if f.DefaultValue != "" { + c1.HasDefaultValue = true + c1.DefaultValue = f.DefaultValue + } + + if len(f.Labels) > 0 { + c1.Label = f.Labels[0] + c1.ExtendedLabels = f.Labels[1:] + } + + return c1, nil +} + +func (f *Field) validateNode(checkName bool) error { + mu := errors.NewMultiError("field validation") + if checkName && strings.TrimSpace(f.Name) == "" { + mu.Append(errors.InvalidArgument(resourceSchema, "field name is empty")) + } + + typeCode := datatype.TypeCodeFromStr(strings.ToUpper(f.Type)) + if typeCode == datatype.TypeUnknown { + mu.Append(errors.InvalidArgument(resourceSchema, "unknown field type for "+f.Name)) + } + + switch typeCode { + case datatype.TypeUnknown: + 
mu.Append(errors.InvalidArgument(resourceSchema, "unknown data type: "+f.Type)) + + case datatype.DECIMAL: + if f.Decimal == nil { + mu.Append(errors.InvalidArgument(resourceSchema, "field decimal is empty")) + } else { + mu.Append(f.Decimal.Validate()) + } + + case datatype.CHAR: + if f.Char == nil { + mu.Append(errors.InvalidArgument(resourceSchema, "field char is empty")) + } else { + mu.Append(f.Char.Validate()) + } + + case datatype.VARCHAR: + if f.VarChar == nil { + mu.Append(errors.InvalidArgument(resourceSchema, "field varchar is empty")) + } else { + mu.Append(f.VarChar.Validate()) + } + + case datatype.STRUCT: + if f.StructSchema == nil { + mu.Append(errors.InvalidArgument(resourceSchema, "struct schema is empty")) + } else { + for _, rField := range f.StructSchema { + mu.Append(rField.validateNode(true)) + } + } + + case datatype.ARRAY: + if f.ArraySchema == nil { + mu.Append(errors.InvalidArgument(resourceSchema, "array schema is empty")) + } else { + mu.Append(f.ArraySchema.validateNode(false)) + } + + case datatype.MAP: + if f.MapSchema == nil { + mu.Append(errors.InvalidArgument(resourceSchema, "map schema is empty")) + } else { + mu.Append(f.MapSchema.Validate()) + } + default: + // other data types do not require special properties + } + + return mu.ToErr() +} + +func ConvertSpecTo[T Table | View](res *resource.Resource) (*T, error) { + var spec T + if err := mapstructure.Decode(res.Spec(), &spec); err != nil { + msg := fmt.Sprintf("%s: not able to decode spec for %s", err, res.FullName()) + return nil, errors.InvalidArgument(resource.EntityResource, msg) + } + return &spec, nil +} diff --git a/ext/store/maxcompute/schema_test.go b/ext/store/maxcompute/schema_test.go new file mode 100644 index 0000000000..1589384dd9 --- /dev/null +++ b/ext/store/maxcompute/schema_test.go @@ -0,0 +1,455 @@ +package maxcompute_test + +import ( + "fmt" + "testing" + + "github.com/aliyun/aliyun-odps-go-sdk/odps/tableschema" + "github.com/stretchr/testify/assert" + + "github.com/goto/optimus/ext/store/maxcompute" +) + +func TestSchemaValidate(t *testing.T) { + t.Run("returns error when schema field type is unknown", func(t *testing.T) { + schema := maxcompute.Schema{ + { + Name: "name", + Type: "unknown", + }, + } + + err := schema.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "unknown field type") + }) + t.Run("return success when schema is valid", func(t *testing.T) { + schema := maxcompute.Schema{ + { + Name: "name", + Type: "string", + }, + { + Name: "age", + Type: "int", + }, + } + + err := schema.Validate() + assert.Nil(t, err) + }) +} + +func TestSchemaToMaxComputeColumn(t *testing.T) { + emptyPartitionColumnName := map[string]struct{}{} + t.Run("return error when schema column type is unknown", func(t *testing.T) { + schema := maxcompute.Schema{ + { + Name: "name", + Type: "unknown", + }, + } + + err := schema.ToMaxComputeColumns(emptyPartitionColumnName, nil, nil) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "unknown data type") + }) + t.Run("return error when schema column array type is invalid", func(t *testing.T) { + schema := maxcompute.Schema{ + { + Name: "name", + Type: "array", + ArraySchema: &maxcompute.Field{ + Type: "unknown", + }, + }, + } + + err := schema.ToMaxComputeColumns(emptyPartitionColumnName, nil, nil) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "unknown data type") + }) + t.Run("return error when schema column struct type is invalid", func(t *testing.T) { + schema := maxcompute.Schema{ + { + Name: "name", + Type: "struct", 
+ StructSchema: []maxcompute.Field{ + { + Name: "test", + Type: "unknown", + }, + }, + }, + } + + err := schema.ToMaxComputeColumns(emptyPartitionColumnName, nil, nil) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "unknown data type") + }) + t.Run("return error when schema column map type is invalid", func(t *testing.T) { + schema := maxcompute.Schema{ + { + Name: "name", + Type: "map", + MapSchema: &maxcompute.MapSchema{ + Key: maxcompute.Field{ + Name: "test_key", + Type: "string", + }, + Value: maxcompute.Field{ + Name: "test_key", + Type: "unknown", + }, + }, + }, + { + Name: "other", + Type: "map", + MapSchema: &maxcompute.MapSchema{ + Key: maxcompute.Field{ + Name: "test_key", + Type: "unknown", + }, + Value: maxcompute.Field{ + Name: "test_key", + Type: "string", + }, + }, + }, + } + + err := schema.ToMaxComputeColumns(emptyPartitionColumnName, nil, nil) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "unknown data type") + }) + t.Run("return error when cluster column type is hash but not specify bucket", func(t *testing.T) { + builder := tableschema.NewSchemaBuilder() + schema := maxcompute.Schema{ + { + Name: "name", + Required: true, + DefaultValue: "test", + Type: "char", + Char: &maxcompute.Char{Length: 255}, + Labels: []string{"owner", "member"}, + }, + } + + clusterColumns := &maxcompute.Cluster{ + Using: []string{"name", "age"}, + SortBy: []maxcompute.SortColumn{{Name: "name", Order: "asc"}}, + } + + err := schema.ToMaxComputeColumns(emptyPartitionColumnName, clusterColumns, &builder) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "number of cluster buckets is needed for hash type clustering") + }) + t.Run("return error when cluster column is not found in normal column", func(t *testing.T) { + builder := tableschema.NewSchemaBuilder() + schema := maxcompute.Schema{ + { + Name: "name", + Required: true, + DefaultValue: "test", + Type: "char", + Char: &maxcompute.Char{Length: 255}, + Labels: []string{"owner", "member"}, + }, + } + + invalidClusterColumn := "age" + clusterColumns := &maxcompute.Cluster{ + Using: []string{invalidClusterColumn}, + Buckets: 5, + } + + err := schema.ToMaxComputeColumns(emptyPartitionColumnName, clusterColumns, &builder) + assert.NotNil(t, err) + assert.ErrorContains(t, err, fmt.Sprintf("cluster column %s not found in normal column", invalidClusterColumn)) + }) + t.Run("return error when sort column is not found in cluster column", func(t *testing.T) { + builder := tableschema.NewSchemaBuilder() + schema := maxcompute.Schema{ + { + Name: "name", + Required: true, + DefaultValue: "test", + Type: "char", + Char: &maxcompute.Char{Length: 255}, + Labels: []string{"owner", "member"}, + }, + } + + invalidSortClusterColumn := "age" + clusterColumns := &maxcompute.Cluster{ + Using: []string{"name"}, + SortBy: []maxcompute.SortColumn{{Name: invalidSortClusterColumn, Order: "asc"}}, + Buckets: 5, + } + + err := schema.ToMaxComputeColumns(emptyPartitionColumnName, clusterColumns, &builder) + assert.NotNil(t, err) + assert.ErrorContains(t, err, fmt.Sprintf("sort column %s not found in cluster column", invalidSortClusterColumn)) + }) + t.Run("return success when schema column is valid", func(t *testing.T) { + builder := tableschema.NewSchemaBuilder() + schema := maxcompute.Schema{ + { + Name: "name", + Required: true, + DefaultValue: "test", + Type: "char", + Char: &maxcompute.Char{Length: 255}, + Labels: []string{"owner", "member"}, + }, + { + Name: "introduction", + Type: "varchar", + VarChar: &maxcompute.VarChar{Length: 300}, + }, + { 
+ Name: "age", + Type: "int", + }, + { + Name: "weight", + Type: "decimal", + Decimal: &maxcompute.Decimal{Precision: 2, Scale: 1}, + }, + { + Name: "friends", + Type: "array", + ArraySchema: &maxcompute.Field{ + Type: "string", + }, + }, + { + Name: "address", + Type: "struct", + StructSchema: []maxcompute.Field{ + { + Name: "city", + Type: "string", + }, + { + Name: "zip", + Type: "string", + }, + }, + }, + { + Name: "other", + Type: "map", + MapSchema: &maxcompute.MapSchema{ + Key: maxcompute.Field{ + Type: "string", + }, + Value: maxcompute.Field{ + Type: "string", + }, + }, + }, + { + Name: "data", + Type: "json", + }, + } + partitionColumnName := map[string]struct{}{ + "data": {}, + } + + clusterColumns := &maxcompute.Cluster{ + Using: []string{"name", "age"}, + SortBy: []maxcompute.SortColumn{{Name: "name", Order: "asc"}}, + Buckets: 5, + } + + err := schema.ToMaxComputeColumns(partitionColumnName, clusterColumns, &builder) + assert.Nil(t, err) + }) +} + +func TestFieldValidate(t *testing.T) { + t.Run("when invalid", func(t *testing.T) { + t.Run("returns error when name is empty", func(t *testing.T) { + f := maxcompute.Field{ + Name: "", + Type: "string", + } + + err := f.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "field name is empty") + }) + t.Run("returns error when type is unknown", func(t *testing.T) { + f := maxcompute.Field{ + Name: "name", + Type: "unknown", + } + + err := f.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "unknown field type") + }) + t.Run("returns error when decimal type is empty", func(t *testing.T) { + f := maxcompute.Field{ + Name: "name", + Type: "decimal", + Decimal: nil, + } + + err := f.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "field decimal is empty") + }) + t.Run("returns error when decimal type is invalid", func(t *testing.T) { + f := maxcompute.Field{ + Name: "name", + Type: "decimal", + Decimal: &maxcompute.Decimal{Precision: 10, Scale: 20}, + } + + err := f.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "decimal scale[20] is not valid") + }) + t.Run("returns error when char type is empty", func(t *testing.T) { + f := maxcompute.Field{ + Name: "name", + Type: "char", + Char: nil, + } + + err := f.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "field char is empty") + }) + t.Run("returns error when char type is invalid", func(t *testing.T) { + f := maxcompute.Field{ + Name: "name", + Type: "char", + Char: &maxcompute.Char{Length: 300}, + } + + err := f.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "char length[300] is not valid") + }) + t.Run("returns error when varchar type is empty", func(t *testing.T) { + f := maxcompute.Field{ + Name: "name", + Type: "varchar", + VarChar: nil, + } + + err := f.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "field varchar is empty") + }) + t.Run("returns error when varchar type is invalid", func(t *testing.T) { + f := maxcompute.Field{ + Name: "name", + Type: "varchar", + VarChar: &maxcompute.VarChar{Length: 65999}, + } + + err := f.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "varchar length[65999] is not valid") + }) + t.Run("returns error when struct type is empty", func(t *testing.T) { + f := maxcompute.Field{ + Name: "name", + Type: "struct", + StructSchema: nil, + } + + err := f.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "struct schema is empty") + }) + t.Run("returns error when struct type is invalid", func(t 
*testing.T) { + f := maxcompute.Field{ + Name: "collection", + Type: "struct", + StructSchema: []maxcompute.Field{ + { + Name: "store", + Type: "string", + }, + { + Name: "product", + Type: "unknown", + }, + }, + } + + err := f.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "unknown field type") + }) + t.Run("returns error when array type is empty", func(t *testing.T) { + f := maxcompute.Field{ + Name: "name", + Type: "array", + ArraySchema: nil, + } + + err := f.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "array schema is empty") + }) + t.Run("returns error when array type is invalid", func(t *testing.T) { + f := maxcompute.Field{ + Name: "names", + Type: "array", + ArraySchema: &maxcompute.Field{ + Type: "unknown", + }, + } + + err := f.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "unknown field type") + }) + t.Run("returns error when map type is empty", func(t *testing.T) { + f := maxcompute.Field{ + Name: "name", + Type: "map", + MapSchema: nil, + } + + err := f.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "map schema is empty") + }) + t.Run("returns error when map type is invalid", func(t *testing.T) { + f := maxcompute.Field{ + Name: "name", + Type: "map", + MapSchema: &maxcompute.MapSchema{ + Key: maxcompute.Field{ + Type: "string", + }, + Value: maxcompute.Field{ + Type: "unknown", + }, + }, + } + + err := f.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "unknown field type") + }) + }) + t.Run("return success when field schema is valid", func(t *testing.T) { + f := maxcompute.Field{ + Name: "name", + Type: "string", + } + + err := f.Validate() + assert.Nil(t, err) + }) +} diff --git a/ext/store/maxcompute/table.go b/ext/store/maxcompute/table.go new file mode 100644 index 0000000000..6e860189fd --- /dev/null +++ b/ext/store/maxcompute/table.go @@ -0,0 +1,285 @@ +package maxcompute + +import ( + "fmt" + "strings" + + "github.com/aliyun/aliyun-odps-go-sdk/odps" + "github.com/aliyun/aliyun-odps-go-sdk/odps/datatype" + "github.com/aliyun/aliyun-odps-go-sdk/odps/tableschema" + + "github.com/goto/optimus/core/resource" + "github.com/goto/optimus/internal/errors" + "github.com/goto/optimus/internal/utils" +) + +type ColumnRecord struct { + columnStructure string + columnValue tableschema.Column +} + +type McSQLExecutor interface { + ExecSQlWithHints(sql string, hints map[string]string) (*odps.Instance, error) +} + +type McTable interface { + Create(schema tableschema.TableSchema, createIfNotExists bool, hints, alias map[string]string) error + BatchLoadTables(tableNames []string) ([]odps.Table, error) +} + +type TableHandle struct { + mcSQLExecutor McSQLExecutor + mcTable McTable +} + +func (t TableHandle) Create(res *resource.Resource) error { + table, err := ConvertSpecTo[Table](res) + if err != nil { + return err + } + name, err := getComponentName(res) + if err != nil { + return err + } + table.Name = name + + schema, err := buildTableSchema(table) + if err != nil { + return errors.AddErrContext(err, EntityTable, "failed to build table schema to create for "+res.FullName()) + } + + err = t.mcTable.Create(schema, false, table.Hints, nil) + if err != nil { + if strings.Contains(err.Error(), "Table or view already exists") { + return errors.AlreadyExists(EntityTable, "table already exists on maxcompute: "+res.FullName()) + } + return errors.InternalError(EntityTable, "error while creating table on maxcompute", err) + } + return nil +} + +func (t TableHandle) Update(res *resource.Resource) 
error { + tableName, err := getComponentName(res) + if err != nil { + return err + } + + existing, err := t.mcTable.BatchLoadTables([]string{tableName.String()}) + if err != nil { + return errors.InternalError(EntityTable, "error while get table on maxcompute", err) + } + + existingSchema, err := existing[0].GetSchema() + if err != nil { + return errors.AddErrContext(err, EntityTable, "failed to get old table schema to update for "+res.FullName()) + } + + table, err := ConvertSpecTo[Table](res) + if err != nil { + return err + } + + if table.Hints == nil { + table.Hints = make(map[string]string) + } + table.Hints["odps.sql.schema.evolution.json.enable"] = "true" + + schema, err := buildTableSchema(table) + if err != nil { + return errors.AddErrContext(err, EntityTable, "failed to build table schema to update for "+res.FullName()) + } + + sqlTasks, err := generateUpdateQuery(schema, *existingSchema) + if err != nil { + return errors.AddErrContext(err, EntityTable, "invalid schema for table "+res.FullName()) + } + + for _, task := range sqlTasks { + ins, err := t.mcSQLExecutor.ExecSQlWithHints(task, table.Hints) + if err != nil { + return errors.AddErrContext(err, EntityTable, "failed to create sql task to update for "+res.FullName()) + } + + err = ins.WaitForSuccess() + if err != nil { + return errors.InternalError(EntityTable, "error while execute sql query on maxcompute", err) + } + } + + return nil +} + +func (t TableHandle) Exists(tableName string) bool { + _, err := t.mcTable.BatchLoadTables([]string{tableName}) + return err == nil +} + +func buildTableSchema(t *Table) (tableschema.TableSchema, error) { + builder := tableschema.NewSchemaBuilder() + builder. + Name(t.Name.String()). + Comment(t.Description). + Lifecycle(t.Lifecycle) + + err := populateColumns(t, &builder) + if err != nil { + return tableschema.TableSchema{}, err + } + + return builder.Build(), nil +} + +func populateColumns(t *Table, schemaBuilder *tableschema.SchemaBuilder) error { + partitionColNames := map[string]struct{}{} + if t.Partition != nil { + partitionColNames = utils.ListToMap(t.Partition.Columns) + } + + return t.Schema.ToMaxComputeColumns(partitionColNames, t.Cluster, schemaBuilder) +} + +func generateUpdateQuery(incoming, existing tableschema.TableSchema) ([]string, error) { + var sqlTasks []string + if incoming.Comment != existing.Comment { + sqlTasks = append(sqlTasks, fmt.Sprintf("alter table %s set comment '%s';", existing.TableName, incoming.Comment)) + } + + if incoming.Lifecycle != existing.Lifecycle { + sqlTasks = append(sqlTasks, fmt.Sprintf("alter table %s set lifecycle %d;", existing.TableName, incoming.Lifecycle)) + } + + _, incomingFlattenSchema := flattenSchema(incoming, false) + existingFlattenSchema, _ := flattenSchema(existing, true) + + if err := getNormalColumnDifferences(existing.TableName, incomingFlattenSchema, existingFlattenSchema, &sqlTasks); err != nil { + return []string{}, err + } + + return sqlTasks, nil +} + +func flattenSchema(tableSchema tableschema.TableSchema, isExistingTable bool) (map[string]tableschema.Column, []ColumnRecord) { + columnCollection := make(map[string]tableschema.Column) + var columnList []ColumnRecord + for _, column := range tableSchema.Columns { + trackSchema("", column, columnCollection, &columnList, false, isExistingTable) + } + + return columnCollection, columnList +} + +func trackSchema(parent string, column tableschema.Column, columnCollection map[string]tableschema.Column, columnList *[]ColumnRecord, isArrayStructType, isExistingTable bool) { + 
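+ // trackSchema flattens a (possibly nested) column into dotted keys via
+ // specifyColumnStructure: a struct column "address" with a field "city" is
+ // keyed as "address.city", and an array-of-struct column "orders" with a
+ // field "id" as "orders.element.id". Columns of the existing table are put
+ // into columnCollection for lookup by key; columns of the incoming schema
+ // are appended to columnList so their declared order is preserved.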
if isStruct(column.Type) || isArrayStruct(column.Type) { + storeColumn(specifyColumnStructure(parent, column.Name, isArrayStructType), tableschema.Column{ + Name: column.Name, + Type: column.Type, + Comment: column.Comment, + IsNullable: true, + HasDefaultValue: false, + }, columnCollection, columnList, isExistingTable) + + var structData datatype.StructType + if isArrayStruct(column.Type) { + structData = column.Type.(datatype.ArrayType).ElementType.(datatype.StructType) + } else { + structData = column.Type.(datatype.StructType) + } + + for _, field := range structData.Fields { + trackSchema( + specifyColumnStructure(parent, column.Name, isArrayStructType), + tableschema.Column{ + Name: field.Name, + Type: field.Type, + IsNullable: true, + HasDefaultValue: false, + }, + columnCollection, + columnList, + isArrayStruct(column.Type), + isExistingTable, + ) + } + + return + } + + storeColumn(specifyColumnStructure(parent, column.Name, isArrayStructType), column, columnCollection, columnList, isExistingTable) +} + +func storeColumn(key string, column tableschema.Column, columnCollection map[string]tableschema.Column, columnList *[]ColumnRecord, isExistingTable bool) { + if isExistingTable { + columnCollection[key] = column + } else { + *columnList = append(*columnList, ColumnRecord{ + columnStructure: key, + columnValue: column, + }) + } +} + +func specifyColumnStructure(parent, columnName string, isArrayStruct bool) string { + if parent == "" { + return columnName + } + if isArrayStruct { + return fmt.Sprintf("%s.element.%s", parent, columnName) + } + return fmt.Sprintf("%s.%s", parent, columnName) +} + +func getNormalColumnDifferences(tableName string, incoming []ColumnRecord, existing map[string]tableschema.Column, sqlTasks *[]string) error { + var columnAddition []string + for _, incomingColumnRecord := range incoming { + columnFound, ok := existing[incomingColumnRecord.columnStructure] + if !ok { + if !incomingColumnRecord.columnValue.IsNullable { + return fmt.Errorf("unable to add new required column") + } + segment := fmt.Sprintf("if not exists %s %s", incomingColumnRecord.columnStructure, incomingColumnRecord.columnValue.Type.Name()) + if incomingColumnRecord.columnValue.HasDefaultValue { + segment += fmt.Sprintf(" default %s", incomingColumnRecord.columnValue.DefaultValue) + } + if incomingColumnRecord.columnValue.Comment != "" { + segment += fmt.Sprintf(" comment '%s'", incomingColumnRecord.columnValue.Comment) + } + columnAddition = append(columnAddition, segment) + continue + } + + if columnFound.IsNullable && !incomingColumnRecord.columnValue.IsNullable { + return fmt.Errorf("unable to modify column mode from nullable to required") + } else if !columnFound.IsNullable && incomingColumnRecord.columnValue.IsNullable { + *sqlTasks = append(*sqlTasks, fmt.Sprintf("alter table %s change column %s null;", tableName, columnFound.Name)) + } + + if columnFound.Type.ID() != incomingColumnRecord.columnValue.Type.ID() { + return fmt.Errorf("unable to modify column data type") + } + + if incomingColumnRecord.columnValue.Comment != columnFound.Comment { + *sqlTasks = append(*sqlTasks, fmt.Sprintf("alter table %s change column %s %s %s comment '%s';", + tableName, columnFound.Name, incomingColumnRecord.columnValue.Name, columnFound.Type, incomingColumnRecord.columnValue.Comment)) + } + delete(existing, incomingColumnRecord.columnStructure) + } + + if len(existing) != 0 { + for column := range existing { + return fmt.Errorf("field %s is missing in new schema", column) + } + } + + if 
len(columnAddition) > 0 { + for _, segment := range columnAddition { + addColumnQuery := fmt.Sprintf("alter table %s add column ", tableName) + segment + ";" + *sqlTasks = append(*sqlTasks, addColumnQuery) + } + } + + return nil +} + +func NewTableHandle(mcSQLExecutor McSQLExecutor, mc McTable) *TableHandle { + return &TableHandle{mcSQLExecutor: mcSQLExecutor, mcTable: mc} +} diff --git a/ext/store/maxcompute/table_spec.go b/ext/store/maxcompute/table_spec.go new file mode 100644 index 0000000000..b7f0c8b0c5 --- /dev/null +++ b/ext/store/maxcompute/table_spec.go @@ -0,0 +1,102 @@ +package maxcompute + +import ( + "github.com/goto/optimus/core/resource" + "github.com/goto/optimus/internal/errors" +) + +const ( + EntityTable = "resource_table" +) + +/* +Table defines the spec for max-compute table +We are trying to keep the spec similar to bigquery to make maintenance easier +Cluster: we accept name of column for clustering, check Cluster for more details +Partition: we accept the name of columns for partitioning and provide in config +ExtraConfig: + + hints: hints to be provided to the table for creation, map + alias: alias for table to be passed to table, map +*/ +type Table struct { + Name resource.Name + + Description string `mapstructure:"description,omitempty"` + Schema Schema `mapstructure:"schema,omitempty"` + Cluster *Cluster `mapstructure:"cluster,omitempty"` + Partition *Partition `mapstructure:"partition,omitempty"` + Lifecycle int `mapstructure:"lifecycle,omitempty"` + + Hints map[string]string `mapstructure:"hints,omitempty"` + ExtraConfig map[string]interface{} `mapstructure:",remain"` +} + +func (t *Table) FullName() string { + return t.Name.String() +} + +func (t *Table) Validate() error { + if len(t.Schema) == 0 { + return errors.InvalidArgument(EntityTable, "empty schema for table "+t.FullName()) + } + + if err := t.Schema.Validate(); err != nil { + return errors.AddErrContext(err, EntityTable, "invalid schema for table "+t.FullName()) + } + + if t.Partition != nil { + if len(t.Partition.Columns) == 0 { + return errors.InvalidArgument(EntityTable, "invalid partition columns for table "+t.FullName()) + } + } + + if t.Cluster != nil { + if err := t.Cluster.Validate(); err != nil { + return errors.AddErrContext(err, EntityTable, "invalid cluster for table "+t.FullName()) + } + } + + return nil +} + +/* +Cluster configuration +Using: define the columns used for clustering + +Type: type of clustering to use for table + + Hash: https://www.alibabacloud.com/help/en/maxcompute/use-cases/range-clustering + Range: https://www.alibabacloud.com/help/en/maxcompute/use-cases/hash-clustering + +SortBy: columns to use for sorting +Buckets: buckets to fill data in +*/ +type Cluster struct { + Using []string `mapstructure:"using,omitempty"` + Type string `mapstructure:"type,omitempty"` + SortBy []SortColumn `mapstructure:"sort_by,omitempty"` + Buckets int `mapstructure:"buckets,omitempty"` +} + +type SortColumn struct { + Name string `mapstructure:"name"` + Order string `mapstructure:"order,omitempty"` +} + +func (c Cluster) Validate() error { + if len(c.Using) == 0 { + return errors.InvalidArgument(EntityTable, "cluster config is empty") + } + for _, clause := range c.Using { + if clause == "" { + return errors.InvalidArgument(EntityTable, "cluster config has invalid value") + } + } + + return nil +} + +type Partition struct { + Columns []string `mapstructure:"field"` +} diff --git a/ext/store/maxcompute/table_spec_test.go b/ext/store/maxcompute/table_spec_test.go new file mode 100644 
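To make the spec shape above concrete, a hedged sketch of decoding a raw resource spec into Table; the real code path goes through ConvertSpecTo and resource.Resource, so decoding the map directly with mapstructure here is only to keep the example self-contained, and all names and values are illustrative:

package main

import (
	"fmt"
	"log"

	"github.com/mitchellh/mapstructure"

	"github.com/goto/optimus/ext/store/maxcompute"
)

func main() {
	// Raw spec shaped like the map[string]any specs used in the table tests;
	// keys follow the mapstructure tags on Table, Cluster, and Partition.
	spec := map[string]any{
		"description": "customer table",
		"schema": []map[string]any{
			{"name": "customer_id", "type": "string"},
			{"name": "event_date", "type": "string"},
		},
		"partition": map[string]any{"field": []string{"event_date"}},
		"cluster":   map[string]any{"using": []string{"customer_id"}, "buckets": 4},
		"lifecycle": 30,
	}

	var table maxcompute.Table
	if err := mapstructure.Decode(spec, &table); err != nil {
		log.Fatal(err)
	}
	table.Name = "proj.schema.customer" // normally filled in by the table handle via getComponentName
	if err := table.Validate(); err != nil {
		log.Fatal(err)
	}
	fmt.Println(table.FullName(), len(table.Schema))
}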
index 0000000000..a7d25b9f07 --- /dev/null +++ b/ext/store/maxcompute/table_spec_test.go @@ -0,0 +1,101 @@ +package maxcompute_test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/goto/optimus/ext/store/maxcompute" +) + +func TestRelationalTable(t *testing.T) { + t.Run("when invalid", func(t *testing.T) { + t.Run("returns validation error for empty schema", func(t *testing.T) { + table := maxcompute.Table{ + Name: "playground.characters", + Schema: nil, + Cluster: &maxcompute.Cluster{Using: []string{"tags"}}, + Partition: &maxcompute.Partition{Columns: []string{"time"}}, + ExtraConfig: nil, + } + err := table.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "empty schema for table playground.characters") + }) + t.Run("returns validation error for invalid schema", func(t *testing.T) { + table := maxcompute.Table{ + Name: "playground.characters", + Schema: maxcompute.Schema{{Name: "", Type: "string"}}, + Cluster: &maxcompute.Cluster{Using: []string{"tags"}}, + Partition: &maxcompute.Partition{Columns: []string{"time"}}, + ExtraConfig: nil, + } + err := table.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "invalid schema for table playground.characters") + }) + t.Run("returns validation error for invalid cluster", func(t *testing.T) { + table := maxcompute.Table{ + Name: "playground.characters", + Schema: maxcompute.Schema{{Name: "id", Type: "string"}}, + Cluster: &maxcompute.Cluster{Using: []string{}}, + Partition: &maxcompute.Partition{Columns: []string{"time"}}, + ExtraConfig: nil, + } + err := table.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "invalid cluster for table playground.characters") + }) + }) + t.Run("returns no validation error when correct", func(t *testing.T) { + table := maxcompute.Table{ + Name: "playground.characters", + Schema: maxcompute.Schema{{Name: "id", Type: "string"}}, + Cluster: &maxcompute.Cluster{Using: []string{"tags"}}, + Partition: &maxcompute.Partition{Columns: []string{"time"}}, + ExtraConfig: nil, + } + err := table.Validate() + assert.Nil(t, err) + + assert.Equal(t, "playground.characters", table.FullName()) + }) + t.Run("fails validation for empty field name in partition", func(t *testing.T) { + table := maxcompute.Table{ + Name: "playground.characters", + Schema: maxcompute.Schema{{Name: "id", Type: "string"}}, + Cluster: &maxcompute.Cluster{Using: []string{"tags"}}, + Partition: &maxcompute.Partition{Columns: []string{}}, + ExtraConfig: nil, + } + err := table.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "invalid partition columns for table playground.characters") + }) +} + +func TestTableClustering(t *testing.T) { + t.Run("returns error when invalid", func(t *testing.T) { + cluster := maxcompute.Cluster{Using: nil} + + err := cluster.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "cluster config is empty") + }) + t.Run("returns error when invalid value for cluster column", func(t *testing.T) { + cluster := maxcompute.Cluster{Using: []string{""}} + + err := cluster.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "cluster config has invalid value") + }) + t.Run("no validation error when valid", func(t *testing.T) { + cluster := maxcompute.Cluster{ + Using: []string{"id"}, + Type: "RANGE", + SortBy: nil, + Buckets: 0, + } + assert.Nil(t, cluster.Validate()) + }) +} diff --git a/ext/store/maxcompute/table_test.go b/ext/store/maxcompute/table_test.go new file mode 100644 index 0000000000..fd89f9e2bb --- 
/dev/null +++ b/ext/store/maxcompute/table_test.go @@ -0,0 +1,246 @@ +package maxcompute_test + +import ( + "errors" + "fmt" + "testing" + + "github.com/aliyun/aliyun-odps-go-sdk/odps" + "github.com/aliyun/aliyun-odps-go-sdk/odps/account" + "github.com/aliyun/aliyun-odps-go-sdk/odps/tableschema" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + + "github.com/goto/optimus/core/resource" + "github.com/goto/optimus/core/tenant" + "github.com/goto/optimus/ext/store/maxcompute" +) + +var emptyStringMap map[string]string + +type mockMaxComputeTable struct { + mock.Mock +} + +func (m *mockMaxComputeTable) Create(schema tableschema.TableSchema, createIfNotExists bool, hints, alias map[string]string) error { + args := m.Called(schema, createIfNotExists, hints, alias) + return args.Error(0) +} + +func (m *mockMaxComputeTable) BatchLoadTables(tableNames []string) ([]odps.Table, error) { + args := m.Called(tableNames) + return args.Get(0).([]odps.Table), args.Error(1) +} + +type mockOdpsIns struct { + mock.Mock +} + +func (m *mockOdpsIns) ExecSQl(sql string) (*odps.Instance, error) { // nolint + args := m.Called(sql) + return args.Get(0).(*odps.Instance), args.Error(1) +} + +func (m *mockOdpsIns) ExecSQlWithHints(sql string, hints map[string]string) (*odps.Instance, error) { // nolint + args := m.Called(sql, hints) + return args.Get(0).(*odps.Instance), args.Error(1) +} + +func TestTableHandle(t *testing.T) { + accessID, accessKey, endpoint := "LNRJ5tH1XMSINW5J3TjYAvfX", "lAZBJhdkNbwVj3bej5BuhjwbdV0nSp", "http://service.ap-southeast-5.maxcompute.aliyun.com/api" + projectName, schemaName, tableName := "proj", "schema", "test_table" + fullName := projectName + "." + schemaName + "." + tableName + mcStore := resource.MaxCompute + tnnt, _ := tenant.NewTenant(projectName, "ns") + metadata := resource.Metadata{ + Version: 1, + Description: "resource description", + Labels: map[string]string{"owner": "optimus"}, + } + + normalTables := []odps.Table{ + odps.NewTable(odps.NewOdps(account.NewAliyunAccount(accessID, accessKey), endpoint), projectName, tableName), + } + + t.Run("Create", func(t *testing.T) { + t.Run("returns error when cannot convert spec", func(t *testing.T) { + table := new(mockMaxComputeTable) + odpsIns := new(mockOdpsIns) + tableHandle := maxcompute.NewTableHandle(odpsIns, table) + + spec := map[string]any{"description": []string{"test create"}} + res, err := resource.NewResource(fullName, maxcompute.KindTable, mcStore, tnnt, &metadata, spec) + assert.Nil(t, err) + + err = tableHandle.Create(res) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "not able to decode spec for "+fullName) + }) + t.Run("returns error when use invalid schema data type", func(t *testing.T) { + table := new(mockMaxComputeTable) + odpsIns := new(mockOdpsIns) + tableHandle := maxcompute.NewTableHandle(odpsIns, table) + + spec := map[string]any{ + "description": "test create", + "schema": []map[string]any{ + { + "name": "customer_id", + "type": "STRING_ERROR", + }, + }, + "partition": map[string]any{ + "field": []string{"customer_id"}, + }, + } + res, err := resource.NewResource(fullName, maxcompute.KindTable, mcStore, tnnt, &metadata, spec) + assert.Nil(t, err) + + err = tableHandle.Create(res) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "failed to build table schema to create for "+fullName) + }) + t.Run("returns error when table already present on maxcompute", func(t *testing.T) { + existTableErr := errors.New("Table or view already exists - table or view proj.test_table 
is already defined") + table := new(mockMaxComputeTable) + table.On("Create", mock.Anything, false, emptyStringMap, emptyStringMap).Return(existTableErr) + defer table.AssertExpectations(t) + odpsIns := new(mockOdpsIns) + tableHandle := maxcompute.NewTableHandle(odpsIns, table) + + spec := map[string]any{ + "description": "test create", + "schema": []map[string]any{ + { + "name": "customer_id", + "type": "STRING", + }, + }, + "partition": map[string]any{ + "field": []string{"customer_id"}, + }, + } + res, err := resource.NewResource(fullName, maxcompute.KindTable, mcStore, tnnt, &metadata, spec) + assert.Nil(t, err) + + err = tableHandle.Create(res) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "table already exists on maxcompute: "+fullName) + }) + t.Run("returns error when table creation returns error", func(t *testing.T) { + table := new(mockMaxComputeTable) + table.On("Create", mock.Anything, false, emptyStringMap, emptyStringMap).Return(errors.New("some error")) + defer table.AssertExpectations(t) + odpsIns := new(mockOdpsIns) + tableHandle := maxcompute.NewTableHandle(odpsIns, table) + + spec := map[string]any{ + "description": "test create", + "schema": []map[string]any{ + { + "name": "customer_id", + "type": "STRING", + }, + }, + "partition": map[string]any{ + "field": []string{"customer_id"}, + }, + } + res, err := resource.NewResource(fullName, maxcompute.KindTable, mcStore, tnnt, &metadata, spec) + assert.Nil(t, err) + + err = tableHandle.Create(res) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "error while creating table on maxcompute") + }) + t.Run("return success when create the resource with partition", func(t *testing.T) { + table := new(mockMaxComputeTable) + table.On("Create", mock.Anything, false, emptyStringMap, emptyStringMap).Return(nil) + defer table.AssertExpectations(t) + odpsIns := new(mockOdpsIns) + tableHandle := maxcompute.NewTableHandle(odpsIns, table) + + spec := map[string]any{ + "description": "test create", + "schema": []map[string]any{ + { + "name": "customer_id", + "type": "STRING", + }, + { + "name": "customer_name", + "type": "STRING", + }, + { + "name": "product_name", + "type": "STRING", + }, + }, + "partition": map[string]any{ + "field": []string{"customer_id"}, + }, + } + res, err := resource.NewResource(fullName, maxcompute.KindTable, mcStore, tnnt, &metadata, spec) + assert.Nil(t, err) + + err = tableHandle.Create(res) + assert.Nil(t, err) + }) + }) + + t.Run("Update", func(t *testing.T) { + t.Run("returns error when table is not found on maxcompute", func(t *testing.T) { + table := new(mockMaxComputeTable) + table.On("BatchLoadTables", mock.Anything).Return([]odps.Table{}, fmt.Errorf("table not found")) + defer table.AssertExpectations(t) + odpsIns := new(mockOdpsIns) + tableHandle := maxcompute.NewTableHandle(odpsIns, table) + + spec := map[string]any{"description": []string{"test update"}} + res, err := resource.NewResource(fullName, maxcompute.KindTable, mcStore, tnnt, &metadata, spec) + assert.Nil(t, err) + + err = tableHandle.Update(res) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "error while get table on maxcompute") + }) + t.Run("returns error when get table schema", func(t *testing.T) { + table := new(mockMaxComputeTable) + table.On("BatchLoadTables", mock.Anything).Return(normalTables, nil) + defer table.AssertExpectations(t) + odpsIns := new(mockOdpsIns) + tableHandle := maxcompute.NewTableHandle(odpsIns, table) + + spec := map[string]any{"description": []string{"test update"}} + res, err := 
resource.NewResource(fullName, maxcompute.KindTable, mcStore, tnnt, &metadata, spec) + assert.Nil(t, err) + + err = tableHandle.Update(res) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "failed to get old table schema to update for "+fullName) + }) + }) + + t.Run("Exists", func(t *testing.T) { + t.Run("returns false when error in checking existing tables", func(t *testing.T) { + table := new(mockMaxComputeTable) + table.On("BatchLoadTables", mock.Anything).Return([]odps.Table{}, errors.New("error in get")) + defer table.AssertExpectations(t) + odpsIns := new(mockOdpsIns) + tableHandle := maxcompute.NewTableHandle(odpsIns, table) + + exists := tableHandle.Exists(tableName) + assert.False(t, exists) + }) + t.Run("returns true when checking existing tables", func(t *testing.T) { + table := new(mockMaxComputeTable) + table.On("BatchLoadTables", mock.Anything).Return(normalTables, nil) + defer table.AssertExpectations(t) + odpsIns := new(mockOdpsIns) + tableHandle := maxcompute.NewTableHandle(odpsIns, table) + + exists := tableHandle.Exists(tableName) + assert.True(t, exists) + }) + }) +} diff --git a/ext/store/maxcompute/type_properties.go b/ext/store/maxcompute/type_properties.go new file mode 100644 index 0000000000..feef5cbb52 --- /dev/null +++ b/ext/store/maxcompute/type_properties.go @@ -0,0 +1,69 @@ +package maxcompute + +import ( + "fmt" + + "github.com/aliyun/aliyun-odps-go-sdk/odps/datatype" + + "github.com/goto/optimus/internal/errors" +) + +const ( + maxCharLength = 255 + minDecimalPrecision = 1 + maxDecimalPrecision = 38 + minDecimalScale = 0 + maxDecimalScale = 18 + maxVarcharLength = 65535 +) + +type Decimal struct { + Precision int32 `mapstructure:"precision"` + Scale int32 `mapstructure:"scale"` +} + +func (d Decimal) Validate() error { + if d.Scale > maxDecimalScale || d.Scale < minDecimalScale { + return errors.InvalidArgument(resourceSchema, fmt.Sprintf("decimal scale[%d] is not valid", d.Scale)) + } + if d.Precision < 1 || d.Precision > maxDecimalPrecision { + return errors.InvalidArgument(resourceSchema, fmt.Sprintf("decimal precision[%d] is not valid", d.Precision)) + } + return nil +} + +type Char struct { + Length int `mapstructure:"length"` +} + +func (c Char) Validate() error { + if c.Length > maxCharLength { + return errors.InvalidArgument(resourceSchema, fmt.Sprintf("char length[%d] is not valid", c.Length)) + } + return nil +} + +type VarChar struct { + Length int `mapstructure:"length"` +} + +func (v VarChar) Validate() error { + if v.Length > maxVarcharLength || v.Length < 1 { + return errors.InvalidArgument(resourceSchema, fmt.Sprintf("varchar length[%d] is not valid", v.Length)) + } + return nil +} + +func isStruct(dataType datatype.DataType) bool { + _, ok := dataType.(datatype.StructType) + return ok +} + +func isArrayStruct(dataType datatype.DataType) bool { + cast, ok := dataType.(datatype.ArrayType) + if !ok { + return false + } + + return isStruct(cast.ElementType) +} diff --git a/ext/store/maxcompute/type_properties_test.go b/ext/store/maxcompute/type_properties_test.go new file mode 100644 index 0000000000..520df84df8 --- /dev/null +++ b/ext/store/maxcompute/type_properties_test.go @@ -0,0 +1,68 @@ +package maxcompute_test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/goto/optimus/ext/store/maxcompute" +) + +func TestDecimalProperties(t *testing.T) { + t.Run("when invalid value", func(t *testing.T) { + t.Run("returns error for invalid precision", func(t *testing.T) { + d1 := maxcompute.Decimal{ + Precision: 0, 
+ Scale: 5, + } + err := d1.Validate() + assert.Error(t, err) + assert.ErrorContains(t, err, "decimal precision[0] is not valid") + }) + t.Run("returns error for invalid scale", func(t *testing.T) { + d1 := maxcompute.Decimal{ + Precision: 5, + Scale: 20, + } + err := d1.Validate() + assert.Error(t, err) + assert.ErrorContains(t, err, "decimal scale[20] is not valid") + }) + }) + t.Run("returns no error when valid", func(t *testing.T) { + d1 := maxcompute.Decimal{ + Precision: 5, + Scale: 10, + } + err := d1.Validate() + assert.Nil(t, err) + }) +} + +func TestCharProperties(t *testing.T) { + t.Run("returns error when invalid length", func(t *testing.T) { + v1 := maxcompute.Char{Length: 256} + err := v1.Validate() + assert.Error(t, err) + assert.ErrorContains(t, err, "char length[256] is not valid") + }) + t.Run("returns no error when valid", func(t *testing.T) { + v1 := maxcompute.Char{Length: 100} + err := v1.Validate() + assert.Nil(t, err) + }) +} + +func TestVarCharProperties(t *testing.T) { + t.Run("returns error when invalid length", func(t *testing.T) { + v1 := maxcompute.VarChar{Length: 0} + err := v1.Validate() + assert.Error(t, err) + assert.ErrorContains(t, err, "varchar length[0] is not valid") + }) + t.Run("returns no error when valid", func(t *testing.T) { + v1 := maxcompute.VarChar{Length: 100} + err := v1.Validate() + assert.Nil(t, err) + }) +} diff --git a/ext/store/maxcompute/view.go b/ext/store/maxcompute/view.go new file mode 100644 index 0000000000..5703d99fe0 --- /dev/null +++ b/ext/store/maxcompute/view.go @@ -0,0 +1,128 @@ +package maxcompute + +import ( + "bytes" + "strings" + "text/template" + + "github.com/aliyun/aliyun-odps-go-sdk/odps" + + "github.com/goto/optimus/core/resource" + "github.com/goto/optimus/internal/errors" +) + +type ViewSQLExecutor interface { + ExecSQl(sql string) (*odps.Instance, error) +} + +type ViewTable interface { + BatchLoadTables(tableNames []string) ([]odps.Table, error) +} + +type ViewHandle struct { + viewSQLExecutor ViewSQLExecutor + viewTable ViewTable +} + +func (v ViewHandle) Create(res *resource.Resource) error { + view, err := ConvertSpecTo[View](res) + if err != nil { + return err + } + + view.Name, err = getComponentName(res) + if err != nil { + return err + } + + sql, err := ToViewSQL(view) + if err != nil { + return errors.AddErrContext(err, EntityView, "failed to build view sql query to create view "+res.FullName()) + } + + inst, err := v.viewSQLExecutor.ExecSQl(sql) + if err != nil { + return errors.AddErrContext(err, EntityView, "failed to create sql task to create view "+res.FullName()) + } + + err = inst.WaitForSuccess() + if err != nil { + if strings.Contains(err.Error(), "Table or view already exists") { + return errors.AlreadyExists(EntityView, "view already exists on maxcompute: "+res.FullName()) + } + return errors.InternalError(EntityView, "failed to create view "+res.FullName(), err) + } + + return nil +} + +func (v ViewHandle) Update(res *resource.Resource) error { + viewName, err := getComponentName(res) + if err != nil { + return err + } + + _, err = v.viewTable.BatchLoadTables([]string{viewName.String()}) + if err != nil { + return errors.InternalError(EntityView, "error while get view on maxcompute", err) + } + + view, err := ConvertSpecTo[View](res) + if err != nil { + return err + } + view.Name = viewName + + sql, err := ToViewSQL(view) + if err != nil { + return errors.AddErrContext(err, EntityView, "failed to build view sql query to update view "+res.FullName()) + } + + inst, err := 
v.viewSQLExecutor.ExecSQl(sql) + if err != nil { + return errors.AddErrContext(err, EntityView, "failed to create sql task to update view "+res.FullName()) + } + + err = inst.WaitForSuccess() + if err != nil { + return errors.InternalError(EntityView, "failed to update view "+res.FullName(), err) + } + + return nil +} + +func (v ViewHandle) Exists(tableName string) bool { + _, err := v.viewTable.BatchLoadTables([]string{tableName}) + return err == nil +} + +func ToViewSQL(v *View) (string, error) { + fns := template.FuncMap{ + "join": func(sep string, s []string) string { + return strings.Join(s, sep) + }, + } + + tplStr := `create or replace view {{ .Name.String }} + ({{ join ", " .Columns }}) {{ if .Description }} + comment '{{ .Description}}' {{ end }} + as + {{ .ViewQuery}};` + + tpl, err := template.New("DDL_UPSERT_VIEW").Funcs(fns).Parse(tplStr) + if err != nil { + return "", err + } + + var out bytes.Buffer + err = tpl.Execute(&out, v) + if err != nil { + return "", err + } + + return out.String(), nil +} + +func NewViewHandle(viewSQLExecutor ViewSQLExecutor, view ViewTable) *ViewHandle { + return &ViewHandle{viewSQLExecutor: viewSQLExecutor, viewTable: view} +} diff --git a/ext/store/maxcompute/view_spec.go b/ext/store/maxcompute/view_spec.go new file mode 100644 index 0000000000..23b8201173 --- /dev/null +++ b/ext/store/maxcompute/view_spec.go @@ -0,0 +1,30 @@ +package maxcompute + +import ( + "github.com/goto/optimus/core/resource" + "github.com/goto/optimus/internal/errors" +) + +const ( + EntityView = "resource_view" +) + +type View struct { + Name resource.Name + + Description string `mapstructure:"description,omitempty"` + Columns []string `mapstructure:"columns,omitempty"` + ViewQuery string `mapstructure:"view_query,omitempty"` +} + +func (v *View) Validate() error { + if v.ViewQuery == "" { + return errors.InvalidArgument(EntityView, "view query is empty for "+v.Name.String()) + } + + if len(v.Columns) == 0 { + return errors.InvalidArgument(EntityView, "column names not provided for "+v.Name.String()) + } + + return nil +} diff --git a/ext/store/maxcompute/view_spec_test.go b/ext/store/maxcompute/view_spec_test.go new file mode 100644 index 0000000000..2cf6402d2f --- /dev/null +++ b/ext/store/maxcompute/view_spec_test.go @@ -0,0 +1,44 @@ +package maxcompute_test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/goto/optimus/ext/store/maxcompute" +) + +func TestRelationalView(t *testing.T) { + t.Run("return validation error when query is empty", func(t *testing.T) { + view := maxcompute.View{ + Name: "playground.customer", + ViewQuery: "", + } + + err := view.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "view query is empty for playground.customer") + }) + t.Run("return validation error when column names are empty", func(t *testing.T) { + view := maxcompute.View{ + Name: "playground.customer", + ViewQuery: "select * from `playground.customer`", + } + + err := view.Validate() + assert.NotNil(t, err) + assert.ErrorContains(t, err, "column names not provided for playground.customer") + }) + t.Run("has no validation error for correct view", func(t *testing.T) { + view := maxcompute.View{ + Name: "playground.customer", + ViewQuery: "select * from `playground.customer_table`", + Columns: []string{"id", "name"}, + } + + err := view.Validate() + assert.Nil(t, err) + + assert.Equal(t, "playground.customer", view.Name.String()) + }) +} diff --git a/ext/store/maxcompute/view_test.go b/ext/store/maxcompute/view_test.go new file mode 
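A short usage sketch of the exported view spec and DDL rendering defined above; the names and query are illustrative, and the rendered statement follows the create-or-replace template in view.go:

package main

import (
	"fmt"
	"log"

	"github.com/goto/optimus/ext/store/maxcompute"
)

func main() {
	view := &maxcompute.View{
		Name:        "proj.schema.customer_view",
		Description: "active customers",
		Columns:     []string{"id", "name"},
		ViewQuery:   "select id, name from proj.schema.customer where active = true",
	}
	if err := view.Validate(); err != nil {
		log.Fatal(err)
	}

	// ToViewSQL renders the create-or-replace-view DDL from the text/template
	// in view.go; ViewHandle then submits it through the ODPS SQL executor.
	sql, err := maxcompute.ToViewSQL(view)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(sql)
}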
100644 index 0000000000..e015b35d19 --- /dev/null +++ b/ext/store/maxcompute/view_test.go @@ -0,0 +1,239 @@ +package maxcompute_test + +import ( + "errors" + "fmt" + "testing" + + "github.com/aliyun/aliyun-odps-go-sdk/odps" + "github.com/aliyun/aliyun-odps-go-sdk/odps/account" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + + "github.com/goto/optimus/core/resource" + "github.com/goto/optimus/core/tenant" + "github.com/goto/optimus/ext/store/maxcompute" +) + +func TestViewHandle(t *testing.T) { + accessID, accessKey, endpoint := "LNRJ5tH1XMSINW5J3TjYAvfX", "lAZBJhdkNbwVj3bej5BuhjwbdV0nSp", "http://service.ap-southeast-5.maxcompute.aliyun.com/api" + projectName, schemaName, tableName := "proj", "schema", "test_view" + fullName := projectName + "." + schemaName + "." + tableName + mcStore := resource.MaxCompute + tnnt, _ := tenant.NewTenant(projectName, "ns") + metadata := resource.Metadata{ + Version: 1, + Description: "resource description", + Labels: map[string]string{"owner": "optimus"}, + } + + odpsInstance := odps.NewInstance(odps.NewOdps(account.NewAliyunAccount(accessID, accessKey), endpoint), projectName, "") + + normalTables := []odps.Table{ + odps.NewTable(odps.NewOdps(account.NewAliyunAccount(accessID, accessKey), endpoint), projectName, tableName), + } + + t.Run("Create", func(t *testing.T) { + t.Run("returns error when cannot convert spec", func(t *testing.T) { + table := new(mockMaxComputeTable) + odpsIns := new(mockOdpsIns) + viewHandle := maxcompute.NewViewHandle(odpsIns, table) + + spec := map[string]any{"description": []string{"test create"}} + res, err := resource.NewResource(fullName, maxcompute.KindView, mcStore, tnnt, &metadata, spec) + assert.Nil(t, err) + + err = viewHandle.Create(res) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "not able to decode spec for "+fullName) + }) + t.Run("returns error when view query is invalid", func(t *testing.T) { + table := new(mockMaxComputeTable) + odpsIns := new(mockOdpsIns) + odpsIns.On("ExecSQl", mock.Anything).Return(&odpsInstance, fmt.Errorf("sql task is invalid")) + defer odpsIns.AssertExpectations(t) + viewHandle := maxcompute.NewViewHandle(odpsIns, table) + + spec := map[string]any{ + "description": "test create", + "columns": []string{"customer_id", "customer_name", "product_name"}, + "view_query": "select * from test_customer;", + } + res, err := resource.NewResource(fullName, maxcompute.KindView, mcStore, tnnt, &metadata, spec) + assert.Nil(t, err) + + err = viewHandle.Create(res) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "failed to create sql task to create view "+fullName) + }) + t.Run("returns error when view creation returns error", func(t *testing.T) { + table := new(mockMaxComputeTable) + odpsIns := new(mockOdpsIns) + odpsIns.On("ExecSQl", mock.Anything).Return(&odpsInstance, nil) + defer odpsIns.AssertExpectations(t) + viewHandle := maxcompute.NewViewHandle(odpsIns, table) + + spec := map[string]any{ + "description": "test create", + "view_query": "select * from test_customer", + } + res, err := resource.NewResource(fullName, maxcompute.KindView, mcStore, tnnt, &metadata, spec) + assert.Nil(t, err) + + err = viewHandle.Create(res) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "failed to create view "+fullName) + }) + }) + + t.Run("Update", func(t *testing.T) { + t.Run("returns error when view is not found", func(t *testing.T) { + table := new(mockMaxComputeTable) + table.On("BatchLoadTables", mock.Anything).Return([]odps.Table{}, fmt.Errorf("view is not 
found")) + defer table.AssertExpectations(t) + odpsIns := new(mockOdpsIns) + viewHandle := maxcompute.NewViewHandle(odpsIns, table) + + spec := map[string]any{"description": []string{"test update"}} + res, err := resource.NewResource(fullName, maxcompute.KindView, mcStore, tnnt, &metadata, spec) + assert.Nil(t, err) + + err = viewHandle.Update(res) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "error while get view on maxcompute") + }) + t.Run("returns error when cannot convert spec", func(t *testing.T) { + table := new(mockMaxComputeTable) + table.On("BatchLoadTables", mock.Anything).Return([]odps.Table{}, nil) + defer table.AssertExpectations(t) + odpsIns := new(mockOdpsIns) + viewHandle := maxcompute.NewViewHandle(odpsIns, table) + + spec := map[string]any{"description": []string{"test update"}} + res, err := resource.NewResource(fullName, maxcompute.KindView, mcStore, tnnt, &metadata, spec) + assert.Nil(t, err) + + err = viewHandle.Update(res) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "not able to decode spec for "+fullName) + }) + t.Run("returns error when view query is invalid", func(t *testing.T) { + table := new(mockMaxComputeTable) + table.On("BatchLoadTables", mock.Anything).Return([]odps.Table{}, nil) + defer table.AssertExpectations(t) + odpsIns := new(mockOdpsIns) + odpsIns.On("ExecSQl", mock.Anything).Return(&odpsInstance, fmt.Errorf("sql task is invalid")) + defer odpsIns.AssertExpectations(t) + viewHandle := maxcompute.NewViewHandle(odpsIns, table) + + spec := map[string]any{ + "description": "test update", + "columns": []string{"customer_id", "customer_name", "product_name"}, + "view_query": "select * from test_customer;", + } + res, err := resource.NewResource(fullName, maxcompute.KindView, mcStore, tnnt, &metadata, spec) + assert.Nil(t, err) + + err = viewHandle.Update(res) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "failed to create sql task to update view "+fullName) + }) + t.Run("returns error when view creation returns error", func(t *testing.T) { + table := new(mockMaxComputeTable) + table.On("BatchLoadTables", mock.Anything).Return([]odps.Table{}, nil) + defer table.AssertExpectations(t) + odpsIns := new(mockOdpsIns) + odpsIns.On("ExecSQl", mock.Anything).Return(&odpsInstance, nil) + defer odpsIns.AssertExpectations(t) + viewHandle := maxcompute.NewViewHandle(odpsIns, table) + + spec := map[string]any{ + "description": "test update", + "view_query": "select * from test_customer", + } + res, err := resource.NewResource(fullName, maxcompute.KindView, mcStore, tnnt, &metadata, spec) + assert.Nil(t, err) + + err = viewHandle.Update(res) + assert.NotNil(t, err) + assert.ErrorContains(t, err, "failed to update view "+fullName) + }) + }) + + t.Run("Exists", func(t *testing.T) { + t.Run("returns false when error in checking existing view", func(t *testing.T) { + table := new(mockMaxComputeTable) + table.On("BatchLoadTables", mock.Anything).Return([]odps.Table{}, errors.New("error in get")) + defer table.AssertExpectations(t) + odpsIns := new(mockOdpsIns) + viewHandle := maxcompute.NewViewHandle(odpsIns, table) + + exists := viewHandle.Exists(tableName) + assert.False(t, exists) + }) + t.Run("returns true when checking existing tables", func(t *testing.T) { + table := new(mockMaxComputeTable) + table.On("BatchLoadTables", mock.Anything).Return(normalTables, nil) + defer table.AssertExpectations(t) + odpsIns := new(mockOdpsIns) + viewHandle := maxcompute.NewViewHandle(odpsIns, table) + + exists := viewHandle.Exists(tableName) + 
assert.True(t, exists) + }) + }) +} + +func TestToViewSQL(t *testing.T) { + type args struct { + v *maxcompute.View + } + tests := []struct { + name string + args args + want string + wantErr assert.ErrorAssertionFunc + }{ + { + name: "create_view", + args: args{ + v: &maxcompute.View{ + Name: "Test_View1", + Description: "Create Test View", + Columns: []string{"a", "b", "c"}, + ViewQuery: "select a, b, c from t1", + }, + }, + want: `create or replace view Test_View1 + (a, b, c) + comment 'Create Test View' + as + select a, b, c from t1;`, + wantErr: nil, + }, + { + name: "create_view_missing_description", + args: args{ + v: &maxcompute.View{ + Name: "Test_View1", + Columns: []string{"a", "b", "c"}, + ViewQuery: "select a, b, c from t1", + }, + }, + want: `create or replace view Test_View1 + (a, b, c) + as + select a, b, c from t1;`, + wantErr: nil, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := maxcompute.ToViewSQL(tt.args.v) + if tt.wantErr != nil && !tt.wantErr(t, err, fmt.Sprintf("ToViewSQL error in (%s)", tt.name)) { + return + } + assert.Equalf(t, tt.want, got, "ToViewSQL(%v)", tt.args.v) + }) + } +} diff --git a/go.mod b/go.mod index 1edcdd53f4..96b0009b78 100644 --- a/go.mod +++ b/go.mod @@ -9,6 +9,7 @@ require ( github.com/AlecAivazis/survey/v2 v2.3.6 github.com/MakeNowJust/heredoc v1.0.0 github.com/PagerDuty/go-pagerduty v1.5.1 + github.com/aliyun/aliyun-odps-go-sdk v0.3.7 github.com/briandowns/spinner v1.18.0 github.com/charmbracelet/bubbles v0.13.0 github.com/charmbracelet/bubbletea v0.22.1 @@ -40,7 +41,7 @@ require ( github.com/spf13/afero v1.9.2 github.com/spf13/cobra v1.2.1 github.com/spf13/viper v1.8.1 - github.com/stretchr/testify v1.8.1 + github.com/stretchr/testify v1.8.4 github.com/vmware-labs/yaml-jsonpath v0.3.2 github.com/xanzy/go-gitlab v0.103.0 github.com/xlab/treeprint v1.1.0 @@ -58,7 +59,7 @@ require ( google.golang.org/api v0.103.0 google.golang.org/genproto v0.0.0-20221117204609-8f9c96812029 google.golang.org/grpc v1.50.1 - google.golang.org/protobuf v1.29.1 + google.golang.org/protobuf v1.31.0 gopkg.in/yaml.v2 v2.4.0 gopkg.in/yaml.v3 v3.0.1 ) @@ -135,7 +136,7 @@ require ( github.com/muesli/reflow v0.3.0 // indirect github.com/muesli/termenv v0.11.1-0.20220212125758-44cd13922739 // indirect github.com/pelletier/go-toml v1.9.3 // indirect - github.com/pierrec/lz4/v4 v4.1.15 // indirect + github.com/pierrec/lz4/v4 v4.1.18 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/prometheus/client_model v0.2.0 // indirect @@ -167,5 +168,5 @@ require ( golang.org/x/time v0.3.0 // indirect golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect google.golang.org/appengine v1.6.7 // indirect - gopkg.in/ini.v1 v1.62.0 // indirect + gopkg.in/ini.v1 v1.67.0 // indirect ) diff --git a/go.sum b/go.sum index 714fe70fe8..bfc933aba4 100644 --- a/go.sum +++ b/go.sum @@ -95,6 +95,7 @@ contrib.go.opencensus.io/exporter/stackdriver v0.13.10/go.mod h1:I5htMbyta491eUx contrib.go.opencensus.io/integrations/ocsql v0.1.7/go.mod h1:8DsSdjz3F+APR+0z0WkU1aRorQCFfRxvqjUUPMbF3fE= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= gioui.org v0.0.0-20210308172011-57750fc8a0a6/go.mod h1:RSH6KIUZ0p2xy5zHDxgAM4zumjgTw83q2ge/PI+yyw8= +git.sr.ht/~sbinet/gg v0.3.1/go.mod h1:KGYtlADtqsqANL9ueOFkWymvzUvLMQllU5Ixo+8v3pc= github.com/AdaLogics/go-fuzz-headers v0.0.0-20210715213245-6c3934b029d8/go.mod 
h1:CzsSbkDixRphAF5hS6wbMKq0eI6ccJRb7/A0M6JBnwg= github.com/AlecAivazis/survey/v2 v2.3.5/go.mod h1:4AuI9b7RjAR+G7v9+C4YSlX/YL3K3cWNXgWXOhllqvI= github.com/AlecAivazis/survey/v2 v2.3.6 h1:NvTuVHISgTHEHeBFqt6BHOe4Ny/NwGZr7w+F8S9ziyw= @@ -184,7 +185,10 @@ github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbt github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ= +github.com/ajstarks/deck v0.0.0-20200831202436-30c9fc6549a9/go.mod h1:JynElWSGnm/4RlzPXRlREEwqTHAN3T56Bv2ITsFT3gY= +github.com/ajstarks/deck/generate v0.0.0-20210309230005-c3f852c02e19/go.mod h1:T13YZdzov6OU0A1+RfKZiZN9ca6VeKdBdyDV+BY97Tk= github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw= +github.com/ajstarks/svgo v0.0.0-20211024235047-1546f124cd8b/go.mod h1:1KcenG0jGWcpt8ov532z81sp/kMMUG485J2InIOyADM= github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U= github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI= github.com/alecthomas/chroma v0.8.2 h1:x3zkuE2lUk/RIekyAJ3XRqSCP4zwWDfcw/YJCuCAACg= @@ -203,6 +207,10 @@ github.com/alessio/shellescape v1.4.1 h1:V7yhSDDn8LP4lc4jS8pFkt0zCnzVJlG5JXy9BVK github.com/alessio/shellescape v1.4.1/go.mod h1:PZAiSCk0LJaZkiCSkPv8qIobYglO3FPpyFjDCtHLS30= github.com/alexflint/go-filemutex v0.0.0-20171022225611-72bdc8eae2ae/go.mod h1:CgnQgUtFrFz9mxFNtED3jI5tLDjKlOM+oUF/sTk6ps0= github.com/alexflint/go-filemutex v1.1.0/go.mod h1:7P4iRhttt/nUvUOrYIhcpMzv2G6CY9UnI16Z+UJqRyk= +github.com/aliyun/aliyun-odps-go-sdk v0.3.4 h1:IwidtZJUmFjlwBRb/24LGsYn/PSeIAcV7r5Ia09dvkE= +github.com/aliyun/aliyun-odps-go-sdk v0.3.4/go.mod h1:o2yLh138hfeBZThn+rorDVNhoaFsPwFSF+CgE69yaw8= +github.com/aliyun/aliyun-odps-go-sdk v0.3.7 h1:mG+pmrQPLOwy5ycI54zJ9lcgpI7GHV8cPfX0iDM8WbM= +github.com/aliyun/aliyun-odps-go-sdk v0.3.7/go.mod h1:o2yLh138hfeBZThn+rorDVNhoaFsPwFSF+CgE69yaw8= github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129 h1:MzBOUgng9orim59UnfUTLRjMpd09C5uEVQ6RPGeCaVI= github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129/go.mod h1:rFgpPQZYZ8vdbc+48xibu8ALc3yeyd64IhHS+PU6Yyg= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= @@ -315,6 +323,7 @@ github.com/blang/semver v3.1.0+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnweb github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4= github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= +github.com/boombuler/barcode v1.0.1/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= github.com/briandowns/spinner v1.18.0 h1:SJs0maNOs4FqhBwiJ3Gr7Z1D39/rukIVGQvpNZVHVcM= github.com/briandowns/spinner v1.18.0/go.mod h1:QOuQk7x+EaDASo80FEXwlwiA+j/PPIcX3FScO+3/ZPQ= github.com/bshuster-repo/logrus-logstash-hook v0.4.1/go.mod h1:zsTqEiSzDgAa/8GZR7E1qaXrhYNDKBYy5/dWPTIflbk= @@ -609,7 +618,10 @@ github.com/gin-gonic/gin v1.6.3/go.mod h1:75u5sXoLsGZoRN5Sgbi1eraJ4GU3++wFwWzhwv github.com/gin-gonic/gin v1.7.3/go.mod 
h1:jD2toBW3GZUr5UMcdrwQA10I7RuaFOl/SGeDjXkfUtY= github.com/go-fonts/dejavu v0.1.0/go.mod h1:4Wt4I4OU2Nq9asgDCteaAaWZOV24E+0/Pwo0gppep4g= github.com/go-fonts/latin-modern v0.2.0/go.mod h1:rQVLdDMK+mK1xscDwsqM5J8U2jrRa3T0ecnM9pNujks= +github.com/go-fonts/latin-modern v0.3.0/go.mod h1:ysEQXnuT/sCDOAONxC7ImeEDVINbltClhasMAqEtRK0= github.com/go-fonts/liberation v0.1.1/go.mod h1:K6qoJYypsmfVjWg8KOVDQhLc8UDgIK2HYqyqAO9z7GY= +github.com/go-fonts/liberation v0.2.0/go.mod h1:K6qoJYypsmfVjWg8KOVDQhLc8UDgIK2HYqyqAO9z7GY= +github.com/go-fonts/liberation v0.3.0/go.mod h1:jdJ+cqF+F4SUL2V+qxBth8fvBpBDS7yloUL5Fi8GTGY= github.com/go-fonts/stix v0.1.0/go.mod h1:w/c1f0ldAUlJmLBvlbkvVXLAD+tAMqobIIQpmnUIzUY= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -619,6 +631,8 @@ github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2 github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= github.com/go-latex/latex v0.0.0-20210118124228-b3d85cf34e07/go.mod h1:CO1AlKB2CSIqUrmQPqA0gdRIlnLEY0gK5JGjh37zN5U= +github.com/go-latex/latex v0.0.0-20210823091927-c0d11ff05a81/go.mod h1:SX0U8uGpxhq9o2S/CELCSUxEWWAuoCUcVCQWv7G2OCk= +github.com/go-latex/latex v0.0.0-20230307184459-12ec69307ad9/go.mod h1:gWuR/CrFDDeVRFQwHPvsv9soJVB/iqymhuZQuJ3a9OM= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= @@ -649,6 +663,8 @@ github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh github.com/go-openapi/swag v0.19.14/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= github.com/go-ozzo/ozzo-validation/v4 v4.3.0 h1:byhDUpfEwjsVQb1vBunvIjh2BHQ9ead57VkAEY4V+Es= github.com/go-ozzo/ozzo-validation/v4 v4.3.0/go.mod h1:2NKgrcHl3z6cJs+3Oo940FPRiTzuqKbvfrL2RxCj6Ew= +github.com/go-pdf/fpdf v0.5.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M= +github.com/go-pdf/fpdf v0.6.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M= github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= @@ -764,6 +780,7 @@ github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Z github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.1/go.mod h1:xXMiIv4Fb/0kKde4SpL7qlzvu5cMJDRkFDxJfI9uaxA= github.com/google/flatbuffers v2.0.0+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= +github.com/google/flatbuffers v23.5.26+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= @@ -847,7 +864,6 @@ github.com/googleapis/gnostic v0.5.1/go.mod h1:6U4PtQXGIEt/Z3h5MAT7FNofLnw9vXk2c 
github.com/googleapis/gnostic v0.5.5/go.mod h1:7+EbHbldMins07ALC74bsA81Ovc97DwqyJO1AENw9kA= github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4= github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= -github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY= github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= @@ -1027,7 +1043,6 @@ github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/ github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= -github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= @@ -1291,10 +1306,12 @@ github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCko github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= github.com/phpdave11/gofpdf v1.4.2/go.mod h1:zpO6xFn9yxo3YLyMvW8HcKWVdbNqgIfOOp2dXMnm1mY= github.com/phpdave11/gofpdi v1.0.12/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= +github.com/phpdave11/gofpdi v1.0.13/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= github.com/pierrec/lz4/v4 v4.1.8/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= -github.com/pierrec/lz4/v4 v4.1.15 h1:MO0/ucJhngq7299dKLwIMtgTfbkoSPF6AoMYDd8Q4q0= github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pierrec/lz4/v4 v4.1.18 h1:xaKrnTkyoqfh1YItXl56+6KJNVYWlEEPuAQW9xsplYQ= +github.com/pierrec/lz4/v4 v4.1.18/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4/go.mod h1:4OwLy04Bl9Ef3GJJCoec+30X3LQs/0/m4HFRt/2LUSA= github.com/pkg/browser v0.0.0-20210706143420-7d21f8c997e2/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= @@ -1367,6 +1384,7 @@ github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThC github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w= +github.com/ruudk/golang-pdf417 v0.0.0-20201230142125-a7e3863a1245/go.mod h1:pQAZKsJ8yyVxGRWYNEm9oFB8ieLgKFnamEyDmSA0BRk= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= 
github.com/safchain/ethtool v0.0.0-20190326074333-42ed695e3de8/go.mod h1:Z0q5wiBQGYcxhMZ6gUqHn6pYNLypFAvaL3UvgZLR0U4= github.com/safchain/ethtool v0.0.0-20210803160452-9aa261dae9b1/go.mod h1:Z0q5wiBQGYcxhMZ6gUqHn6pYNLypFAvaL3UvgZLR0U4= @@ -1401,10 +1419,8 @@ github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/slack-go/slack v0.9.1 h1:pekQBs0RmrdAgoqzcMCzUCWSyIkhzUU3F83ExAdZrKo= github.com/slack-go/slack v0.9.1/go.mod h1:wWL//kk0ho+FcQXcBTmEafUI5dz4qz5f4mMk8oIkioQ= -github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= -github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/snowflakedb/gosnowflake v1.6.3/go.mod h1:6hLajn6yxuJ4xUHZegMekpq9rnQbGJ7TMwXjgTmA6lg= github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= @@ -1456,8 +1472,9 @@ github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= github.com/syndtr/gocapability v0.0.0-20170704070218-db04d3cc01c8/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= @@ -1653,6 +1670,7 @@ golang.org/x/crypto v0.0.0-20211115234514-b4de73f9ece8/go.mod h1:GvvjBRRGRdwPK5y golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220331220935-ae2d96664a29/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw= golang.org/x/crypto v0.12.0 h1:tFM/ta59kqch6LlvYnPa0yx5a83cL2nHflFhYKvv9Yk= golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -1669,6 +1687,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod 
h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/exp v0.0.0-20230321023759-10a507213a29/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc= +golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc= golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= @@ -1678,6 +1698,11 @@ golang.org/x/image v0.0.0-20200430140353-33d19683fad8/go.mod h1:FeLwcggjj3mMvU+o golang.org/x/image v0.0.0-20200618115811-c13761719519/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.0.0-20201208152932-35266b937fa6/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.0.0-20210216034530-4410531fe030/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/image v0.0.0-20210607152325-775e3b0c77b9/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= +golang.org/x/image v0.0.0-20210628002857-a66eb6448b8d/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= +golang.org/x/image v0.0.0-20211028202545-6944b10bf410/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= +golang.org/x/image v0.5.0/go.mod h1:FVC7BI/5Ym8R25iw5OLsgshdUBbT1h5jZTpA+mvAdZ4= +golang.org/x/image v0.6.0/go.mod h1:MXLdDR43H7cDJq5GEGXEVeeNhPgi+YYEQ2pC1byI1x0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -1703,6 +1728,10 @@ golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.6.0/go.mod h1:4mET923SAdbXp2ki8ey+zGs1SLqsuM2Y0uvdZR/fUNI= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.11.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1784,6 +1813,9 @@ golang.org/x/net v0.0.0-20220706163947-c90051bbdb60/go.mod h1:XRhObCWvk6IyKnWLug golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.0.0-20220812174116-3211cb980234/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= golang.org/x/net v0.0.0-20220919232410-f2f64ebce3c1/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= +golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= golang.org/x/net v0.10.0 
h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/oauth2 v0.0.0-20180227000427-d7d64896b5ff/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -1976,6 +2008,9 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220818161305-2296e01440c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.11.0 h1:eG7RXZHdqOJ1i+0lgLgCpSXAp6M3LYlAo6osgSi0xOM= golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= @@ -1984,6 +2019,9 @@ golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d/go.mod h1:bj7SfCRtBDWHUb9sn golang.org/x/term v0.0.0-20210503060354-a79de5458b56/go.mod h1:tfny5GFUkzUvx4ps4ajbZsCe5lw1metzhBm9T3x7oIY= golang.org/x/term v0.0.0-20210615171337-6886f2dfbf5b/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= golang.org/x/term v0.11.0 h1:F9tnn/DA/Im8nCwm+fX+1/eBwi4qFjRT++MhtVC4ZX0= golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1995,6 +2033,9 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.12.0 h1:k+n5B8goJNdU7hSvEtMUz3d1Q6D/XW4COJSJR6fN0mc= golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -2091,6 +2132,9 @@ golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.2.0/go.mod h1:y4OqIKeOV/fWJetJ8bXPU1sEVniLMIyDAZWeHdV+NTA= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= 
golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -2105,9 +2149,11 @@ golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod h1:K8+ghG5WaK9qNq gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo= gonum.org/v1/gonum v0.8.2/go.mod h1:oe/vMfY3deqTw+1EZJhuvEW2iwGF1bW9wwu7XCu0+v0= gonum.org/v1/gonum v0.9.3/go.mod h1:TZumC3NeyVQskjXqmyWt4S3bINhy7B4eYwW69EbyX+0= +gonum.org/v1/gonum v0.13.0/go.mod h1:/WPYRckkfWrhWefxyYTfrTtQR0KH4iyHNuzxqXAKyAU= gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw= gonum.org/v1/plot v0.0.0-20190515093506-e2840ee46a6b/go.mod h1:Wt8AAjI+ypCyYX3nZBvf6cAIx93T+c/OS2HFAYskSZc= gonum.org/v1/plot v0.9.0/go.mod h1:3Pcqqmp6RHvJI72kgb8fThyUnav364FOsdDo2aGW5lY= +gonum.org/v1/plot v0.10.1/go.mod h1:VZW5OlhkL1mysU9vaqNHnsy86inf6Ot+jB3r+BczCEo= google.golang.org/api v0.0.0-20160322025152-9bf6e6e569ff/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= @@ -2336,8 +2382,8 @@ google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQ google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.29.1 h1:7QBf+IK2gx70Ap/hDsOmam3GE0v9HicjfEdAxE62UoM= -google.golang.org/protobuf v1.29.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= +google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= @@ -2355,8 +2401,9 @@ gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKW gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/ini.v1 v1.62.0 h1:duBzk771uxoUuOlyRLkHsygud9+5lrlGjdFBb4mSKDU= gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= +gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= gopkg.in/square/go-jose.v2 v2.2.2/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= @@ -2396,6 +2443,7 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools 
v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.1.3/go.mod h1:NgwopIslSNH47DimFoV78dnkksY2EFtX0ajyb3K/las= k8s.io/api v0.20.1/go.mod h1:KqwcCVogGxQY3nBlRpwt+wpAMF/KjaCc7RpywacvqUo= k8s.io/api v0.20.4/go.mod h1:++lNL1AJMkDymriNniQsWRkMDzRaX2Y/POTUi8yvqYQ= k8s.io/api v0.20.6/go.mod h1:X9e8Qag6JV/bL5G6bU8sdVRltWKmdHsFUGS3eVndqE8= diff --git a/internal/lib/cron/cron.go b/internal/lib/cron/cron.go index c5e1c334d5..b929681733 100644 --- a/internal/lib/cron/cron.go +++ b/internal/lib/cron/cron.go @@ -53,7 +53,7 @@ func (s *ScheduleSpec) getPreviousSchedule(currTime, startTime time.Time) time.T } func (s *ScheduleSpec) getEarliestTimeToStartCron(currTime time.Time) time.Time { - initialDelay := -time.Hour * 24 * 7 //nolint:gomnd + initialDelay := -time.Hour * 24 * 7 //nolint:mnd startTime := currTime for { startTime = startTime.Add(initialDelay) diff --git a/internal/lib/window/factory_test.go b/internal/lib/window/factory_test.go index bbaf09eaed..1ecdc8293b 100644 --- a/internal/lib/window/factory_test.go +++ b/internal/lib/window/factory_test.go @@ -37,7 +37,7 @@ func TestWindowFactory(t *testing.T) { config, err := window.NewPresetConfig("yesterday") assert.NoError(t, err) - _, err = window.From[Preset](config, "", func(name string) (Preset, error) { + _, err = window.From[Preset](config, "", func(_ string) (Preset, error) { return Preset{}, errors.New("cannot get window") }) assert.Error(t, err) @@ -46,7 +46,7 @@ func TestWindowFactory(t *testing.T) { config, err := window.NewPresetConfig("yesterday") assert.NoError(t, err) - w, err := window.From[Preset](config, "", func(name string) (Preset, error) { + w, err := window.From[Preset](config, "", func(_ string) (Preset, error) { conf := window.SimpleConfig{ Size: "1d", ShiftBy: "", @@ -70,7 +70,7 @@ func TestWindowFactory(t *testing.T) { config, err := window.NewConfig("1d", "", "", "") assert.NoError(t, err) - w, err := window.From[Preset](config, "", func(name string) (Preset, error) { + w, err := window.From[Preset](config, "", func(_ string) (Preset, error) { return Preset{ Name: "yesterday", config: config.GetSimpleConfig(), diff --git a/internal/models/window.go b/internal/models/window.go index 880f480a7f..3251b85eb8 100644 --- a/internal/models/window.go +++ b/internal/models/window.go @@ -22,7 +22,7 @@ func NewWindow(version int, truncateTo, offset, size string) (Window, error) { if version == 1 { return windowV1{truncateTo: truncateTo, offset: offset, size: size}, nil } - if version == 2 { // nolint:gomnd + if version == 2 { // nolint:mnd return windowV2{truncateTo: truncateTo, offset: offset, size: size}, nil } return nil, fmt.Errorf("window version [%d] is not recognized", version) diff --git a/internal/models/window_v2.go b/internal/models/window_v2.go index db490da335..b0f34e5e49 100644 --- a/internal/models/window_v2.go +++ b/internal/models/window_v2.go @@ -16,7 +16,7 @@ type windowV2 struct { } func (windowV2) GetVersion() int { - return 2 //nolint:gomnd + return 2 //nolint:mnd } func (w windowV2) Validate() error { diff --git a/internal/store/postgres/scheduler/job_operator_repository.go b/internal/store/postgres/scheduler/job_operator_repository.go index 637547b61d..0fa6c2dc4c 100644 --- a/internal/store/postgres/scheduler/job_operator_repository.go +++ 
b/internal/store/postgres/scheduler/job_operator_repository.go @@ -81,7 +81,6 @@ func (o *OperatorRunRepository) GetOperatorRun(ctx context.Context, name string, getJobRunByID := "SELECT " + jobOperatorColumns + " FROM " + operatorTableName + " j where job_run_id = $1 and name = $2 order by created_at desc limit 1" err = o.db.QueryRow(ctx, getJobRunByID, jobRunID, name). Scan(&opRun.ID, &opRun.Name, &opRun.JobRunID, &opRun.Status, &opRun.StartTime, &opRun.EndTime) - if err != nil { if errors.Is(err, pgx.ErrNoRows) { return nil, errors.NotFound(scheduler.EntityJobRun, "no record for "+operatorType.String()+"/"+name+" for job_run ID: "+jobRunID.String()) diff --git a/internal/store/postgres/tenant/secret_repository.go b/internal/store/postgres/tenant/secret_repository.go index 001f052eb0..c3ca3d7ecb 100644 --- a/internal/store/postgres/tenant/secret_repository.go +++ b/internal/store/postgres/tenant/secret_repository.go @@ -114,7 +114,6 @@ func (s SecretRepository) Save(ctx context.Context, tenantSecret *tenant.Secret) insertSecret := `INSERT INTO secret (name, value, project_name, namespace_name, created_at, updated_at) VALUES ($1, $2, $3, $4, NOW(), NOW())` _, err = s.db.Exec(ctx, insertSecret, secret.Name, secret.Value, secret.ProjectName, secret.NamespaceName) - if err != nil { return errors.Wrap(tenant.EntitySecret, "unable to save secret", err) } diff --git a/internal/utils/map.go b/internal/utils/map.go index ae97361a4d..54c10e2468 100644 --- a/internal/utils/map.go +++ b/internal/utils/map.go @@ -30,6 +30,14 @@ func MapToList[V any](inputMap map[string]V) []V { return smp } +func ListToMap(inputList []string) map[string]struct{} { + smp := map[string]struct{}{} + for _, value := range inputList { + smp[value] = struct{}{} + } + return smp +} + func AppendToMap(gmap map[string]interface{}, mp map[string]string) { for k, v := range mp { gmap[k] = v diff --git a/internal/utils/map_test.go b/internal/utils/map_test.go index 1d8c60a468..7d9ad2c11a 100644 --- a/internal/utils/map_test.go +++ b/internal/utils/map_test.go @@ -76,6 +76,14 @@ func TestMapHelper(t *testing.T) { assert.Equal(t, []string{"b", "d", "f"}, list) }) }) + t.Run("ListToMap", func(t *testing.T) { + t.Run("returns the list of strings as map", func(t *testing.T) { + list := []string{"a", "b", "c", "d"} + mapping := utils.ListToMap(list) + assert.Len(t, mapping, 4) + assert.EqualValues(t, map[string]struct{}{"a": {}, "b": {}, "c": {}, "d": {}}, mapping) + }) + }) t.Run("AppendToMap", func(t *testing.T) { t.Run("appends data from string map", func(t *testing.T) { orig := map[string]interface{}{ diff --git a/internal/utils/myers_diff.go b/internal/utils/myers_diff.go index 5a77d04c42..c54da8507f 100644 --- a/internal/utils/myers_diff.go +++ b/internal/utils/myers_diff.go @@ -103,13 +103,13 @@ func GetMyersDiff(src, dst []string, maxNeighbouringLines int) string { func shortestEditScript(src, dst []string) []operation { n := len(src) m := len(dst) - max := n + m + max := n + m // nolint: predeclared var trace []map[int]int var x, y int loop: for d := 0; d <= max; d++ { - v := make(map[int]int, d+2) //nolint: gomnd + v := make(map[int]int, d+2) //nolint: mnd trace = append(trace, v) if d == 0 { t := 0 diff --git a/plugin/plugin.go b/plugin/plugin.go index e6bb382366..536a5ef63e 100644 --- a/plugin/plugin.go +++ b/plugin/plugin.go @@ -87,7 +87,7 @@ func discoverPluginsGivenFilePattern(l log.Logger, prefix, suffix string) []stri continue } - if len(strings.Split(fullName, "-")) < 2 { //nolint: gomnd + if 
len(strings.Split(fullName, "-")) < 2 { //nolint: mnd continue } diff --git a/server/optimus.go b/server/optimus.go index 4afeef346d..37aca48e91 100644 --- a/server/optimus.go +++ b/server/optimus.go @@ -35,6 +35,7 @@ import ( "github.com/goto/optimus/ext/notify/slack" "github.com/goto/optimus/ext/notify/webhook" bqStore "github.com/goto/optimus/ext/store/bigquery" + mcStore "github.com/goto/optimus/ext/store/maxcompute" "github.com/goto/optimus/ext/transport/kafka" "github.com/goto/optimus/internal/compiler" "github.com/goto/optimus/internal/errors" @@ -393,6 +394,10 @@ func (s *OptimusServer) setupHandlers() error { bigqueryStore := bqStore.NewBigqueryDataStore(tenantService, bqClientProvider) resourceManager.RegisterDatastore(rModel.Bigquery, bigqueryStore) + mcClientProvider := mcStore.NewClientProvider() + maxComputeStore := mcStore.NewMaxComputeDataStore(tenantService, mcClientProvider) + resourceManager.RegisterDatastore(rModel.MaxCompute, maxComputeStore) + // Tenant Handlers pb.RegisterSecretServiceServer(s.grpcServer, tHandler.NewSecretsHandler(s.logger, tSecretService)) pb.RegisterProjectServiceServer(s.grpcServer, tHandler.NewProjectHandler(s.logger, tProjectService)) diff --git a/server/server.go b/server/server.go index 036d5ce517..82fc247a1c 100644 --- a/server/server.go +++ b/server/server.go @@ -169,7 +169,7 @@ func prepareHTTPProxy(httpAddr, grpcAddr string) (*http.Server, func(), error) { // base router baseMux := http.NewServeMux() - baseMux.HandleFunc("/ping", func(w http.ResponseWriter, r *http.Request) { + baseMux.HandleFunc("/ping", func(w http.ResponseWriter, _ *http.Request) { fmt.Fprintf(w, "pong") }) baseMux.HandleFunc("/plugins", func(w http.ResponseWriter, r *http.Request) { @@ -179,7 +179,7 @@ func prepareHTTPProxy(httpAddr, grpcAddr string) (*http.Server, func(), error) { }) baseMux.Handle("/api/", otelhttp.NewHandler(http.StripPrefix("/api", gwmux), "api")) - //nolint: gomnd + //nolint: mnd srv := &http.Server{ Handler: baseMux, Addr: httpAddr,