diff --git a/config/terraform.spc b/config/terraform.spc index 4e46dd7..ce190da 100644 --- a/config/terraform.spc +++ b/config/terraform.spc @@ -1,8 +1,10 @@ connection "terraform" { plugin = "terraform" - # Paths is a list of locations to search for Terraform configuration files - # Paths can be configured with a local directory, a remote Git repository URL, or an S3 bucket URL + # Configuration file paths is a list of locations to search for Terraform configuration files + # Plan File Paths is a list of locations to search for Terraform plan files + # State File Paths is a list of locations to search for Terraform state files + # Configuration, plan or state file paths can be configured with a local directory, a remote Git repository URL, or an S3 bucket URL # Wildcard based searches are supported, including recursive searches # Local paths are resolved relative to the current working directory (CWD) @@ -18,5 +20,7 @@ connection "terraform" { # the CWD will be matched, which may cause errors if incompatible file types exist # Defaults to CWD - paths = [ "*.tf" ] + configuration_file_paths = ["*.tf"] + plan_file_paths = ["tfplan.json", "*.tfplan.json"] + state_file_paths = ["*.tfstate"] } diff --git a/docs/index.md b/docs/index.md index 78d6065..01fc8c5 100644 --- a/docs/index.md +++ b/docs/index.md @@ -16,6 +16,46 @@ A Terraform configuration file is used to declare resources, variables, modules, [Steampipe](https://steampipe.io) is an open source CLI to instantly query data using SQL. +The plugin supports scanning Terraform configuration files from various sources (e.g., [Local files](#configuring-local-file-paths), [Git](#configuring-remote-git-repository-urls), [S3](#configuring-s3-urls) etc.), [parsing Terraform states](#scanning-terraform-state) and [parsing Terraform plans](#scanning-terraform-plan) as well. 
+ +## Documentation + +- **[Table definitions & examples →](/plugins/turbot/terraform/tables)** + +## Get Started + +### Install + +Download and install the latest Terraform plugin: + +```bash +steampipe plugin install terraform +``` + +### Configuration + +Installing the latest terraform plugin will create a config file (`~/.steampipe/config/terraform.spc`) with a single connection named `terraform`: + +```hcl +connection "terraform" { + plugin = "terraform" + + configuration_file_paths = ["*.tf"] + plan_file_paths = ["tfplan.json", "*.tfplan.json"] + state_file_paths = ["*.tfstate"] +} +``` + +For a full list of configuration arguments, please see the [default configuration file](https://github.com/turbot/steampipe-plugin-terraform/blob/main/config/terraform.spc). + +### Run a Query + +Run steampipe: + +```shell +steampipe query +``` + Query all resources in your Terraform files: ```sql @@ -27,7 +67,7 @@ from terraform_resource; ``` -``` +```sh > select name, type, jsonb_pretty(arguments) as args from terraform_resource; +------------+----------------+--------------------------------------------+ | name | type | args | @@ -57,70 +97,30 @@ from +------------+----------------+--------------------------------------------+ ``` -## Documentation - -- **[Table definitions & examples →](/plugins/turbot/terraform/tables)** - -## Get started - -### Install - -Download and install the latest Terraform plugin: - -```bash -steampipe plugin install terraform -``` - -### Credentials - -No credentials are required. +## Configuring Paths -### Configuration +The plugin requires a list of locations to search for the Terraform configuration files. Paths can be configured with [Local files](#configuring-local-file-paths), [Git URLs](#configuring-remote-git-repository-urls), [S3 URLs](#configuring-s3-urls) etc. 
-Installing the latest terraform plugin will create a config file (`~/.steampipe/config/terraform.spc`) with a single connection named `terraform`: +**Note:** Local file paths are resolved relative to the current working directory (CWD). ```hcl connection "terraform" { plugin = "terraform" - # Paths is a list of locations to search for Terraform configuration files - # Paths can be configured with a local directory, a remote Git repository URL, or an S3 bucket URL - # Wildcard based searches are supported, including recursive searches - # Local paths are resolved relative to the current working directory (CWD) - - # For example: - # - "*.tf" matches all Terraform configuration files in the CWD - # - "**/*.tf" matches all Terraform configuration files in the CWD and all sub-directories - # - "../*.tf" matches all Terraform configuration files in the CWD's parent directory - # - "steampipe*.tf" matches all Terraform configuration files starting with "steampipe" in the CWD - # - "/path/to/dir/*.tf" matches all Terraform configuration files in a specific directory - # - "/path/to/dir/main.tf" matches a specific file - - # If paths includes "*", all files (including non-Terraform configuration files) in - # the CWD will be matched, which may cause errors if incompatible file types exist - - # Defaults to CWD - paths = [ "*.tf" ] + configuration_file_paths = [ + "terraform_test.tf", + "github.com/turbot/steampipe-plugin-aws//aws-test/tests/aws_acm_certificate//variables.tf" + ] } ``` -### Supported Path Formats - -The `paths` config argument is flexible and can search for Terraform configuration files from several different sources, e.g., local directory paths, Git, S3. - -The following sources are supported: - -- [Local files](#configuring-local-file-paths) -- [Remote Git repositories](#configuring-remote-git-repository-urls) -- [S3](#configuring-s3-urls) - Paths may [include wildcards](https://pkg.go.dev/path/filepath#Match) and support `**` for recursive matching. 
For example: ```hcl connection "terraform" { plugin = "terraform" - paths = [ + configuration_file_paths = [ "*.tf", "~/*.tf", "github.com/turbot/steampipe-plugin-aws//aws-test/tests/aws_acm_certificate//*.tf", @@ -134,7 +134,7 @@ connection "terraform" { **Note**: If any path matches on `*` without `.tf`, all files (including non-Terraform configuration files) in the directory will be matched, which may cause errors if incompatible file types exist. -#### Configuring Local File Paths +### Configuring Local File Paths You can define a list of local directory paths to search for terraform files. Paths are resolved relative to the current working directory. For example: @@ -151,11 +151,11 @@ You can define a list of local directory paths to search for terraform files. Pa connection "terraform" { plugin = "terraform" - paths = [ "*.tf", "~/*.tf", "/path/to/dir/main.tf" ] + configuration_file_paths = [ "*.tf", "~/*.tf", "/path/to/dir/main.tf" ] } ``` -#### Configuring Remote Git Repository URLs +### Configuring Remote Git Repository URLs You can also configure `paths` with any Git remote repository URLs, e.g., GitHub, BitBucket, GitLab. The plugin will then attempt to retrieve any Terraform configuration files from the remote repositories. 
@@ -176,7 +176,7 @@ You can specify a subdirectory after a double-slash (`//`) if you want to downlo connection "terraform" { plugin = "terraform" - paths = [ "github.com/turbot/steampipe-plugin-aws//aws-test/tests/aws_acm_certificate//*.tf" ] + configuration_file_paths = [ "github.com/turbot/steampipe-plugin-aws//aws-test/tests/aws_acm_certificate//*.tf" ] } ``` @@ -186,7 +186,7 @@ Similarly, you can define a list of GitLab and BitBucket URLs to search for Terr connection "terraform" { plugin = "terraform" - paths = [ + configuration_file_paths = [ "github.com/turbot/steampipe-plugin-aws//**/*.tf", "github.com/hashicorp/terraform-guides//infrastructure-as-code//**/*.tf", "bitbucket.org/benturrell/terraform-arcgis-portal//modules/shared//*.tf", @@ -197,11 +197,11 @@ connection "terraform" { } ``` -#### Configuring S3 URLs +### Configuring S3 URLs You can also query all Terraform configuration files stored inside an S3 bucket (public or private) using the bucket URL. -##### Accessing a Private Bucket +#### Accessing a Private Bucket In order to access your files in a private S3 bucket, you will need to configure your credentials. You can use your configured AWS profile from local `~/.aws/config`, or pass the credentials using the standard AWS environment variables, e.g., `AWS_PROFILE`, `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY`, `AWS_REGION`. 
@@ -215,7 +215,7 @@ You can also authenticate your request by setting the AWS profile and region in connection "terraform" { plugin = "terraform" - paths = [ + configuration_file_paths = [ "s3::https://bucket-2.s3.us-east-1.amazonaws.com//*.tf?aws_profile=", "s3::https://bucket-2.s3.us-east-1.amazonaws.com/test_folder//*.tf?aws_profile=" ] @@ -242,21 +242,14 @@ If the bucket is in another AWS account, the bucket policy will need to grant ac "Principal": { "AWS": "arn:aws:iam::123456789012:user/YOUR_USER" }, - "Action": [ - "s3:ListBucket", - "s3:GetObject", - "s3:GetObjectVersion" - ], - "Resource": [ - "arn:aws:s3:::test-bucket1", - "arn:aws:s3:::test-bucket1/*" - ] + "Action": ["s3:ListBucket", "s3:GetObject", "s3:GetObjectVersion"], + "Resource": ["arn:aws:s3:::test-bucket1", "arn:aws:s3:::test-bucket1/*"] } ] } ``` -##### Accessing a Public Bucket +#### Accessing a Public Bucket Public access granted to buckets and objects through ACLs and bucket policies allows any user access to data in the bucket. We do not recommend making S3 buckets public, but if there are specific objects you'd like to make public, please see [How can I grant public read access to some objects in my Amazon S3 bucket?](https://aws.amazon.com/premiumsupport/knowledge-center/read-access-objects-s3-bucket/). @@ -266,14 +259,76 @@ You can query any public S3 bucket directly using the URL without passing creden connection "terraform" { plugin = "terraform" - paths = [ + configuration_file_paths = [ "s3::https://bucket-1.s3.us-east-1.amazonaws.com/test_folder//*.tf", "s3::https://bucket-2.s3.us-east-1.amazonaws.com/test_folder//**/*.tf" ] } ``` -## Get involved +## Scanning Terraform Plan + +The plugin supports scanning the Terraform plans given in JSON, and allows the users to query them using Steampipe. + +**Note:** The plugin only scans the resource changes from the Terraform plan. 
+ +To get the Terraform plan in JSON format simply follow the below steps: + +- Run `terraform plan` with `-out` flag to store the generated plan to the given filename. Terraform will allow any filename for the plan file, but a typical convention is to name it `tfplan`. + +```shell +terraform plan -out=tfplan +``` + +- Run `terraform show` command with `-json` flag to get the plan in JSON format, and store the output in a file. + +```shell +terraform show -json tfplan > tfplan.json +``` + +- And, finally add the path `tfplan.json` to the `plan_file_paths` argument in the config to read the plan using Steampipe. + +```hcl +connection "terraform" { + plugin = "terraform" + + plan_file_paths = [ + "/path/to/tfplan.json", + "github.com/turbot/steampipe-plugin-aws//aws-test/tests/plan_files//tfplan.json", + "s3::https://bucket-1.s3.us-east-1.amazonaws.com/test_plan//*.json" + ] +} +``` + +## Scanning Terraform State + +The plugin supports scanning the Terraform states, and allows the users to query them using Steampipe. + +**Note:** The plugin only scans the outputs and resources from the Terraform state. + +To get the Terraform state simply follow the below steps: + +- Run `terraform apply` to automatically generate state file `terraform.tfstate`. + +```shell +terraform apply +``` + +- Add the path of the file `terraform.tfstate` to the `state_file_paths` argument in the config to read the state using Steampipe. 
+ +```hcl +connection "terraform" { + plugin = "terraform" + + state_file_paths = [ + "terraform.tfstate", + "github.com/turbot/steampipe-plugin-aws//aws-test/tests/state_files//terraform.tfstate", + "s3::https://bucket-1.s3.us-east-1.amazonaws.com/state_files//*.tfstate" + ] +} +``` + +## Get Involved - Open source: https://github.com/turbot/steampipe-plugin-terraform - Community: [Join #steampipe on Slack →](https://turbot.com/community/join) diff --git a/docs/tables/terraform_resource.md b/docs/tables/terraform_resource.md index 545b761..9598e9d 100644 --- a/docs/tables/terraform_resource.md +++ b/docs/tables/terraform_resource.md @@ -119,3 +119,30 @@ where and (arguments -> 'public_network_access_enabled' is null or (arguments -> 'public_network_access_enabled')::boolean); ``` +### List resources from a plan file + +```sql +select + name, + type, + arguments, + path +from + terraform_resource +where + path = '/path/to/tfplan.json'; +``` + +### List resources from a state file + +```sql +select + name, + type, + arguments, + path +from + terraform_resource +where + path = '/path/to/terraform.tfstate'; +``` diff --git a/go.mod b/go.mod index 31a0f80..48a4307 100644 --- a/go.mod +++ b/go.mod @@ -61,6 +61,7 @@ require ( github.com/hashicorp/go-safetemp v1.0.0 // indirect github.com/hashicorp/go-version v1.6.0 // indirect github.com/hashicorp/hcl v1.0.0 // indirect + github.com/hashicorp/terraform-json v0.13.0 // indirect github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d // indirect github.com/iancoleman/strcase v0.2.0 // indirect github.com/inconshreveable/mousetrap v1.0.0 // indirect diff --git a/terraform/connection_config.go b/terraform/connection_config.go index 79e6a9f..6b2b1a5 100644 --- a/terraform/connection_config.go +++ b/terraform/connection_config.go @@ -6,14 +6,29 @@ import ( ) type terraformConfig struct { - Paths []string `cty:"paths" steampipe:"watch"` + ConfigurationFilePaths []string `cty:"configuration_file_paths" steampipe:"watch"` + 
Paths []string `cty:"paths" steampipe:"watch"` + PlanFilePaths []string `cty:"plan_file_paths" steampipe:"watch"` + StateFilePaths []string `cty:"state_file_paths" steampipe:"watch"` } var ConfigSchema = map[string]*schema.Attribute{ + "configuration_file_paths": { + Type: schema.TypeList, + Elem: &schema.Attribute{Type: schema.TypeString}, + }, "paths": { Type: schema.TypeList, Elem: &schema.Attribute{Type: schema.TypeString}, }, + "plan_file_paths": { + Type: schema.TypeList, + Elem: &schema.Attribute{Type: schema.TypeString}, + }, + "state_file_paths": { + Type: schema.TypeList, + Elem: &schema.Attribute{Type: schema.TypeString}, + }, } func ConfigInstance() interface{} { diff --git a/terraform/table_terraform_data_source.go b/terraform/table_terraform_data_source.go index 7da82e6..845c480 100644 --- a/terraform/table_terraform_data_source.go +++ b/terraform/table_terraform_data_source.go @@ -107,17 +107,23 @@ type terraformDataSource struct { func listDataSources(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { // The path comes from a parent hydate, defaulting to the config paths or // available by the optional key column - path := h.Item.(filePath).Path + data := h.Item.(filePath) + path := data.Path - combinedParser, err := Parser() + content, err := os.ReadFile(path) if err != nil { - plugin.Logger(ctx).Error("terraform_data_source.listDataSources", "create_parser_error", err) + plugin.Logger(ctx).Error("terraform_data_source.listDataSources", "read_file_error", err, "path", path) return nil, err } - content, err := os.ReadFile(path) + // Return if the path is a TF plan or state path + if data.IsTFPlanFilePath || isTerraformPlan(content) || data.IsTFStateFilePath { + return nil, nil + } + + combinedParser, err := Parser() if err != nil { - plugin.Logger(ctx).Error("terraform_data_source.listDataSources", "read_file_error", err, "path", path) + plugin.Logger(ctx).Error("terraform_data_source.listDataSources", 
"create_parser_error", err) return nil, err } diff --git a/terraform/table_terraform_local.go b/terraform/table_terraform_local.go index 49d68c3..3529b97 100644 --- a/terraform/table_terraform_local.go +++ b/terraform/table_terraform_local.go @@ -66,17 +66,23 @@ type terraformLocal struct { func listLocals(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { // The path comes from a parent hydate, defaulting to the config paths or // available by the optional key column - path := h.Item.(filePath).Path + data := h.Item.(filePath) + path := data.Path - combinedParser, err := Parser() + content, err := os.ReadFile(path) if err != nil { - plugin.Logger(ctx).Error("terraform_local.listLocals", "create_parser_error", err) + plugin.Logger(ctx).Error("terraform_local.listLocals", "read_file_error", err, "path", path) return nil, err } - content, err := os.ReadFile(path) + // Return if the path is a TF plan or state path + if data.IsTFPlanFilePath || isTerraformPlan(content) || data.IsTFStateFilePath { + return nil, nil + } + + combinedParser, err := Parser() if err != nil { - plugin.Logger(ctx).Error("terraform_local.listLocals", "read_file_error", err, "path", path) + plugin.Logger(ctx).Error("terraform_local.listLocals", "create_parser_error", err) return nil, err } diff --git a/terraform/table_terraform_module.go b/terraform/table_terraform_module.go index 7763f3f..a4a4374 100644 --- a/terraform/table_terraform_module.go +++ b/terraform/table_terraform_module.go @@ -112,19 +112,25 @@ type terraformModule struct { } func listModules(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { - // The path comes from a parent hydrate, defaulting to the config paths - // or available by the optional key column - path := h.Item.(filePath).Path + // The path comes from a parent hydate, defaulting to the config paths or + // available by the optional key column + data := h.Item.(filePath) + path := data.Path - 
combinedParser, err := Parser() + content, err := os.ReadFile(path) if err != nil { - plugin.Logger(ctx).Error("terraform_module.listModules", "create_parser_error", err) + plugin.Logger(ctx).Error("terraform_module.listModules", "read_file_error", err, "path", path) return nil, err } - content, err := os.ReadFile(path) + // Return if the path is a TF plan or state path + if data.IsTFPlanFilePath || isTerraformPlan(content) || data.IsTFStateFilePath { + return nil, nil + } + + combinedParser, err := Parser() if err != nil { - plugin.Logger(ctx).Error("terraform_module.listModules", "read_file_error", err, "path", path) + plugin.Logger(ctx).Error("terraform_module.listModules", "create_parser_error", err) return nil, err } diff --git a/terraform/table_terraform_output.go b/terraform/table_terraform_output.go index 1d9030a..f89969f 100644 --- a/terraform/table_terraform_output.go +++ b/terraform/table_terraform_output.go @@ -5,8 +5,10 @@ import ( "fmt" "os" "reflect" + "strings" "github.com/Checkmarx/kics/pkg/model" + p "github.com/Checkmarx/kics/pkg/parser/json" "github.com/turbot/steampipe-plugin-sdk/v5/grpc/proto" "github.com/turbot/steampipe-plugin-sdk/v5/plugin" "github.com/zclconf/go-cty/cty/gocty" @@ -87,15 +89,10 @@ type terraformOutput struct { } func listOutputs(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { - // The path comes from a parent hydate, defaulting to the config paths or + // The path comes from a parent hydrate, defaulting to the config paths or // available by the optional key column - path := h.Item.(filePath).Path - - combinedParser, err := Parser() - if err != nil { - plugin.Logger(ctx).Error("terraform_output.listOutputs", "create_parser_error", err) - return nil, err - } + pathInfo := h.Item.(filePath) + path := pathInfo.Path content, err := os.ReadFile(path) if err != nil { @@ -103,20 +100,61 @@ func listOutputs(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData return nil, err } - 
var tfOutput terraformOutput + // Return if the path is a TF plan path + if pathInfo.IsTFPlanFilePath || isTerraformPlan(content) { + return nil, nil + } - for _, parser := range combinedParser { - parsedDocs, err := ParseContent(ctx, d, path, content, parser) + var docs []model.Document + + // Check if the file contains TF state + if pathInfo.IsTFStateFilePath { + // Initialize the JSON parser + jsonParser := p.Parser{} + + // Parse the file content using the JSON parser + var str string + documents, _, err := jsonParser.Parse(str, content) + if err != nil { + plugin.Logger(ctx).Error("terraform_output.listOutputs", "state_parse_error", err, "path", path) + return nil, fmt.Errorf("failed to parse state file %s: %v", path, err) + } + + docs = append(docs, documents...) + } else { + // Build the terraform parser + combinedParser, err := Parser() if err != nil { - plugin.Logger(ctx).Error("terraform_output.listOutputs", "parse_error", err, "path", path) - return nil, fmt.Errorf("failed to parse file %s: %v", path, err) + plugin.Logger(ctx).Error("terraform_output.listOutputs", "create_parser_error", err) + return nil, err + } + + for _, parser := range combinedParser { + parsedDocs, err := ParseContent(ctx, d, path, content, parser) + if err != nil { + plugin.Logger(ctx).Error("terraform_output.listOutputs", "parse_error", err, "path", path) + return nil, fmt.Errorf("failed to parse file %s: %v", path, err) + } + docs = append(docs, parsedDocs.Docs...) 
} + } - for _, doc := range parsedDocs.Docs { - if doc["output"] != nil { - // For each output, scan its arguments - for outputName, outputData := range doc["output"].(model.Document) { - tfOutput, err = buildOutput(ctx, path, content, outputName, outputData.(model.Document)) + for _, doc := range docs { + if doc["output"] != nil { + // For each output, scan its arguments + for outputName, outputData := range doc["output"].(model.Document) { + tfOutput, err := buildOutput(ctx, pathInfo.IsTFStateFilePath, path, content, outputName, outputData.(model.Document)) + if err != nil { + plugin.Logger(ctx).Error("terraform_output.listOutputs", "build_output_error", err) + return nil, err + } + d.StreamListItem(ctx, tfOutput) + } + } else if doc["outputs"] != nil { + // For each output, scan its arguments + for outputName, outputData := range convertModelDocumentToMapInterface(doc["outputs"]) { + if !strings.HasPrefix(outputName, "_kics") { + tfOutput, err := buildOutput(ctx, pathInfo.IsTFStateFilePath, path, content, outputName, convertModelDocumentToMapInterface(outputData)) if err != nil { plugin.Logger(ctx).Error("terraform_output.listOutputs", "build_output_error", err) return nil, err @@ -130,7 +168,7 @@ func listOutputs(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData return nil, nil } -func buildOutput(ctx context.Context, path string, content []byte, name string, d model.Document) (terraformOutput, error) { +func buildOutput(ctx context.Context, isTFStateFilePath bool, path string, content []byte, name string, d model.Document) (terraformOutput, error) { var tfOutput terraformOutput tfOutput.Path = path @@ -139,15 +177,26 @@ func buildOutput(ctx context.Context, path string, content []byte, name string, // Remove all "_kics" arguments sanitizeDocument(d) - start, end, source, err := getBlock(ctx, path, content, "output", []string{name}) - if err != nil { - plugin.Logger(ctx).Error("terraform_output.buildOutput", "getBlock", err) - return tfOutput, err 
+ if isTFStateFilePath { + file, err := os.Open(path) + if err != nil { + plugin.Logger(ctx).Error("terraform_output.listOutputs", "open_file_error", err, "path", path) + return tfOutput, err + } + startLine, endLine, source := findBlockLinesFromJSON(file, "outputs", name) + tfOutput.StartLine = startLine + tfOutput.EndLine = endLine + tfOutput.Source = source + } else { + start, end, source, err := getBlock(ctx, path, content, "output", []string{name}) + if err != nil { + plugin.Logger(ctx).Error("terraform_output.buildOutput", "getBlock", err) + return tfOutput, err + } + tfOutput.StartLine = start.Line + tfOutput.EndLine = end.Line + tfOutput.Source = source } - tfOutput.StartLine = start.Line - tfOutput.EndLine = end.Line - tfOutput.Source = source - for k, v := range d { switch k { case "description": diff --git a/terraform/table_terraform_provider.go b/terraform/table_terraform_provider.go index 88c8213..f78069a 100644 --- a/terraform/table_terraform_provider.go +++ b/terraform/table_terraform_provider.go @@ -80,17 +80,23 @@ type terraformProvider struct { func listProviders(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { // The path comes from a parent hydate, defaulting to the config paths or // available by the optional key column - path := h.Item.(filePath).Path + data := h.Item.(filePath) + path := data.Path - combinedParser, err := Parser() + content, err := os.ReadFile(path) if err != nil { - plugin.Logger(ctx).Error("terraform_provider.listProviders", "create_parser_error", err) + plugin.Logger(ctx).Error("terraform_provider.listProviders", "read_file_error", err, "path", path) return nil, err } - content, err := os.ReadFile(path) + // Return if the path is a TF plan or state path + if data.IsTFPlanFilePath || isTerraformPlan(content) || data.IsTFStateFilePath { + return nil, nil + } + + combinedParser, err := Parser() if err != nil { - plugin.Logger(ctx).Error("terraform_provider.listProviders", 
"read_file_error", err, "path", path) + plugin.Logger(ctx).Error("terraform_provider.listProviders", "create_parser_error", err) return nil, err } diff --git a/terraform/table_terraform_resource.go b/terraform/table_terraform_resource.go index e54c431..607660c 100644 --- a/terraform/table_terraform_resource.go +++ b/terraform/table_terraform_resource.go @@ -8,6 +8,8 @@ import ( "strings" "github.com/Checkmarx/kics/pkg/model" + p "github.com/Checkmarx/kics/pkg/parser/json" + "github.com/turbot/steampipe-plugin-sdk/v5/grpc/proto" "github.com/turbot/steampipe-plugin-sdk/v5/plugin" "github.com/zclconf/go-cty/cty/gocty" @@ -34,11 +36,21 @@ func tableTerraformResource(ctx context.Context) *plugin.Table { Description: "Resource type.", Type: proto.ColumnType_STRING, }, + { + Name: "mode", + Description: "The type of resource Terraform creates, either a resource (managed) or data source (data).", + Type: proto.ColumnType_STRING, + }, { Name: "arguments", Description: "Resource arguments.", Type: proto.ColumnType_JSON, }, + { + Name: "instances", + Description: "The attributes of the resource.", + Type: proto.ColumnType_JSON, + }, // Meta-arguments { Name: "count", @@ -98,6 +110,7 @@ type terraformResource struct { Name string Type string Path string + Mode string StartLine int Source string EndLine int @@ -110,54 +123,92 @@ type terraformResource struct { // A resource's provider arg will always reference a provider block Provider string Lifecycle map[string]interface{} + Instances map[string]interface{} } func listResources(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { - // The path comes from a parent hydate, defaulting to the config paths or + // The path comes from a parent hydrate, defaulting to the config paths or // available by the optional key column - path := h.Item.(filePath).Path - - combinedParser, err := Parser() - if err != nil { - plugin.Logger(ctx).Error("terraform_resource.listResources", "create_parser_error", 
err) - return nil, err - } + pathInfo := h.Item.(filePath) + path := pathInfo.Path + // Read the content from the file content, err := os.ReadFile(path) if err != nil { plugin.Logger(ctx).Error("terraform_resource.listResources", "read_file_error", err, "path", path) return nil, err } - for _, parser := range combinedParser { - parsedDocs, err := ParseContent(ctx, d, path, content, parser) + // if the file contains TF plan then set IsTFPlanFilePath to true + if isTerraformPlan(content) { + pathInfo.IsTFPlanFilePath = true + } + + var docs []model.Document + + // Check if the file contains TF plan or state + if pathInfo.IsTFPlanFilePath || pathInfo.IsTFStateFilePath { + // Initialize the JSON parser + jsonParser := p.Parser{} + + // Parse the file content using the JSON parser + var str string + documents, _, err := jsonParser.Parse(str, content) if err != nil { plugin.Logger(ctx).Error("terraform_resource.listResources", "parse_error", err, "path", path) - return nil, fmt.Errorf("failed to parse file %s: %v", path, err) + return nil, fmt.Errorf("failed to parse plan or state file %s: %v", path, err) } + docs = append(docs, documents...) + } else { + // Build the terraform parser + combinedParser, err := Parser() + if err != nil { + plugin.Logger(ctx).Error("terraform_resource.listResources", "create_parser_error", err) + return nil, err + } + + for _, parser := range combinedParser { + parsedDocs, err := ParseContent(ctx, d, path, content, parser) + if err != nil { + plugin.Logger(ctx).Error("terraform_resource.listResources", "parse_error", err, "path", path) + return nil, fmt.Errorf("failed to parse file %s: %v", path, err) + } + docs = append(docs, parsedDocs.Docs...) 
// REVIEW(doc): this span is the post-image of a mangled git diff over
// terraform/table_terraform_resource.go; buildResource's interior is partly
// elided by hunk boundaries, so only review comments are added here.
//
// Issues spotted in the added (+) code below:
//   1. In the isTFFilePath branch, the *os.File returned by os.Open(path) is
//      never closed — a file-handle leak for every plan/state resource row.
//      Add `defer file.Close()` right after the error check.
//   2. In `case "type":` the guard tests `tfResource.Name == ""` but assigns
//      tfResource.Type; it should test `tfResource.Type == ""` (compare the
//      parallel `case "name":` branch). As written, the type read from a
//      state file's resource entry is effectively never applied, because
//      Name is already populated from the function argument a few lines up.
//   3. NOTE(review): in the state-file branch of listResources,
//      resourceData["type"].(string) and resourceData["name"].(string) will
//      panic on a malformed state file — presumably these keys are always
//      present in a valid .tfstate; confirm or use checked type assertions.
+ } + } - for _, doc := range parsedDocs.Docs { - if doc["resource"] != nil { - // Resources are grouped by resource type - for resourceType, resources := range doc["resource"].(model.Document) { - // For each resource, scan its arguments - for resourceName, resourceData := range resources.(model.Document) { - tfResource, err := buildResource(ctx, content, path, resourceType, resourceName, resourceData.(model.Document)) - if err != nil { - plugin.Logger(ctx).Error("terraform_resource.listResources", "build_resource_error", err) - return nil, err - } - d.StreamListItem(ctx, tfResource) + // Stream the data + for _, doc := range docs { + if doc["resource"] != nil { + // Resources are grouped by resource type + for resourceType, resources := range convertModelDocumentToMapInterface(doc["resource"]) { + // For each resource, scan its arguments + for resourceName, resourceData := range convertModelDocumentToMapInterface(resources) { + tfResource, err := buildResource(ctx, pathInfo.IsTFPlanFilePath, content, path, resourceType, resourceName, convertModelDocumentToMapInterface(resourceData)) + if err != nil { + plugin.Logger(ctx).Error("terraform_resource.listResources", "build_resource_error", err) + return nil, err + } + d.StreamListItem(ctx, tfResource) } } + } else if doc["resources"] != nil { // state file returns resources + for _, resource := range doc["resources"].([]interface{}) { + resourceData := convertModelDocumentToMapInterface(resource) + tfResource, err := buildResource(ctx, pathInfo.IsTFStateFilePath, content, path, resourceData["type"].(string), resourceData["name"].(string), resourceData) + if err != nil { + plugin.Logger(ctx).Error("terraform_resource.listResources", "build_resource_error", err) + return nil, err + } + d.StreamListItem(ctx, tfResource) + } } } return nil, nil } -func buildResource(ctx context.Context, content []byte, path string, resourceType string, name string, d model.Document) (*terraformResource, error) { +func buildResource(ctx 
context.Context, isTFFilePath bool, content []byte, path string, resourceType string, name string, d model.Document) (*terraformResource, error) { tfResource := new(terraformResource) tfResource.Path = path @@ -165,20 +216,32 @@ func buildResource(ctx context.Context, content []byte, path string, resourceTyp tfResource.Name = name tfResource.Arguments = make(map[string]interface{}) tfResource.Lifecycle = make(map[string]interface{}) + tfResource.Instances = make(map[string]interface{}) // Remove all "_kics" arguments sanitizeDocument(d) - startPosition, endPosition, source, err := getBlock(ctx, path, content, "resource", []string{resourceType, name}) - if err != nil { - plugin.Logger(ctx).Error("error getting details of block", err) - return nil, err - } - - tfResource.StartLine = startPosition.Line - tfResource.Source = source - tfResource.EndLine = endPosition.Line + if isTFFilePath { + file, err := os.Open(path) + if err != nil { + plugin.Logger(ctx).Error("terraform_resource.buildResource", "open_file_error", err, "path", path) + return tfResource, err + } + startLine, endLine, source := findBlockLinesFromJSON(file, "resources", resourceType, name) + tfResource.StartLine = startLine + tfResource.EndLine = endLine + tfResource.Source = source + } else { + startPosition, endPosition, source, err := getBlock(ctx, path, content, "resource", []string{resourceType, name}) + if err != nil { + plugin.Logger(ctx).Error("error getting details of block", err) + return nil, err + } + tfResource.StartLine = startPosition.Line + tfResource.Source = source + tfResource.EndLine = endPosition.Line + } // TODO: Can we return source code as well? 
for k, v := range d { switch k { @@ -208,6 +271,28 @@ func buildResource(ctx context.Context, content []byte, path string, resourceTyp } tfResource.Provider = v.(string) + case "name": + if reflect.TypeOf(v).String() != "string" { + return tfResource, fmt.Errorf("The 'name' argument for resource '%s' must be of type string", name) + } + if tfResource.Name == "" { + tfResource.Name = v.(string) + } + + case "type": + if reflect.TypeOf(v).String() != "string" { + return tfResource, fmt.Errorf("The 'type' argument for resource '%s' must be of type string", name) + } + if tfResource.Name == "" { + tfResource.Type = v.(string) + } + + case "mode": + if reflect.TypeOf(v).String() != "string" { + return tfResource, fmt.Errorf("The 'mode' argument for resource '%s' must be of type string", name) + } + tfResource.Mode = v.(string) + case "for_each": valStr, err := convertExpressionValue(v) if err != nil { @@ -237,6 +322,18 @@ func buildResource(ctx context.Context, content []byte, path string, resourceTyp } tfResource.DependsOn = s + case "instances": + if reflect.TypeOf(v).String() != "[]interface {}" { + return tfResource, fmt.Errorf("The 'instances' argument for resource '%s' must be of type list", name) + } + for _, v := range v.([]interface{}) { + convertedValue := convertModelDocumentToMapInterface(v) + cleanedValue := removeKicsLabels(convertedValue).(map[string]interface{}) + for property, value := range cleanedValue { + tfResource.Instances[property] = value + } + } + // It's safe to add any remaining arguments since we've already removed all "_kics" arguments default: tfResource.Arguments[k] = v @@ -244,3 +341,35 @@ func buildResource(ctx context.Context, content []byte, path string, resourceTyp } return tfResource, nil } + +// convertModelDocumentToMapInterface takes the documents in model.Document format and converts it into map[string]interface{} +func convertModelDocumentToMapInterface(data interface{}) map[string]interface{} { + result := 
map[string]interface{}{} + + switch item := data.(type) { + case model.Document: + result = item + case map[string]interface{}: + result = item + } + return result +} + +func removeKicsLabels(data interface{}) interface{} { + if dataMap, isMap := data.(map[string]interface{}); isMap { + for key, value := range dataMap { + if strings.HasPrefix(key, "_kics") { + delete(dataMap, key) + } else { + dataMap[key] = removeKicsLabels(value) + } + } + return dataMap + } else if dataList, isList := data.([]interface{}); isList { + for i, item := range dataList { + dataList[i] = removeKicsLabels(item) + } + return dataList + } + return data +} diff --git a/terraform/utils.go b/terraform/utils.go index dd1af7a..74a5302 100644 --- a/terraform/utils.go +++ b/terraform/utils.go @@ -1,11 +1,13 @@ package terraform import ( + "bufio" "context" _ "embed" // Embed kics CLI img and scan-flags json "encoding/json" "errors" "fmt" + "os" "reflect" "strings" "sync" @@ -23,7 +25,9 @@ import ( ) type filePath struct { - Path string + Path string + IsTFPlanFilePath bool + IsTFStateFilePath bool } // Use when parsing any TF file to prevent concurrent map read and write errors @@ -38,7 +42,16 @@ func tfConfigList(ctx context.Context, d *plugin.QueryData, _ *plugin.HydrateDat // will never match the requested value. 
// REVIEW(doc): interior of tfConfigList (terraform/utils.go post-image); the
// function's head and tail are split across other elided hunks. Notes on the
// added (+) code below:
//   - The `path` qual branch recognizes only state files (".tfstate" suffix);
//     a plan file supplied via the qual is streamed without IsTFPlanFilePath
//     set and would be parsed as an ordinary config file — TODO confirm this
//     is intended.
//   - With no path arguments configured the function now returns no rows,
//     where it previously returned the error "paths must be configured".
//   - `paths` falls back to ConfigurationFilePaths only when the deprecated
//     Paths argument is unset, i.e. Paths wins when both are configured.
quals := d.EqualsQuals if quals["path"] != nil { - d.StreamListItem(ctx, filePath{Path: quals["path"].GetStringValue()}) + + path := d.EqualsQualString("path") + + // check if state file is provided in the qual + if strings.HasSuffix(path, ".tfstate") { + d.StreamListItem(ctx, filePath{Path: path, IsTFStateFilePath: true}) + return nil, nil + } + + d.StreamListItem(ctx, filePath{Path: path}) return nil, nil } @@ -46,14 +59,22 @@ func tfConfigList(ctx context.Context, d *plugin.QueryData, _ *plugin.HydrateDat // Fail if no paths are specified terraformConfig := GetConfig(d.Connection) - if terraformConfig.Paths == nil { - return nil, errors.New("paths must be configured") + if terraformConfig.Paths == nil && terraformConfig.ConfigurationFilePaths == nil && terraformConfig.PlanFilePaths == nil && terraformConfig.StateFilePaths == nil { + return nil, nil } // Gather file path matches for the glob - var matches []string - paths := terraformConfig.Paths - for _, i := range paths { + var paths, matches []string + + // TODO:: Remove backward compatibility for the argument 'Paths' + if terraformConfig.Paths != nil { + paths = terraformConfig.Paths + } else { + paths = terraformConfig.ConfigurationFilePaths + } + configurationFilePaths := paths + + for _, i := range configurationFilePaths { // List the files in the given source directory files, err := d.GetSourceFiles(i) @@ -73,6 +94,57 @@ func tfConfigList(ctx context.Context, d *plugin.QueryData, _ *plugin.HydrateDat d.StreamListItem(ctx, filePath{Path: i}) } + // Gather TF plan file path matches for the glob + var matchedPlanFilePaths []string + planFilePaths := terraformConfig.PlanFilePaths + for _, i := range planFilePaths { + + // List the files in the given source directory + files, err := d.GetSourceFiles(i) + if err != nil { + return nil, err + } + matchedPlanFilePaths = append(matchedPlanFilePaths, files...) 
+ } + + // Sanitize the matches to ignore the directories + for _, i := range matchedPlanFilePaths { + + // Ignore directories + if filehelpers.DirectoryExists(i) { + continue + } + d.StreamListItem(ctx, filePath{ + Path: i, + IsTFPlanFilePath: true, + }) + } + + // Gather TF state file path matches for the glob + var matchedStateFilePaths []string + stateFilePaths := terraformConfig.StateFilePaths + for _, i := range stateFilePaths { + + // List the files in the given source directory + files, err := d.GetSourceFiles(i) + if err != nil { + return nil, err + } + matchedStateFilePaths = append(matchedStateFilePaths, files...) + } + + // Sanitize the matches to ignore the directories + for _, i := range matchedStateFilePaths { + + // Ignore directories + if filehelpers.DirectoryExists(i) { + continue + } + d.StreamListItem(ctx, filePath{ + Path: i, + IsTFStateFilePath: true, + }) + } return nil, nil } @@ -261,3 +333,137 @@ var terraformSchema = &hcl.BodySchema{ }, }, } + +func isTerraformPlan(content []byte) bool { + var data map[string]interface{} + err := json.Unmarshal(content, &data) + if err != nil { + return false + } + + // Check for fields that are common in Terraform plans + _, hasResourceChanges := data["resource_changes"] + _, hasFormatVersion := data["format_version"] + + return hasResourceChanges && hasFormatVersion +} + +// findBlockLinesFromJSON locates the start and end lines of a specific block or nested element within a block. +// The file should contain structured data (e.g., JSON) and this function expects to search for blocks with specific names. +func findBlockLinesFromJSON(file *os.File, blockName string, pathName ...string) (int, int, string) { + var currentLine, startLine, endLine int + var bracketCounter, startCounter int + + // These boolean flags indicate which part of the structured data we're currently processing. + inBlock, inOutput, inTargetBlock := false, false, false + + // Move the file pointer to the start of the file. 
+ _, _ = file.Seek(0, 0) + scanner := bufio.NewScanner(file) + + for scanner.Scan() { + currentLine++ + line := scanner.Text() + trimmedLine := strings.TrimSpace(line) + + // Detect the start of the desired block, path, response, etc. + // Depending on the blockName and provided pathName, different conditions are checked. + + // Generic block detection + if !inBlock && (trimmedLine == fmt.Sprintf(`"%s": {`, blockName) || trimmedLine == fmt.Sprintf(`"%s": [`, blockName)) { + inBlock = true + startLine = currentLine + continue + } else if inBlock && blockName == "outputs" && trimmedLine == fmt.Sprintf(`"%s": {`, pathName[0]) { + // Different output block detection within the "outputs" block + inOutput = true + bracketCounter = 1 + startLine = currentLine + continue + } else if inBlock && blockName == "resources" { + if inBlock && strings.Contains(trimmedLine, "{") { + bracketCounter++ + startCounter = currentLine + } + if inBlock && strings.Contains(trimmedLine, "}") { + bracketCounter-- + } + + if inBlock && strings.Contains(trimmedLine, fmt.Sprintf(`"type": "%s"`, pathName[0])) { + peekCounter := 1 + nameFound := false + + for { + peekLine, _ := readLineN(file, currentLine+peekCounter) + if strings.Contains(peekLine, fmt.Sprintf(`"name": "%s"`, pathName[1])) { + nameFound = true + break + } + if strings.Contains(peekLine, "}") { + break + } + peekCounter++ + } + + if nameFound { + inTargetBlock = true + startLine = startCounter // Assume the opening brace is at the start of this resource + } + } + } + // If we are within a block, we need to track the opening and closing brackets + // to determine where the block ends. 
+ if inBlock && inOutput && !inTargetBlock { + bracketCounter += strings.Count(line, "{") + bracketCounter -= strings.Count(line, "}") + + if bracketCounter == 0 { + endLine = currentLine + break + } + } + + if inBlock && inTargetBlock && bracketCounter == 0 { + endLine = currentLine + break + } + } + source := getSourceFromFile(file, startLine, endLine) + + if startLine != 0 && endLine == 0 { + // If we found the start but not the end, reset the start to indicate the block doesn't exist in entirety. + startLine = 0 + } + + return startLine, endLine, source +} + +func getSourceFromFile(file *os.File, startLine int, endLine int) string { + var source string + _, _ = file.Seek(0, 0) // Go to the start + scanner := bufio.NewScanner(file) + currentSourceLine := 0 + for scanner.Scan() { + currentSourceLine++ + if currentSourceLine >= startLine && currentSourceLine <= endLine { + source += scanner.Text() + "\n" + } + if currentSourceLine > endLine { + break + } + } + return source +} + +func readLineN(file *os.File, lineNum int) (string, error) { + _, _ = file.Seek(0, 0) // Go to the start + scanner := bufio.NewScanner(file) + currentLine := 0 + for scanner.Scan() { + currentLine++ + if currentLine == lineNum { + return scanner.Text(), nil + } + } + return "", nil +}