Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Test out idea to parse errors into sarif issues for tfsec #525

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
2 changes: 2 additions & 0 deletions .trunk/trunk.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,8 @@ plugins:
lint:
# enabled linters inherited from github.com/trunk-io/configs plugin

enabled:
- tfsec@1.28.4
disabled:
- pylint # pylint diagnostics are too strict

Expand Down
75 changes: 70 additions & 5 deletions linters/tfsec/parse.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,16 +18,81 @@
"""
# trunk-ignore-end(ruff)

import json
import re
import sys


def main():
original_input = sys.stdin.read()
def extract_json(original_input):
    """Extract a JSON payload from tfsec's raw stdout.

    tfsec sometimes prefixes log noise before the JSON body, so parsing is
    best-effort.

    Returns a tuple ``(text, has_json)``:
    - whole input is valid JSON -> (re-serialized JSON, True)
    - invalid JSON but contains "{" -> (everything from the first "{", True)
    - no "{" at all -> (input unchanged, False)
    """
    try:
        # json.JSONDecodeError is a ValueError subclass, so a plain
        # ValueError handler catches a failed parse.
        json_obj = json.loads(original_input)
        return json.dumps(json_obj), True
    except ValueError:
        # Fall back: strip any leading non-JSON noise before the first "{".
        index = original_input.find("{")
        if index != -1:
            return original_input[index:], True
        return original_input, False


def to_result_sarif(
    path: str, line_number: int, column_number: int, rule_id: str, message: str
):
    """Build a single SARIF result object for one diagnostic at a location.

    The result is always emitted at "error" level; callers supply the file
    path, 1-based line/column, a rule identifier, and the message text.
    """
    region = {
        "startColumn": column_number,
        "startLine": line_number,
    }
    physical_location = {
        "artifactLocation": {"uri": path},
        "region": region,
    }
    return {
        "level": "error",
        "locations": [{"physicalLocation": physical_location}],
        "message": {"text": message},
        "ruleId": rule_id,
    }


def extract_error_to_sarif(input_str):
    # Example output from tfsec to parse with regex
    r"""
    Error: scan failed: home/kohkubo/GitHub/review/akuma_review/terraform/modules/cloud_run/main.tf:19,25-20,1: Invalid multi-line string; Quoted strings may not be split over multiple lines. To produce a multi-line string, either use the \n escape to represent a newline character or use the "heredoc" multi-line template syntax., and 54 other diagnostic(s).
    """

    match = re.match(r"^Error: scan failed: (.+\.tf):(\d+),(\d+).*: (.+)$", input_str)
    if not match:
        # Not the error shape we know how to parse; pass the text through
        # unchanged so the caller still surfaces something.
        return input_str

    filename = match.group(1)
    # SARIF requires integer startLine/startColumn; regex groups are strings,
    # so convert before handing them to to_result_sarif.
    line = int(match.group(2))
    col = int(match.group(3))
    error_context = match.group(4)

    results = [to_result_sarif(filename, line, col, "malformed", error_context)]

    sarif = {
        "$schema": "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
        "version": "2.1.0",
        "runs": [{"results": results}],
    }
    return json.dumps(sarif, indent=2)


def main():
    """Read tfsec output from stdin and print SARIF (or extracted JSON).

    If the input contains JSON, print it as-is; otherwise try to convert a
    tfsec fatal-error message into a minimal SARIF document.
    """
    original_input = sys.stdin.read()
    extracted_json, has_json = extract_json(original_input)

    if has_json:
        print(extracted_json)
    else:
        # Bug fix: the function was previously printed as an object
        # (`print(extract_error_to_sarif)`) instead of being called.
        print(extract_error_to_sarif(original_input))


if __name__ == "__main__":
Expand Down
35 changes: 35 additions & 0 deletions linters/tfsec/test_data/malformed.in.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
module "docdb" {
`source = "./modules/docdb"`
docdb_subnet_group_name = "${var.project_name}-${var.environment}-group"
docdb_subnet_ids = module.vpc.private_subnets
docdb_cluster_identifier = "${var.project_name}-${var.environment}-docdb"
docdb_username = random_password.uname_create[0].result
docdb_password = random_password.password_create[0].result
skip_final_snapshot = var.skip_final_snapshot
docdb_vpc_security_group_ids = [module.sg_docdb.security_group_id]
docdb_cluster_instance_count = var.docdb_cluster_instance_count
docdb_cluster_instance_identifier = "docdb-cluster-${var.environment}-${count.index}"
instance_class = var.instance_class
docdb_parameter_group_name = "${var.project_name}-${var.environment}-docdb"

}

moved {
from = aws_docdb_cluster.docdb
to = module.docdb.aws_docdb_subnet_group.docdbgroup
}

moved {
from = aws_docdb_cluster_instance.docdb_cluster_instances[0]
to = module.docdb.aws_docdb_cluster.docdb
}

moved {
from = aws_docdb_cluster_parameter_group.cluster_para_group
to = module.docdb.aws_docdb_cluster_instance.docdb_cluster_instances
}

moved {
from = aws_docdb_subnet_group.docdbgroup
to = module.docdb.aws_docdb_cluster_parameter_group.cluster_para_group
}