Skip to content

Commit

Permalink
Merge branch 'Checkmarx:master' into master
Browse files Browse the repository at this point in the history
  • Loading branch information
gabriel-cx authored Dec 7, 2023
2 parents 14f7848 + 66cbf9c commit 3abc80b
Show file tree
Hide file tree
Showing 1,851 changed files with 9,543 additions and 3,803 deletions.
9 changes: 8 additions & 1 deletion .github/scripts/queries-validator/metadata-schema.json
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,8 @@
"aws",
"azure",
"common",
"gcp"
"gcp",
"nifcloud"
]
},
"platform": {
Expand Down Expand Up @@ -139,6 +140,12 @@
}
}
}
},
"experimental": {
"type": "string",
"enum": [
"true"
]
}
}
}
Binary file modified .github/scripts/queries-validator/requirements.txt
Binary file not shown.
8 changes: 4 additions & 4 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -148,13 +148,13 @@ run-local: build
.PHONY: generate-queries-docs
generate-queries-docs: ## generate queries catalog md files
$(call print-target)
@pip3 install -r .github/generators/requirements.txt
@python3 -u .github/generators/docs_generator.py \
@pip3 install -r .github/scripts/docs-generator/requirements.txt
@python3 -u .github/scripts/docs-generator/docs-generator.py \
-p ./assets/queries/ \
-o ./docs/queries/ \
-f md \
-t .github/generators/templates
@python3 -u .github/scripts/docs-generator/query-page-generator/query-page-generator.py \
-t .github/scripts/docs-generator/templates
@python3 -u -B .github/scripts/docs-generator/query-page-generator/query-page-generator.py \
-p ./assets/queries/ \
-o ./docs/queries/ \
-f md \
Expand Down
7 changes: 5 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
[![Docker Pulls](https://img.shields.io/docker/pulls/checkmarx/kics)](https://hub.docker.com/r/checkmarx/kics)
[![Documentation](https://img.shields.io/badge/docs-viewdocs-blue.svg?style=flat-square "Viewdocs")](https://docs.kics.io/)
[![GitHub Discussions](https://img.shields.io/badge/chat-discussions-blue.svg?logo=github&style=flat-square)](https://github.com/Checkmarx/kics/discussions)
[![Discord Server](https://img.shields.io/discord/1116626376674521169?logo=discord&style=flat-square)](https://discord.gg/nzryxFup6Z)

[![checkmarx](https://img.shields.io/endpoint?url=https://pgp36n22ol.execute-api.eu-west-1.amazonaws.com/dev/cxflowcache-results?style=plastic&logoWidth=20&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAACXBIWXMAAA7EAAAOxAGVKw4bAAADbklEQVRYhc2XTWidRRSGn3MJl1AkZFFcFCkFIYKCYMEgIhZEMtNS2kHBvxYbl1kUIqELQXHhQigtCK266EIoSt0YplbsTJGqaFFahC6yiiJFUIJ2IUGClJLj4vvu/X7uTC7cJOoLH5eZM3Ped37OOXPhP4akOl20XeAQsB+YAsZH9H8HuAVcAxa9CStDBbhgDyKcAfaMSJoVo8pZEd7wJqwNCHDRdlSZF+H0FhO3cR3FeBv+BOjUDM8InEZhm79pFT5w0Xb6Aly0Eyjvb+OqGxBVBzwJMAaA8jLCTrQ3olRbHNBv5TcKxoCH6e10z6cKKMeAr8ZKwsN94r5MABaAd7wJ6yMKwEU7A8SG/+J3Gqo78GBi7tXNkpf4giIUG1CYBOiUl2FHYuLNLSAH1SnQXQnDbYBOlkTZNHmRU+QbkG7bJsi3UF3CQbRS1OFoxwSeRplEuOpN+D1LHO048DbKfNJ3gXPQzANZuGh3CHyNchm4gPKji/axzNgp4BrKfN6jnvU23KwJSMhsds2iPF5rT6jqJy7ae2vEHRftUVR/QNm7wXoCIid6jTIMhYEc0BRzf/9ISpuI7EK54KI1qoyrckZgdsBXfTHCe8Cr3oQ7PdfVEQjNOG3mhC8z454CzolwQ4TZDeavIjwPHK+TVzswHJ+jfIhwNGGbHTL3e+CIN+HnlDF/B2rwJqwjzAFLw5RW0HXgJLAvRw79HUi8S1qavAl/uWifBb0BMjGEfQXkmDfhSr2zLHr3ACveFvknH4YJTd6EZUVe2ZBauQI8UicvI+Qt4A+EXxH9zkV7XyUgVbczuGjCIsqpxJy7wAmE/Ymn1wzK6yjdYqxMA+9C/wiq2ltbSR7Cayi7FZ4rZulPKnLkognXM7uyL9H3BPR2QGSwFEv+eLwJdxFeFOEBhEcReShLXvjrtkoxSFEIe2G4BkzWRWjxGs6iLGLLG42p+5ImOcCaN2G9J2AJaJRMgQMu2peAj0cty2WpfwE4kDAvQZWILgEzrQEd4CNgwUXzS/OM2ncm294N2bpwqS7gvMKbAjsTA/eCtJy0Y3RYuwlVbotwHspL6E1YFWXuX3iSg4LAnDdhtS+gFL2oxSN0eyEsICxWzRZctAfL0rpnK3kVbolw3JvwWVNPAi7aLsohZNN/Tv8GllEuI3zaLsX/C/wDM7pjD59N2pkAAAAASUVORK5CYII=)](https://sast.checkmarx.net/cxwebclient/portal#/projectState/702/Summary)
[![Codacy Badge](https://app.codacy.com/project/badge/Grade/ceddb5b1b37d4edfa56440842c6248a4)](https://www.codacy.com/gh/Checkmarx/kics/dashboard?utm_source=github.com&utm_medium=referral&utm_content=Checkmarx/kics&utm_campaign=Badge_Grade)
Expand Down Expand Up @@ -55,9 +54,13 @@ Find security vulnerabilities, compliance issues, and infrastructure misconfigur
<br>
<img alt="Azure BluePrints" src="docs/img/logo-azure-blueprints.png" width="100">&nbsp;&nbsp;&nbsp;
<img alt="GitHub Workflows" src="docs/img/logo-github-icon.png" width="100">&nbsp;&nbsp;&nbsp;
<img alt="OpenTofu" src="docs/img/logo-opentofu.png" width="150">&nbsp;&nbsp;&nbsp;

#### Beta Features
Soon...
<img alt="Databricks" src="docs/img/logo-databricks.png" width="200">&nbsp;&nbsp;&nbsp;
<img alt="NIFCloud" src="docs/img/logo-nifcloud.png" width="110">&nbsp;&nbsp;&nbsp;

In order to run the Databricks and NIFCloud queries, use the `--experimental-queries` flag when running KICS.

## Getting Started

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
"queryName": "SQL DB Instance With SSL Disabled",
"severity": "HIGH",
"category": "Encryption",
"descriptionText": "Cloud SQL Database Instance should have SLL enabled",
"descriptionText": "Cloud SQL Database Instance should have SSL enabled",
"descriptionUrl": "https://docs.ansible.com/ansible/latest/collections/google/cloud/gcp_sql_instance_module.html#parameter-settings/ip_configuration/require_ssl",
"platform": "Ansible",
"descriptionID": "50bb06d6",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
"queryName": "SQL DB Instance With SSL Disabled",
"severity": "HIGH",
"category": "Encryption",
"descriptionText": "Cloud SQL Database Instance should have SLL enabled",
"descriptionText": "Cloud SQL Database Instance should have SSL enabled",
"descriptionUrl": "https://cloud.google.com/sql/docs/mysql/admin-api/rest/v1beta4/instances",
"platform": "GoogleDeploymentManager",
"descriptionID": "b42ee5a9",
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
{
"id": "953c0cc6-5f30-44cb-a803-bf4ef2571be8",
"queryName": "Databricks Autoscale Badly Setup",
"severity": "MEDIUM",
"category": "Resource Management",
"descriptionText": "Databricks should have min and max worker setup for autoscale",
"descriptionUrl": "https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/cluster",
"descriptionID": "e6eabd3e",
"platform": "Terraform",
"cloudProvider": "common",
"experimental": "true"
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
package Cx

import data.generic.terraform as tf_lib

# Flags databricks_cluster resources whose autoscale block does not define
# min_workers. Fires via MissingAttribute when the attribute is absent.
CxPolicy[result] {
	resource := input.document[i].resource.databricks_cluster[name]
	not resource.autoscale.min_workers

	result := {
		"documentId": input.document[i].id,
		"resourceType": "databricks_cluster",
		"resourceName": tf_lib.get_resource_name(resource, name),
		"searchKey": sprintf("databricks_cluster[%s].autoscale", [name]),
		"issueType": "MissingAttribute",
		"keyExpectedValue": sprintf("'databricks_cluster[%s].autoscale.min_workers' should not be empty", [name]),
		# fixed: original message ended with a stray trailing apostrophe ("... is not setup'")
		"keyActualValue": sprintf("'databricks_cluster[%s].autoscale.min_workers' is not setup", [name]),
	}
}

# Flags databricks_cluster resources whose autoscale block does not define
# max_workers. Mirrors the min_workers rule above.
CxPolicy[result] {
	resource := input.document[i].resource.databricks_cluster[name]
	not resource.autoscale.max_workers

	result := {
		"documentId": input.document[i].id,
		"resourceType": "databricks_cluster",
		"resourceName": tf_lib.get_resource_name(resource, name),
		"searchKey": sprintf("databricks_cluster[%s].autoscale", [name]),
		"issueType": "MissingAttribute",
		"keyExpectedValue": sprintf("'databricks_cluster[%s].autoscale.max_workers' should not be empty", [name]),
		# fixed: original message ended with a stray trailing apostrophe ("... is not setup'")
		"keyActualValue": sprintf("'databricks_cluster[%s].autoscale.max_workers' is not setup", [name]),
	}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
resource "databricks_cluster" "negative" { # compliant fixture: must produce no findings
cluster_name = "Shared Autoscaling"
spark_version = data.databricks_spark_version.latest.id
node_type_id = data.databricks_node_type.smallest.id
autotermination_minutes = 20
autoscale { # both bounds present, so the autoscale query stays silent
min_workers = 1
max_workers = 50
}
aws_attributes {
availability = "SPOT_WITH_FALLBACK"
zone_id = "auto"
first_on_demand = 1
spot_bid_price_percent = 100
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
resource "databricks_cluster" "positive1" { # non-compliant fixture: autoscale lacks max_workers
cluster_name = "Shared Autoscaling"
spark_version = data.databricks_spark_version.latest.id
node_type_id = data.databricks_node_type.smallest.id
autotermination_minutes = 20
autoscale { # expected finding anchors here (line 6 in expected_result.json)
min_workers = 1
}
aws_attributes {
availability = "SPOT"
zone_id = "us-east-1"
first_on_demand = 1
spot_bid_price_percent = 100
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
resource "databricks_cluster" "positive2" { # non-compliant fixture: autoscale lacks min_workers
cluster_name = "Shared Autoscaling"
spark_version = data.databricks_spark_version.latest.id
node_type_id = data.databricks_node_type.smallest.id
autotermination_minutes = 20
autoscale { # expected finding anchors here (line 6 in expected_result.json)
max_workers = 50
}
aws_attributes {
availability = "SPOT"
zone_id = "us-east-1"
first_on_demand = 1
spot_bid_price_percent = 100
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
[
{
"queryName": "Databricks Autoscale Badly Setup",
"severity": "MEDIUM",
"line": 6,
"fileName": "positive1.tf"
},
{
"queryName": "Databricks Autoscale Badly Setup",
"severity": "MEDIUM",
"line": 6,
"fileName": "positive2.tf"
}
]
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
{
"id": "b0749c53-e3ff-4d09-bbe4-dca94e2e7a38",
"queryName": "Check Databricks Cluster AWS Attribute Best Practices",
"severity": "MEDIUM",
"category": "Best Practices",
"descriptionText": "One or some Databricks Cluster AWS Attribute Best Practices are not respected",
"descriptionUrl": "https://docs.databricks.com/clusters/cluster-config-best-practices.html",
"descriptionID": "b6345aa6",
"platform": "Terraform",
"cloudProvider": "common",
"experimental": "true"
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
package Cx

import data.generic.terraform as tf_lib

# Flags clusters whose AWS availability is plain "SPOT" (no on-demand
# fallback); "SPOT_WITH_FALLBACK" is the recommended setting.
CxPolicy[result] {
	resource := input.document[i].resource.databricks_cluster[name]
	resource.aws_attributes.availability == "SPOT"

	result := {
		"documentId": input.document[i].id,
		"resourceType": "databricks_cluster",
		"resourceName": tf_lib.get_resource_name(resource, name),
		"searchKey": sprintf("databricks_cluster[%s].aws_attributes.availability", [name]),
		"issueType": "IncorrectValue",
		"keyExpectedValue": sprintf("'databricks_cluster[%s].aws_attributes.availability' should not be equal to 'SPOT'", [name]),
		"keyActualValue": sprintf("'databricks_cluster[%s].aws_attributes.availability' is equal to 'SPOT'", [name]),
	}
}

# Flags clusters with first_on_demand explicitly set to 0 (driver node would
# run on a spot instance).
CxPolicy[result] {
	resource := input.document[i].resource.databricks_cluster[name]
	resource.aws_attributes.first_on_demand == 0

	result := {
		"documentId": input.document[i].id,
		"resourceType": "databricks_cluster",
		"resourceName": tf_lib.get_resource_name(resource, name),
		"searchKey": sprintf("databricks_cluster[%s].aws_attributes.first_on_demand", [name]),
		"issueType": "IncorrectValue",
		"keyExpectedValue": sprintf("'databricks_cluster[%s].aws_attributes.first_on_demand' should not be equal to '0'", [name]),
		"keyActualValue": sprintf("'databricks_cluster[%s].aws_attributes.first_on_demand' is equal to '0'", [name]),
	}
}

# Flags clusters whose aws_attributes block omits first_on_demand entirely.
# NOTE(review): unlike the two rules above, `resource` here is bound to the
# aws_attributes sub-object, so get_resource_name receives it instead of the
# cluster — presumably it falls back to `name`; confirm against tf_lib.
CxPolicy[result] {
	resource := input.document[i].resource.databricks_cluster[name].aws_attributes
	not resource.first_on_demand

	result := {
		"documentId": input.document[i].id,
		"resourceType": "databricks_cluster",
		"resourceName": tf_lib.get_resource_name(resource, name),
		"searchKey": sprintf("databricks_cluster[%s].aws_attributes.first_on_demand", [name]),
		# fixed: was "IncorrectValue" — the attribute is absent, which this
		# codebase reports as "MissingAttribute" (cf. the autoscale query)
		"issueType": "MissingAttribute",
		"keyExpectedValue": sprintf("'databricks_cluster[%s].aws_attributes.first_on_demand' should be present", [name]),
		"keyActualValue": sprintf("'databricks_cluster[%s].aws_attributes.first_on_demand' is not present", [name]),
	}
}

# Flags clusters whose zone_id is set to anything other than "auto". The rule
# only fires when an aws_attributes block exists (binding fails otherwise).
CxPolicy[result] {
	resource := input.document[i].resource.databricks_cluster[name].aws_attributes
	not resource.zone_id == "auto"

	result := {
		"documentId": input.document[i].id,
		"resourceType": "databricks_cluster",
		"resourceName": tf_lib.get_resource_name(resource, name),
		"searchKey": sprintf("databricks_cluster[%s].aws_attributes.zone_id", [name]),
		"issueType": "IncorrectValue",
		# fixed typo: "egal" -> "equal"
		"keyExpectedValue": sprintf("'databricks_cluster[%s].aws_attributes.zone_id' should be equal to 'auto'", [name]),
		"keyActualValue": sprintf("'databricks_cluster[%s].aws_attributes.zone_id' is not equal to 'auto'", [name]),
	}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
resource "databricks_cluster" "negative" { # compliant fixture: must produce no findings
cluster_name = "Shared Autoscaling"
spark_version = data.databricks_spark_version.latest.id
node_type_id = data.databricks_node_type.smallest.id
autotermination_minutes = 20
autoscale {
min_workers = 1
max_workers = 50
}
aws_attributes { # fallback availability, auto zone, and on-demand driver all set
availability = "SPOT_WITH_FALLBACK"
zone_id = "auto"
first_on_demand = 1
spot_bid_price_percent = 100
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
resource "databricks_cluster" "positive1" { # non-compliant fixture: availability is plain "SPOT"
cluster_name = "data"
spark_version = data.databricks_spark_version.latest.id
node_type_id = data.databricks_node_type.smallest.id
autotermination_minutes = 20
autoscale {
min_workers = 1
max_workers = 50
}
aws_attributes {
availability = "SPOT" # expected finding anchors here (line 11 in expected_result.json)
zone_id = "auto"
first_on_demand = 1
spot_bid_price_percent = 100
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
resource "databricks_cluster" "positive2" { # non-compliant fixture: first_on_demand is 0
cluster_name = "data"
spark_version = data.databricks_spark_version.latest.id
node_type_id = data.databricks_node_type.smallest.id
autotermination_minutes = 20
autoscale {
min_workers = 1
max_workers = 50
}
aws_attributes {
availability = "SPOT_WITH_FALLBACK"
zone_id = "auto"
first_on_demand = 0 # expected finding anchors here (line 13 in expected_result.json)
spot_bid_price_percent = 100
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
resource "databricks_cluster" "positive3" { # non-compliant fixture: first_on_demand missing
cluster_name = "data"
spark_version = data.databricks_spark_version.latest.id
node_type_id = data.databricks_node_type.smallest.id
autotermination_minutes = 20
autoscale {
min_workers = 1
max_workers = 50
}
aws_attributes { # expected finding anchors here (line 10 in expected_result.json)
availability = "SPOT_WITH_FALLBACK"
zone_id = "auto"
spot_bid_price_percent = 100
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
resource "databricks_cluster" "positive4" { # non-compliant fixture: zone_id is not "auto"
cluster_name = "data"
spark_version = data.databricks_spark_version.latest.id
node_type_id = data.databricks_node_type.smallest.id
autotermination_minutes = 20
autoscale {
min_workers = 1
max_workers = 50
}
aws_attributes {
availability = "SPOT_WITH_FALLBACK"
zone_id = "us-west-2a" # expected finding anchors here (line 12 in expected_result.json)
first_on_demand = 1
spot_bid_price_percent = 100
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
[
{
"queryName": "Check Databricks Cluster AWS Attribute Best Practices",
"severity": "MEDIUM",
"line": 11,
"fileName": "positive1.tf"
},
{
"queryName": "Check Databricks Cluster AWS Attribute Best Practices",
"severity": "MEDIUM",
"line": 13,
"fileName": "positive2.tf"
},
{
"queryName": "Check Databricks Cluster AWS Attribute Best Practices",
"severity": "MEDIUM",
"line": 10,
"fileName": "positive3.tf"
},
{
"queryName": "Check Databricks Cluster AWS Attribute Best Practices",
"severity": "MEDIUM",
"line": 12,
"fileName": "positive4.tf"
}
]
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
{
"id": "38028698-e663-4ef7-aa92-773fef0ca86f",
"queryName": "Check Databricks Cluster Azure Attribute Best Practices",
"severity": "MEDIUM",
"category": "Best Practices",
"descriptionText": "One or some Databricks Cluster Azure Attribute Best Practices are not respected",
"descriptionUrl": "https://docs.databricks.com/clusters/cluster-config-best-practices.html",
"descriptionID": "bb2e4c81",
"platform": "Terraform",
"cloudProvider": "common",
"experimental": "true"
}
Loading

0 comments on commit 3abc80b

Please sign in to comment.