Skip to content

Commit

Permalink
setup tox and fully add one feature
Browse files Browse the repository at this point in the history
  • Loading branch information
Kudbettin committed Jul 7, 2020
1 parent b77f9c8 commit bba3ada
Show file tree
Hide file tree
Showing 11 changed files with 197 additions and 7 deletions.
2 changes: 1 addition & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ example/tf_files/*
terraform_compliance.egg-info

.DS_Store
terraform-compliance.iml
user-friendly-features.iml

**/.terraform
**/plan.out
Expand Down
12 changes: 7 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ user-friendly-features (Do all in one for now, split if it feels cluttered)
azure
other_provider…
tests
test_file.py
fun_functional_tests.py
aws_tests
passing_setups
ALB
Expand All @@ -46,17 +46,19 @@ General features vs provider specific features. How to place them? Make a new di

## TODO (which should really be converted to issues later)

add policies

add tests

setup Tox
- add all passing tests
- add all failing tests
- fix the .expected's (which might be tedious)


## Notes
Why have passing_setups and failing_setups?
- Felt it would be easier to name the files that way.

Each passing/failing setup is tested against all the features in that directory.
- Unnecessary. (Or even good practice?)
- Should pass/skip unrelated stuff nonetheless


usage: `terraform-compliance -p plan.out.json -f path_to_repo`
2 changes: 1 addition & 1 deletion aws/S3.feature
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ Feature: S3 related general feature

# is this logging access logging?
# check if at least one s3 has logging enabled, because logging will require another s3
@noskip
@noskip_at_line_24
Scenario: S3 must have access logging enabled
Given I have aws_s3_bucket defined
When it has logging
Expand Down
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
terraform-compliance
1 change: 1 addition & 0 deletions tests/aws/failing_setups/ALB_protocol_not_https/.expected
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Failure: Can not find HTTPS in protocol property of aws_lb_listener.front_end.
Empty file.
20 changes: 20 additions & 0 deletions tests/aws/failing_setups/ALB_protocol_not_https/main.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
# Test fixture: an ALB listener that deliberately uses HTTP (not HTTPS)
# so the S3/ALB compliance feature fails. The values here must stay in
# sync with the pre-generated plan.out.json next to this file.
resource "aws_lb" "front_end" {
  # ...
}

resource "aws_lb_target_group" "front_end" {
  # ...
}

resource "aws_lb_listener" "front_end" {
  # NOTE(review): "$aws_lb..." looks like a broken interpolation
  # (Terraform syntax would be "${aws_lb.front_end.arn}"), but the
  # committed plan.out.json contains the same literal string — confirm
  # before changing, as the fixture and plan must match.
  load_balancer_arn = "$aws_lb.front_end.arn"
  port              = "443"
  # Intentionally HTTP so the "protocol must contain HTTPS" check fails.
  protocol          = "HTTP"
  ssl_policy        = "ELBSecurityPolicy-2016-08"
  certificate_arn   = "arn:aws:iam::187416307283:server-certificate/test_cert_rab3wuqwgja25ct3n4jdj2tzu4"

  default_action {
    type             = "forward"
    target_group_arn = "$aws_lb_target_group.front_end.arn"
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"format_version":"0.1","terraform_version":"0.12.25","planned_values":{"root_module":{"resources":[{"address":"aws_lb.front_end","mode":"managed","type":"aws_lb","name":"front_end","provider_name":"aws","schema_version":0,"values":{"access_logs":[],"drop_invalid_header_fields":false,"enable_cross_zone_load_balancing":null,"enable_deletion_protection":false,"enable_http2":true,"idle_timeout":60,"load_balancer_type":"application","name_prefix":null,"tags":null,"timeouts":null}},{"address":"aws_lb_listener.front_end","mode":"managed","type":"aws_lb_listener","name":"front_end","provider_name":"aws","schema_version":0,"values":{"certificate_arn":"arn:aws:iam::187416307283:server-certificate/test_cert_rab3wuqwgja25ct3n4jdj2tzu4","default_action":[{"authenticate_cognito":[],"authenticate_oidc":[],"fixed_response":[],"forward":[],"redirect":[],"target_group_arn":"$aws_lb_target_group.front_end.arn","type":"forward"}],"load_balancer_arn":"$aws_lb.front_end.arn","port":443,"protocol":"HTTP","ssl_policy":"ELBSecurityPolicy-2016-08","timeouts":null}},{"address":"aws_lb_target_group.front_end","mode":"managed","type":"aws_lb_target_group","name":"front_end","provider_name":"aws","schema_version":0,"values":{"deregistration_delay":300,"lambda_multi_value_headers_enabled":false,"name_prefix":null,"port":null,"protocol":null,"proxy_protocol_v2":false,"slow_start":0,"tags":null,"target_type":"instance","vpc_id":null}}]}},"resource_changes":[{"address":"aws_lb.front_end","mode":"managed","type":"aws_lb","name":"front_end","provider_name":"aws","change":{"actions":["create"],"before":null,"after":{"access_logs":[],"drop_invalid_header_fields":false,"enable_cross_zone_load_balancing":null,"enable_deletion_protection":false,"enable_http2":true,"idle_timeout":60,"load_balancer_type":"application","name_prefix":null,"tags":null,"timeouts":null},"after_unknown":{"access_logs":[],"arn":true,"arn_suffix":true,"dns_name":true,"id":true,"internal":true,"ip_address_type":true,"name":true,"sec
urity_groups":true,"subnet_mapping":true,"subnets":true,"vpc_id":true,"zone_id":true}}},{"address":"aws_lb_listener.front_end","mode":"managed","type":"aws_lb_listener","name":"front_end","provider_name":"aws","change":{"actions":["create"],"before":null,"after":{"certificate_arn":"arn:aws:iam::187416307283:server-certificate/test_cert_rab3wuqwgja25ct3n4jdj2tzu4","default_action":[{"authenticate_cognito":[],"authenticate_oidc":[],"fixed_response":[],"forward":[],"redirect":[],"target_group_arn":"$aws_lb_target_group.front_end.arn","type":"forward"}],"load_balancer_arn":"$aws_lb.front_end.arn","port":443,"protocol":"HTTP","ssl_policy":"ELBSecurityPolicy-2016-08","timeouts":null},"after_unknown":{"arn":true,"default_action":[{"authenticate_cognito":[],"authenticate_oidc":[],"fixed_response":[],"forward":[],"order":true,"redirect":[]}],"id":true}}},{"address":"aws_lb_target_group.front_end","mode":"managed","type":"aws_lb_target_group","name":"front_end","provider_name":"aws","change":{"actions":["create"],"before":null,"after":{"deregistration_delay":300,"lambda_multi_value_headers_enabled":false,"name_prefix":null,"port":null,"protocol":null,"proxy_protocol_v2":false,"slow_start":0,"tags":null,"target_type":"instance","vpc_id":null},"after_unknown":{"arn":true,"arn_suffix":true,"health_check":true,"id":true,"load_balancing_algorithm_type":true,"name":true,"stickiness":true}}}],"configuration":{"root_module":{"resources":[{"address":"aws_lb.front_end","mode":"managed","type":"aws_lb","name":"front_end","provider_config_key":"aws","schema_version":0},{"address":"aws_lb_listener.front_end","mode":"managed","type":"aws_lb_listener","name":"front_end","provider_config_key":"aws","expressions":{"certificate_arn":{"constant_value":"arn:aws:iam::187416307283:server-certificate/test_cert_rab3wuqwgja25ct3n4jdj2tzu4"},"default_action":[{"target_group_arn":{"constant_value":"$aws_lb_target_group.front_end.arn"},"type":{"constant_value":"forward"}}],"load_balancer_arn":{"constant
_value":"$aws_lb.front_end.arn"},"port":{"constant_value":"443"},"protocol":{"constant_value":"HTTP"},"ssl_policy":{"constant_value":"ELBSecurityPolicy-2016-08"}},"schema_version":0},{"address":"aws_lb_target_group.front_end","mode":"managed","type":"aws_lb_target_group","name":"front_end","provider_config_key":"aws","schema_version":0}]}}}
1 change: 1 addition & 0 deletions tests/aws/passing_setups/ALB/feature.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
ALB.feature
143 changes: 143 additions & 0 deletions tests/aws/run_functional_tests.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,143 @@
import os
import sys
import subprocess
import colorful
import re


class Config(object):
    """Static settings shared by the functional test runner."""

    # Provider name; doubles as the name of the directory holding the features.
    provider = 'aws'
    # Root directory containing the per-setup test fixtures for this provider.
    test_dir = 'tests/{}'.format(provider)
    # Command-line flags passed to every terraform-compliance invocation.
    default_parameters = ['--no-ansi']

print('Running functional tests in {}.'.format(Config.test_dir))

# Collect the tests to run. Each test is a (setups_dir, test_dir) pair,
# e.g. ('failing_setups', 'ALB_protocol_not_https').
if len(sys.argv) == 2:
    # A single test can be selected from the command line as
    # "setups_dir/test_dir". BUG FIX: the raw string used to be stored
    # directly, which broke the 2-tuple unpacking in the loop below.
    tests = [tuple(sys.argv[1].strip('/').split('/', 1))]
else:
    tests = []
    for setups in os.listdir(Config.test_dir):
        if os.path.isdir('{}/{}'.format(Config.test_dir, setups)):
            for setup in os.listdir('{}/{}'.format(Config.test_dir, setups)):
                if os.path.isdir('{}/{}/{}'.format(Config.test_dir, setups, setup)):
                    tests.append((setups, setup))

print('Total {} number of tests will be executed.'.format(len(tests)))

test_summary = []
failure_happened = False  # overall exit status for the whole run

for outer_dir, test_dir in tests:
    parameters = ['terraform-compliance']
    parameters.extend(Config.default_parameters.copy())
    directory = '{}/{}/{}'.format(Config.test_dir, outer_dir, test_dir)

    feature_directory = Config.provider
    test_result = ''
    # BUG FIX: pass/fail must be tracked per test. The old code checked the
    # run-global failure_happened, so after the first failure every later
    # passing test was also reported as failed.
    test_failed = False

    # TODO(review): per-test feature selection via feature.txt is stubbed
    # out for now; re-enable once the feature layout is settled.
    # if not os.path.isfile('{}/feature.txt'.format(directory)):
    #     test_result = colorful.orange('skipped')
    # else:
    #     with open('{}/feature.txt'.format(directory)) as f:
    #         feature = f.readline().strip()
    #         feature_directory = '{}/{}'.format(Config.provider, feature)

    expected = []
    unexpected = []

    if not os.path.isfile('{}/plan.out.json'.format(directory)):  # or not os.path.isfile(feature_directory):
        test_result = colorful.orange('skipped')
    else:
        # A .failure marker means the setup is expected to violate the
        # features, so run terraform-compliance in --wip mode.
        if os.path.isfile('{}/.failure'.format(directory)):
            parameters.append('--wip')

        # .expected / .unexpected hold one regex per line to (not) find in
        # the tool's output. BUG FIX: empty lines are dropped, because
        # re.findall('') matches everywhere and made those checks meaningless.
        if os.path.isfile('{}/.expected'.format(directory)):
            with open('{}/.expected'.format(directory)) as expected_file:
                expected = [line for line in expected_file.read().split('\n') if line]

        if os.path.isfile('{}/.unexpected'.format(directory)):
            with open('{}/.unexpected'.format(directory)) as unexpected_file:
                unexpected = [line for line in unexpected_file.read().split('\n') if line]

        if not os.path.isfile('{}/.no_early_exit'.format(directory)):
            parameters.append('-q')

        parameters.extend([
            '-f', '{}'.format(feature_directory),
            '-p', '{}/plan.out.json'.format(directory)
        ])

        try:
            print('Running {}.'.format(colorful.yellow(test_dir)))
            # TODO: Add multithreading here if we have more than 50+ integration tests ?
            test_process = subprocess.run(parameters,
                                          check=True,
                                          stdout=subprocess.PIPE,
                                          universal_newlines=True,
                                          )

            if os.environ.get('DEBUG'):
                print('Output: {}'.format(colorful.grey(test_process.stdout)))

            # NOTE(review): with check=True a non-zero exit raises
            # CalledProcessError, so returncode is always 0 here; the else
            # branch below is kept only as a safety net.
            if test_process.returncode == 0:
                if expected:
                    # Every expected pattern must appear in the output.
                    expected_failures = [
                        exp for exp in expected
                        if not re.findall(exp, str(test_process.stdout))
                    ]

                    if expected_failures:
                        print('\nOutput: {}'.format(test_process.stdout))
                        print('Can not find ;')
                        for failure in expected_failures:
                            print('\t{}'.format(colorful.yellow(failure)))
                        print('in the test output.\n')

                        test_result = colorful.red('failed')
                        test_failed = True

                if unexpected:
                    # No unexpected pattern may appear in the output.
                    unexpected_failures = [
                        unexp for unexp in unexpected
                        if re.findall(unexp, str(test_process.stdout))
                    ]

                    if unexpected_failures:
                        print('\nOutput: {}'.format(test_process.stdout))
                        print('Found;')
                        # BUG FIX: this loop used to iterate
                        # expected_failures, printing the wrong patterns.
                        for failure in unexpected_failures:
                            print('\t{}'.format(colorful.yellow(failure)))
                        print('in the test output. This was unexpected.\n')

                        test_result = colorful.red('failed')
                        test_failed = True

                if not test_failed:
                    test_result = colorful.green('passed')

            else:
                print('Output: {}'.format(test_process.stdout))
                test_result = colorful.red('failed')
                test_failed = True

        except subprocess.CalledProcessError as e:
            test_failed = True

            # Exit code 1 is the tool's regular failure code; anything else
            # indicates the run itself errored.
            if e.returncode != 1:
                test_result = colorful.orange('errored')
            else:
                test_result = colorful.red('failed')
                print('Expected a different return code. Received {}'.format(colorful.yellow(e.returncode)))

            print('Output: {}'.format(e.stdout))

    failure_happened = failure_happened or test_failed
    test_summary.append('{:.<70s}{:.>10s}'.format(test_dir, test_result))

print('\n\nRan {} tests.'.format(len(tests)))
print('\n'.join(sorted(test_summary)))

# Propagate failure to the caller (tox / CI) via the exit code.
if failure_happened:
    sys.exit(1)
21 changes: 21 additions & 0 deletions tox.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
[tox]
envlist = aws_integration
skipsdist = True

[testenv]
basepython = python3.6
setenv =
VIRTUALENV_NO_DOWNLOAD=1
PYTHONIOENCODING=UTF-8
deps =
-rrequirements.txt
ignore_errors=false

[testenv:aws_integration]
basepython = python3.6
setenv =
VIRTUALENV_NO_DOWNLOAD=1
PYTHONIOENCODING=UTF-8
; DEBUG=1
ignore_errors=false
commands = python tests/aws/run_functional_tests.py

0 comments on commit bba3ada

Please sign in to comment.